@@ -1,25 +1,36 @@
 from fake_useragent import UserAgent
-import requests
+import requests,json
 from urllib.parse import quote
 from bs4 import BeautifulSoup
 
+
-def Gathering_Links(Count,Type,TopicSubject,Datemin,Datemax):
+def Gathering_Links(Count, Type, TopicSubject, Datemin, Datemax):
     Search_results = []
     UA = UserAgent(use_cache_server=False)
     term = quote(TopicSubject)
     if Type.lower() == 'pubmed':
-        response = requests.get('https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term='+term+'&mindate='+Datemin+'&maxdate='+Datemax+'&retmax='+Count, headers={"User-Agent": UA.random}, timeout=(10, 30))
-        infoids=BeautifulSoup(response.content, 'xml')
-        ids = infoids.find_all('Id')
-        for id in ids:
-            Search_results.append({'Url': 'https://pubmed.ncbi.nlm.nih.gov/'+id.get_text()+'/'})
+        try:
+            response = requests.get('https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term='+term +
+                                    '&mindate='+Datemin+'&maxdate='+Datemax+'&retmax='+Count, headers={"User-Agent": UA.random}, timeout=(10, 30))
+            infoids = BeautifulSoup(response.content, 'xml')
+            ids = infoids.find_all('Id')
+            for id in ids:
+                Search_results.append(
+                    {'Url': 'https://pubmed.ncbi.nlm.nih.gov/'+id.get_text()+'/'})
+        except:
+            Search_results.append({'Error': 'Error raised in Request'})
     elif Type.lower() == 'poc':
-        reponse ='do somthings'
+        try:
+            articles = requests.get(
+                'https://secureapi.atpoc.com/api-contentstream/beta/factually/', headers={"User-Agent": UA.random}).json()
+            for article in articles.values():
+                Search_results.append(
+                    {'Url': 'https://breakingmed.org/article.html?articleid=' + article['articleid']})
+        except Exception as e:
+            Search_results.append({'Error': str(e)})
     else:
-        Search_results.append(
-            {'Error': 'Type not supported'})
+        Search_results.append({'Error': 'Type not supported'})
     return Search_results
 
 
-
-print (Gathering_Links('1000','PubMed','breast cancer','2000','2020'))
+print(Gathering_Links('100', 'poc', 'breast cancer', '2000', '2020'))
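For reference, the ESearch request that the pubmed branch builds by string concatenation can also be written with the params argument of requests.get, which URL-encodes the search term and date values automatically (making the quote() call unnecessary). The following is a minimal sketch, not part of the commit: pubmed_ids is a hypothetical helper name, while the endpoint and the db, term, mindate, maxdate, and retmax parameters are the same ones the diff already uses.

import requests
from bs4 import BeautifulSoup

ESEARCH = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi'


def pubmed_ids(topic, datemin, datemax, count):
    # Same query as the pubmed branch above, but requests builds and
    # encodes the query string from a params dict.
    response = requests.get(
        ESEARCH,
        params={'db': 'pubmed', 'term': topic, 'mindate': datemin,
                'maxdate': datemax, 'retmax': count},
        timeout=(10, 30),
    )
    response.raise_for_status()  # surface HTTP errors instead of parsing an error page
    soup = BeautifulSoup(response.content, 'xml')  # the 'xml' parser requires lxml
    return ['https://pubmed.ncbi.nlm.nih.gov/' + tag.get_text() + '/'
            for tag in soup.find_all('Id')]


print(pubmed_ids('breast cancer', '2000', '2020', '100'))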