Skip to content

Commit 84b1b3e

Browse files
committed
Added new algorithms ...
1 parent 9ba1c10 commit 84b1b3e

File tree

1 file changed

+23
-12
lines changed

1 file changed

+23
-12
lines changed

Pubmed-Poc-link-gather.py

Lines changed: 23 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,36 @@
11
from fake_useragent import UserAgent
2-
import requests
2+
import requests,json
33
from urllib.parse import quote
44
from bs4 import BeautifulSoup
55

6-
def Gathering_Links(Count, Type, TopicSubject, Datemin, Datemax):
    """Collect article URLs for a search topic from a supported source.

    Parameters
    ----------
    Count : str
        Maximum number of results to request (sent as-is in the query string).
    Type : str
        Source selector, case-insensitive: 'pubmed' or 'poc'.
    TopicSubject : str
        Search term; URL-quoted before being sent.
    Datemin, Datemax : str
        Inclusive publication-date bounds (used by the PubMed branch only).

    Returns
    -------
    list[dict]
        Dicts with a 'Url' key on success, or a single dict with an
        'Error' key on request failure / unsupported Type (best-effort
        contract: this function reports errors, it does not raise).
    """
    Search_results = []
    UA = UserAgent(use_cache_server=False)
    term = quote(TopicSubject)

    if Type.lower() == 'pubmed':
        try:
            # NCBI E-utilities esearch returns an XML document whose <Id>
            # elements are PubMed IDs.
            response = requests.get(
                'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term='
                + term + '&mindate=' + Datemin + '&maxdate=' + Datemax
                + '&retmax=' + Count,
                headers={"User-Agent": UA.random},
                timeout=(10, 30),
            )
            # Surface HTTP errors (4xx/5xx) instead of parsing an error page.
            response.raise_for_status()
            infoids = BeautifulSoup(response.content, 'xml')
            # 'pmid' instead of 'id': avoid shadowing the builtin id().
            for pmid in infoids.find_all('Id'):
                Search_results.append(
                    {'Url': 'https://pubmed.ncbi.nlm.nih.gov/' + pmid.get_text() + '/'})
        except Exception:
            # Was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; keep the original error message.
            Search_results.append({'Error': 'Error raised in Request'})
    elif Type.lower() == 'poc':
        try:
            # Added timeout (matching the PubMed branch); the original
            # call could block indefinitely.
            articles = requests.get(
                'https://secureapi.atpoc.com/api-contentstream/beta/factually/',
                headers={"User-Agent": UA.random},
                timeout=(10, 30),
            ).json()
            for article in articles.values():
                Search_results.append(
                    {'Url': 'https://breakingmed.org/article.html?articleid='
                            + article['articleid']})
        except Exception as e:
            Search_results.append({'Error': str(e)})
    else:
        Search_results.append({'Error': 'Type not supported'})

    return Search_results
2234

2335

if __name__ == '__main__':
    # Demo run: fetch up to 100 'poc' article links for "breast cancer".
    # Guarded so importing this module no longer fires a network request.
    print(Gathering_Links('100', 'poc', 'breast cancer', '2000', '2020'))

0 commit comments

Comments
 (0)