main.py
# Entry point for a small multithreaded crawler: worker threads pull URLs
# from a shared queue, which is refilled from the project's queue.txt file
# until no queued links remain.
import threading
from queue import Queue

from spider import Spider
from domain import *
from functions import *

PROJECT_NAME = '9gag'
BASE_URL = 'http://9gag.com'
DOMAIN_NAME = getDomainName(BASE_URL)
QUEUE_FILE = PROJECT_NAME + '/queue.txt'
CRAWLED_FILE = PROJECT_NAME + '/crawled.txt'
NUMBER_OF_THREADS = 4
threadQueue = Queue()

# Initialize the Spider for this project, starting from BASE_URL
Spider(PROJECT_NAME, BASE_URL, DOMAIN_NAME)


# Keep dispatching jobs until the queue file is empty
def crawl():
    while True:
        queuedLinks = fileToSet(QUEUE_FILE)
        if len(queuedLinks) > 0:
            createJobs()
        else:
            break


# Each link in the queue file is a new job
def createJobs():
    for link in fileToSet(QUEUE_FILE):
        threadQueue.put(link)
    threadQueue.join()


# Create daemon worker threads
def createThreads():
    for _ in range(NUMBER_OF_THREADS):
        t = threading.Thread(target=work)
        t.daemon = True
        t.start()


# Worker loop: take a URL from the queue, crawl it, mark the job done
def work():
    while True:
        url = threadQueue.get()
        Spider.crawlPage(threading.current_thread().name, url)
        threadQueue.task_done()


createThreads()
crawl()
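
The spider, domain, and functions modules are not included in this file. Based only on how main.py calls them, minimal sketches of the two imported helpers might look like the following; the bodies are assumptions, not the project's actual code (only the names getDomainName and fileToSet come from main.py itself):

# domain.py (sketch -- assumes getDomainName returns the host part of a URL)
from urllib.parse import urlparse

def getDomainName(url):
    try:
        # e.g. 'http://9gag.com/hot' -> '9gag.com'
        return urlparse(url).netloc
    except Exception:
        return ''

# functions.py (sketch -- assumes fileToSet reads one link per line)
def fileToSet(fileName):
    links = set()
    with open(fileName, 'rt') as f:
        for line in f:
            links.add(line.strip())
    return links

Spider (spider.py) is likewise assumed to take (projectName, baseUrl, domainName) in its constructor and to expose a crawlPage(threadName, url) method, since that is how main.py invokes it.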