Silly me. (bis)
This commit is contained in:
parent f6da179820
commit f0b8672c89
1 changed file with 2 additions and 2 deletions
@@ -162,7 +162,7 @@ class CrawlingThread(Thread):
     def run(self):
         tasks = []
         #tasks.append(async_crawler("http://plus.google.com/+Python"))
-        tasks.append(async_print('https://python.org/'))
+        tasks.append(async_crawler('https://python.org/'))

         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
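The first hunk swaps the coroutine that actually gets scheduled: run() was still queueing the async_print debug helper instead of async_crawler. Below is a minimal sketch (not the repository's file) of how a CrawlingThread can own its own asyncio event loop and drive that coroutine; the run_until_complete/gather ending and the stub async_crawler are assumptions for illustration only.

import asyncio
from threading import Thread

async def async_crawler(url):
    # Stub for illustration; the real coroutine fetches pages with aiohttp.
    await asyncio.sleep(0)
    return [url]

class CrawlingThread(Thread):
    def run(self):
        tasks = []
        tasks.append(async_crawler('https://python.org/'))

        # A thread other than the main one has no event loop by default,
        # so create and install one before running the tasks (assumed ending).
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(asyncio.gather(*tasks))
        finally:
            loop.close()

Usage would be the normal threading pattern: t = CrawlingThread(); t.start(); t.join(). The explicit new_event_loop()/set_event_loop() pair is what lets the crawl run off the main thread without touching the main thread's loop.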
@@ -209,7 +209,7 @@ async def async_print(url):
 async def async_crawler(url):
     queue = [url]
     crawled = []
-    while queue or (len(crawled) < HARD_LIMIT):
+    while queue and (len(crawled) < HARD_LIMIT):
         async with aiohttp.ClientSession() as session:
             try:
                 url = queue.pop(0)
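The second hunk is the actual "silly me" fix: with `or`, the loop keeps iterating whenever fewer than HARD_LIMIT pages have been crawled, even once the queue is empty, so queue.pop(0) raises IndexError on every pass and the surrounding try presumably just papers over it. With `and`, the crawl stops as soon as either the queue is exhausted or the cap is reached. A sketch of the whole loop under the fixed condition; HARD_LIMIT's value and extract_links are placeholders, not the project's code, and the one-session-per-iteration shape simply mirrors the diff context.

import asyncio

import aiohttp

HARD_LIMIT = 50  # assumed cap for illustration; the real value lives elsewhere

def extract_links(html):
    # Placeholder: the real project parses the HTML and returns outgoing URLs.
    return []

async def async_crawler(url):
    queue = [url]
    crawled = []
    # 'and' stops the crawl once the queue is empty or the cap is hit;
    # 'or' kept looping on an empty queue, making queue.pop(0) fail.
    while queue and (len(crawled) < HARD_LIMIT):
        async with aiohttp.ClientSession() as session:
            url = queue.pop(0)
            async with session.get(url) as response:
                html = await response.text()
            crawled.append(url)
            queue.extend(u for u in extract_links(html)
                         if u not in crawled and u not in queue)
    return crawled

if __name__ == '__main__':
    print(asyncio.run(async_crawler('https://python.org/')))

Reusing a single ClientSession across all requests would normally be preferable to opening one per page, but the per-iteration session is kept here to stay close to the code shown in the hunk.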