More robust crawling

Théophile Bastian 2018-02-26 13:01:05 +01:00
parent 5d4bd30e20
commit 968ff6d24c
2 changed files with 3 additions and 1 deletion


@@ -291,6 +291,8 @@ async def async_crawler(url, output_tree, headers=None):
                 if sample_url not in queued
                 and sample_url not in crawled
             ]
+        else:
+            print("No html received")
     print(crawled)
     output_tree += crawl_tree
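
For context, the added else branch slots into the crawler's fetch-and-extract loop. The sketch below is a hypothetical reconstruction, not the repository's code: aiohttp, fetch_page, and the regex link extraction are assumptions; only async_crawler, queued, crawled, crawl_tree, and the printed message come from the diff.

import asyncio
import re

import aiohttp  # assumed HTTP client; the real project may use another


async def fetch_page(session, url, headers=None):
    # Return the body for HTML responses, None for anything else or on error.
    try:
        async with session.get(url, headers=headers) as resp:
            if "text/html" in resp.headers.get("Content-Type", ""):
                return await resp.text()
    except aiohttp.ClientError:
        pass
    return None


async def async_crawler(url, output_tree, headers=None):
    queued = [url]
    crawled = set()
    crawl_tree = []  # built elsewhere in the real code; elided here
    async with aiohttp.ClientSession() as session:
        while queued:
            current = queued.pop(0)
            crawled.add(current)
            html = await fetch_page(session, current, headers=headers)
            if html:
                # Naive link extraction; the real crawler likely parses HTML.
                links = re.findall(r'href="(https?://[^"]+)"', html)
                queued += [
                    sample_url for sample_url in links
                    if sample_url not in queued
                    and sample_url not in crawled
                ]
            else:
                # The branch added by this commit: report fetches that
                # yielded no HTML instead of failing silently.
                print("No html received")
    print(crawled)
    output_tree += crawl_tree


if __name__ == "__main__":
    tree = []
    asyncio.run(async_crawler("https://example.com", tree))

Before this commit, a fetch that returned no HTML was skipped silently; the print makes that case visible, which is the "more robust crawling" the commit message refers to.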


@@ -244,6 +244,6 @@ def generate_history(user, start_time):
             new_line.full_clean()
             new_line.save()
         except ValidationError:
-            pass
+            continue
     return history
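
The second hunk swaps pass for continue in the except handler. When the handler is the last statement of the loop body the two behave identically, but continue states the intent (skip the invalid row entirely) and stays correct if statements are later added after the try block. A simplified, self-contained sketch of the pattern follows; HistoryEntry, the rows argument, and the trailing history.append are hypothetical stand-ins, since the hunk only shows the tail of the loop:

class ValidationError(Exception):
    # Stand-in for django.core.exceptions.ValidationError.
    pass


class HistoryEntry:
    # Hypothetical model: rejects missing values, mimicking full_clean().
    def __init__(self, value):
        self.value = value

    def full_clean(self):
        if self.value is None:
            raise ValidationError("value is required")

    def save(self):
        pass  # persistence elided


def generate_history(rows):
    history = []
    for value in rows:
        new_line = HistoryEntry(value)
        try:
            new_line.full_clean()
            new_line.save()
        except ValidationError:
            # 'continue' skips the invalid row outright; with 'pass', any
            # statement below the try block would still run for it.
            continue
        history.append(new_line)  # assumed follow-up work inside the loop
    return history


print(len(generate_history([1, None, 3])))  # 2: the invalid row is dropped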