backend: consider crawl complete if pages crawled meets or exceeds pages found (can happen due to retries); should fix #306

(will look at cause separately in crawler)
Ilya Kreymer 2022-09-28 11:32:08 -07:00
parent e696104ffa
commit ef7a7e538c

@@ -203,7 +203,7 @@ class CrawlJob(ABC):
         self.finished = dt_now()
-        completed = self.last_done and self.last_done == self.last_found
+        completed = self.last_done and self.last_done >= self.last_found
         state = "complete" if completed else "partial_complete"
         print("marking crawl as: " + state, flush=True)