diff --git a/backend/btrixcloud/crawlmanager.py b/backend/btrixcloud/crawlmanager.py
index 8bdae825..b6c9b79a 100644
--- a/backend/btrixcloud/crawlmanager.py
+++ b/backend/btrixcloud/crawlmanager.py
@@ -105,7 +105,7 @@ class CrawlManager(K8sAPI):
             STORAGE_NAME=storage_name,
             PROFILE_FILENAME=profile_filename,
             INITIAL_SCALE=str(crawlconfig.scale),
-            CRAWL_TIMEOUT=str(crawlconfig.crawlTimeout)
+            CRAWL_TIMEOUT=str(crawlconfig.crawlTimeout or 0)
             # REV=str(crawlconfig.rev),
         )
 
diff --git a/backend/btrixcloud/main_scheduled_job.py b/backend/btrixcloud/main_scheduled_job.py
index bd13e747..d94bd7f3 100644
--- a/backend/btrixcloud/main_scheduled_job.py
+++ b/backend/btrixcloud/main_scheduled_job.py
@@ -29,6 +29,8 @@ class ScheduledJob(K8sAPI):
 
     async def run(self):
         """run crawl!"""
+        register_exit_handler()
+
         config_map = await self.core_api.read_namespaced_config_map(
             name=f"crawl-config-{self.cid}", namespace=self.namespace
         )
@@ -36,7 +38,12 @@ class ScheduledJob(K8sAPI):
 
         userid = data["USER_ID"]
         scale = int(data.get("INITIAL_SCALE", 0))
-        crawl_timeout = int(data.get("CRAWL_TIMEOUT", 0))
+        try:
+            crawl_timeout = int(data.get("CRAWL_TIMEOUT", 0))
+        # pylint: disable=bare-except
+        except:
+            crawl_timeout = 0
+
         oid = data["ORG_ID"]
 
         crawlconfig = await get_crawl_config(self.crawlconfigs, uuid.UUID(self.cid))
@@ -71,5 +78,4 @@ def main():
 
 
 if __name__ == "__main__":
-    register_exit_handler()
     main()
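
For context, here is a minimal standalone sketch of the defensive parsing pattern the second file adopts: a `CRAWL_TIMEOUT` value read back from a ConfigMap may be missing, empty, or a stringified `None`, so the `int()` conversion is guarded and falls back to `0` (no timeout). The helper name and the narrowed `except (TypeError, ValueError)` clause are illustrative choices, not code from this diff (which uses a bare `except`).

```python
def parse_crawl_timeout(data: dict) -> int:
    """Return CRAWL_TIMEOUT from a ConfigMap data dict, defaulting to 0 on bad input."""
    try:
        return int(data.get("CRAWL_TIMEOUT", 0))
    except (TypeError, ValueError):
        # Covers values such as "" or "None" left behind by older config maps.
        return 0


assert parse_crawl_timeout({"CRAWL_TIMEOUT": "3600"}) == 3600
assert parse_crawl_timeout({"CRAWL_TIMEOUT": "None"}) == 0
assert parse_crawl_timeout({}) == 0
```

Writing `str(crawlconfig.crawlTimeout or 0)` on the producer side (first file) and guarding the `int()` on the consumer side means a `None` timeout can no longer serialize as the literal string "None" and crash the scheduled job.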