From 821d29bd2a9da02043a6c2174680fab115b219a9 Mon Sep 17 00:00:00 2001
From: Ilya Kreymer
Date: Mon, 17 Apr 2023 23:13:13 -0700
Subject: [PATCH] crawlconfig api: add 'currCrawlState' and 'currCrawlStartTime' to crawlconfig list api (already queried on backend) (#770)

* crawlconfig api: add 'currCrawlState' and 'currCrawlStartTime' to crawlconfig list api (already queried on backend)
---
 backend/btrixcloud/crawlconfigs.py | 24 +++++++++++++++++++-----
 1 file changed, 19 insertions(+), 5 deletions(-)

diff --git a/backend/btrixcloud/crawlconfigs.py b/backend/btrixcloud/crawlconfigs.py
index d1089125..8f3bfad5 100644
--- a/backend/btrixcloud/crawlconfigs.py
+++ b/backend/btrixcloud/crawlconfigs.py
@@ -188,6 +188,9 @@ class CrawlConfigOut(CrawlConfig):
     """Crawl Config Output, includes currCrawlId of running crawl"""
 
     currCrawlId: Optional[str]
+    currCrawlStartTime: Optional[datetime]
+    currCrawlState: Optional[str]
+
     profileName: Optional[str]
 
     createdByName: Optional[str]
@@ -470,7 +473,7 @@ class CrawlConfigOps:
         sort_direction: int = -1,
     ):
         """Get all crawl configs for an organization is a member of"""
-        # pylint: disable=too-many-locals
+        # pylint: disable=too-many-locals,too-many-branches
         # Zero-index page for query
         page = page - 1
         skip = page * page_size
@@ -635,13 +638,13 @@
         )
         running = {}
         for crawl in crawls:
-            running[crawl.cid] = crawl.id
+            running[crawl.cid] = crawl
 
         configs = []
         for res in items:
             config = CrawlConfigOut.from_dict(res)
             # pylint: disable=invalid-name
-            config.currCrawlId = running.get(config.id)
+            self._add_curr_crawl_stats(config, running.get(config.id))
             configs.append(config)
 
         return configs, total
@@ -667,7 +670,7 @@
         )
 
         if len(crawls) == 1:
-            return crawls[0].id
+            return crawls[0]
 
         return None
 
@@ -684,6 +687,15 @@
             crawlconfig.lastCrawlState = crawl_stats["last_crawl_state"]
         return crawlconfig
 
+    def _add_curr_crawl_stats(self, crawlconfig, crawl):
+        """Add stats from current running crawl, if any"""
+        if not crawl:
+            return
+
+        crawlconfig.currCrawlId = crawl.id
+        crawlconfig.currCrawlStartTime = crawl.started
+        crawlconfig.currCrawlState = crawl.state
+
     async def get_crawl_config_out(self, cid: uuid.UUID, org: Organization):
         """Return CrawlConfigOut, including state of currently running crawl,
         if active also include inactive crawl configs"""
@@ -697,7 +709,9 @@
         )
 
         if not crawlconfig.inactive:
-            crawlconfig.currCrawlId = await self.get_running_crawl(crawlconfig)
+            self._add_curr_crawl_stats(
+                crawlconfig, await self.get_running_crawl(crawlconfig)
+            )
 
         user = await self.user_manager.get(crawlconfig.createdBy)
         # pylint: disable=invalid-name
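
As a quick illustration of the mapping the new helper performs (crawl.id -> currCrawlId, crawl.started -> currCrawlStartTime, crawl.state -> currCrawlState), here is a minimal standalone sketch. FakeCrawl and FakeConfigOut are stand-in names for illustration only, not the real Crawl and CrawlConfigOut models in the patch:

from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class FakeCrawl:
    # Stand-in for the running crawl record returned by get_running_crawl()
    id: str
    started: datetime
    state: str


@dataclass
class FakeConfigOut:
    # Stand-in for CrawlConfigOut with the "current crawl" output fields
    currCrawlId: Optional[str] = None
    currCrawlStartTime: Optional[datetime] = None
    currCrawlState: Optional[str] = None


def add_curr_crawl_stats(config, crawl):
    # Same shape as the new _add_curr_crawl_stats helper: no-op when no crawl is running
    if not crawl:
        return
    config.currCrawlId = crawl.id
    config.currCrawlStartTime = crawl.started
    config.currCrawlState = crawl.state


config = FakeConfigOut()
add_curr_crawl_stats(config, FakeCrawl("crawl-1", datetime.utcnow(), "running"))
print(config.currCrawlId, config.currCrawlState)  # -> crawl-1 running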