diff --git a/backend/btrixcloud/crawlconfigs.py b/backend/btrixcloud/crawlconfigs.py index d2b81187..cfd88f52 100644 --- a/backend/btrixcloud/crawlconfigs.py +++ b/backend/btrixcloud/crawlconfigs.py @@ -191,6 +191,10 @@ class CrawlConfig(CrawlConfigCore): lastCrawlState: Optional[str] lastCrawlSize: Optional[int] + lastRun: Optional[datetime] + + isCrawlRunning: Optional[bool] = False + def get_raw_config(self): """serialize config for browsertrix-crawler""" return self.config.dict(exclude_unset=True, exclude_none=True) @@ -198,13 +202,9 @@ class CrawlConfig(CrawlConfigCore): # ============================================================================ class CrawlConfigOut(CrawlConfig): - """Crawl Config Output, includes currCrawlId of running crawl""" + """Crawl Config Output""" - currCrawlId: Optional[str] - currCrawlStartTime: Optional[datetime] - currCrawlState: Optional[str] - currCrawlSize: Optional[int] = 0 - currCrawlStopping: Optional[bool] = False + lastCrawlStopping: Optional[bool] = False profileName: Optional[str] @@ -281,6 +281,10 @@ class CrawlConfigOps: [("oid", pymongo.ASCENDING), ("tags", pymongo.ASCENDING)] ) + await self.crawl_configs.create_index( + [("lastRun", pymongo.DESCENDING), ("modified", pymongo.DESCENDING)] + ) + await self.config_revs.create_index([("cid", pymongo.HASHED)]) await self.config_revs.create_index( @@ -480,7 +484,7 @@ class CrawlConfigOps: description: str = None, tags: Optional[List[str]] = None, schedule: Optional[bool] = None, - sort_by: str = None, + sort_by: str = "lastRun", sort_direction: int = -1, ): """Get all crawl configs for an organization is a member of""" @@ -525,12 +529,31 @@ class CrawlConfigOps: aggregate.extend([{"$match": {"firstSeed": first_seed}}]) if sort_by: - if sort_by not in ("created", "modified", "firstSeed", "lastCrawlTime"): + if sort_by not in ( + "created", + "modified", + "firstSeed", + "lastCrawlTime", + "lastCrawlStartTime", + "lastRun", + ): raise HTTPException(status_code=400, 
detail="invalid_sort_by") if sort_direction not in (1, -1): raise HTTPException(status_code=400, detail="invalid_sort_direction") - aggregate.extend([{"$sort": {sort_by: sort_direction}}]) + sort_query = {sort_by: sort_direction} + + # Add modified as final sort key to give some order to workflows that + # haven't been run yet. + if sort_by in ( + "firstSeed", + "lastCrawlTime", + "lastCrawlStartTime", + "lastRun", + ): + sort_query = {sort_by: sort_direction, "modified": sort_direction} + + aggregate.extend([{"$sort": sort_query}]) aggregate.extend( [ @@ -641,11 +664,9 @@ class CrawlConfigOps: if not crawl: return - crawlconfig.currCrawlId = crawl.id - crawlconfig.currCrawlStartTime = crawl.started - crawlconfig.currCrawlState = crawl.state - crawlconfig.currCrawlSize = crawl.stats.get("size", 0) if crawl.stats else 0 - crawlconfig.currCrawlStopping = crawl.stopping + crawlconfig.lastCrawlState = crawl.state + crawlconfig.lastCrawlSize = crawl.stats.get("size", 0) if crawl.stats else 0 + crawlconfig.lastCrawlStopping = crawl.stopping async def get_crawl_config_out(self, cid: uuid.UUID, org: Organization): """Return CrawlConfigOut, including state of currently running crawl, if active @@ -890,10 +911,37 @@ async def inc_crawl_count(crawl_configs, cid: uuid.UUID): ) +# ============================================================================ +async def set_config_current_crawl_info( + crawl_configs, cid: uuid.UUID, crawl_id: str, crawl_start: datetime +): + """Set current crawl info in config when crawl begins""" + result = await crawl_configs.find_one_and_update( + {"_id": cid, "inactive": {"$ne": True}}, + { + "$set": { + "lastCrawlId": crawl_id, + "lastCrawlStartTime": crawl_start, + "lastCrawlTime": None, + "lastRun": crawl_start, + "isCrawlRunning": True, + } + }, + return_document=pymongo.ReturnDocument.AFTER, + ) + if result: + return True + return False + + # ============================================================================ # pylint: 
disable=too-many-locals async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID): - """re-calculate and update crawl statistics for config""" + """Re-calculate and update crawl statistics for config. + + Should only be called when a crawl completes from operator or on migration + when no crawls are running. + """ update_query = { "crawlCount": 0, "totalSize": 0, @@ -903,6 +951,8 @@ async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID): "lastCrawlTime": None, "lastCrawlState": None, "lastCrawlSize": None, + "lastCrawlStopping": False, + "isCrawlRunning": False, } match_query = {"cid": cid, "finished": {"$ne": None}} @@ -912,15 +962,21 @@ async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID): update_query["crawlCount"] = len(results) last_crawl = results[0] + + last_crawl_finished = last_crawl.get("finished") + update_query["lastCrawlId"] = str(last_crawl.get("_id")) update_query["lastCrawlStartTime"] = last_crawl.get("started") update_query["lastStartedBy"] = last_crawl.get("userid") - update_query["lastCrawlTime"] = last_crawl.get("finished") + update_query["lastCrawlTime"] = last_crawl_finished update_query["lastCrawlState"] = last_crawl.get("state") update_query["lastCrawlSize"] = sum( file_.get("size", 0) for file_ in last_crawl.get("files", []) ) + if last_crawl_finished: + update_query["lastRun"] = last_crawl_finished + total_size = 0 for res in results: files = res.get("files", []) diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py index 362358b5..ee1ce874 100644 --- a/backend/btrixcloud/crawls.py +++ b/backend/btrixcloud/crawls.py @@ -18,7 +18,13 @@ from pydantic import BaseModel, UUID4, conint, HttpUrl from redis import asyncio as aioredis, exceptions import pymongo -from .crawlconfigs import Seed, CrawlConfigCore, CrawlConfig, UpdateCrawlConfig +from .crawlconfigs import ( + Seed, + CrawlConfigCore, + CrawlConfig, + UpdateCrawlConfig, + set_config_current_crawl_info, +) from 
.db import BaseMongoModel
from .orgs import Organization, MAX_CRAWL_SCALE
from .pagination import DEFAULT_PAGE_SIZE, paginated_format
@@ -536,7 +542,16 @@ class CrawlOps:
     async def add_new_crawl(self, crawl_id: str, crawlconfig: CrawlConfig, user: User):
         """initialize new crawl"""
-        return await add_new_crawl(self.crawls, crawl_id, crawlconfig, user.id)
+        new_crawl = await add_new_crawl(self.crawls, crawl_id, crawlconfig, user.id)
+        # add_new_crawl() returns False on DuplicateKeyError (crawl id already
+        # exists) — don't subscript it in that case
+        if not new_crawl:
+            return False
+        return await set_config_current_crawl_info(
+            self.crawl_configs.crawl_configs,
+            crawlconfig.id,
+            new_crawl["id"],
+            new_crawl["started"],
+        )
     async def update_crawl(self, crawl_id: str, org: Organization, update: UpdateCrawl):
         """Update existing crawl (tags and notes only for now)"""
@@ -835,6 +853,8 @@ async def add_new_crawl(
     crawls, crawl_id: str, crawlconfig: CrawlConfig, userid: UUID4, manual=True
 ):
     """initialize new crawl"""
+    started = ts_now()
+
     crawl = Crawl(
         id=crawl_id,
         state="starting",
@@ -849,13 +869,13 @@ async def add_new_crawl(
         schedule=crawlconfig.schedule,
         crawlTimeout=crawlconfig.crawlTimeout,
         manual=manual,
-        started=ts_now(),
+        started=started,
         tags=crawlconfig.tags,
     )
     try:
-        await crawls.insert_one(crawl.to_dict())
-        return True
+        result = await crawls.insert_one(crawl.to_dict())
+        return {"id": str(result.inserted_id), "started": started}
     except pymongo.errors.DuplicateKeyError:
         # print(f"Crawl Already Added: {crawl.id} - {crawl.state}")
         return False
diff --git a/backend/btrixcloud/main_scheduled_job.py b/backend/btrixcloud/main_scheduled_job.py
index f4cb04bf..8cb88e6b 100644
--- a/backend/btrixcloud/main_scheduled_job.py
+++ b/backend/btrixcloud/main_scheduled_job.py
@@ -6,7 +6,11 @@ import uuid
 from .k8sapi import K8sAPI
 from .db import init_db
-from .crawlconfigs import get_crawl_config, inc_crawl_count
+from .crawlconfigs import (
+    get_crawl_config,
+    inc_crawl_count,
+    set_config_current_crawl_info,
+)
 from .crawls import add_new_crawl
 from .utils import register_exit_handler
@@ -43,9 +47,16 @@ class
ScheduledJob(K8sAPI): # db create await inc_crawl_count(self.crawlconfigs, crawlconfig.id) - await add_new_crawl( + new_crawl = await add_new_crawl( self.crawls, crawl_id, crawlconfig, uuid.UUID(userid), manual=False ) + # pylint: disable=duplicate-code + await set_config_current_crawl_info( + self.crawlconfigs.crawl_configs, + crawlconfig.id, + new_crawl["id"], + new_crawl["started"], + ) print("Crawl Created: " + crawl_id) diff --git a/backend/test/test_crawlconfigs.py b/backend/test/test_crawlconfigs.py index d0dcbe09..ba4ec868 100644 --- a/backend/test/test_crawlconfigs.py +++ b/backend/test/test_crawlconfigs.py @@ -232,6 +232,7 @@ def test_workflow_total_size_and_last_crawl_stats( assert workflow["lastStartedByName"] assert workflow["lastCrawlTime"] assert workflow["lastCrawlState"] + assert workflow["lastRun"] assert workflow["lastCrawlSize"] > 0 if last_crawl_id == admin_crawl_id: @@ -254,4 +255,5 @@ def test_workflow_total_size_and_last_crawl_stats( assert data["lastStartedByName"] assert data["lastCrawlTime"] assert data["lastCrawlState"] + assert data["lastRun"] assert data["lastCrawlSize"] > 0 diff --git a/backend/test/test_filter_sort_results.py b/backend/test/test_filter_sort_results.py index 49b2bdf2..d30585c0 100644 --- a/backend/test/test_filter_sort_results.py +++ b/backend/test/test_filter_sort_results.py @@ -433,11 +433,12 @@ def test_sort_crawl_configs( last_crawl_time = None for config in items: - if not config.get("lastCrawlTime"): + config_last_time = config.get("lastCrawlTime") + if not config_last_time: continue - if last_crawl_time: - assert config["lastCrawlTime"] <= last_crawl_time - last_crawl_time = config["lastCrawlTime"] + elif last_crawl_time and config_last_time: + assert config_last_time <= last_crawl_time + last_crawl_time = config_last_time # Sort by lastCrawlTime, ascending r = requests.get( @@ -449,11 +450,80 @@ def test_sort_crawl_configs( last_crawl_time = None for config in items: - if not config.get("lastCrawlTime"): + 
config_last_time = config.get("lastCrawlTime") + if not config_last_time: continue - if last_crawl_time: - assert config["lastCrawlTime"] >= last_crawl_time - last_crawl_time = config["lastCrawlTime"] + elif last_crawl_time and config_last_time: + assert config_last_time >= last_crawl_time + last_crawl_time = config_last_time + + # Sort by lastCrawlStartTime + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastCrawlStartTime", + headers=crawler_auth_headers, + ) + data = r.json() + items = data["items"] + + last_crawl_time = None + for config in items: + config_last_time = config.get("lastCrawlStartTime") + if not config_last_time: + continue + elif last_crawl_time and config_last_time: + assert config_last_time <= last_crawl_time + last_crawl_time = config_last_time + + # Sort by lastCrawlStartTime, ascending + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastCrawlStartTime&sortDirection=1", + headers=crawler_auth_headers, + ) + data = r.json() + items = data["items"] + + last_crawl_time = None + for config in items: + config_last_time = config.get("lastCrawlStartTime") + if not config_last_time: + continue + elif last_crawl_time and config_last_time: + assert config_last_time >= last_crawl_time + last_crawl_time = config_last_time + + # Sort by lastRun + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastRun", + headers=crawler_auth_headers, + ) + data = r.json() + items = data["items"] + + last_updated_time = None + for config in items: + config_last_updated = config.get("lastRun") + if not config_last_updated: + continue + elif last_updated_time and config_last_updated: + assert config_last_updated <= last_updated_time + last_updated_time = config_last_updated + + # Sort by lastRun, ascending + r = requests.get( + f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastRun&sortDirection=1", + headers=crawler_auth_headers, + ) + data = r.json() + items = 
data["items"] + + last_updated_time = None + for config in items: + config_last_updated = config.get("lastRun") + if not config_last_updated: + continue + elif last_updated_time and config_last_updated: + assert config_last_updated >= last_updated_time + last_updated_time = config_last_updated # Invalid sort value r = requests.get( diff --git a/frontend/src/components/workflow-list.ts b/frontend/src/components/workflow-list.ts index bfae3365..4259ddfa 100644 --- a/frontend/src/components/workflow-list.ts +++ b/frontend/src/components/workflow-list.ts @@ -255,7 +255,7 @@ export class WorkflowListItem extends LitElement { role="button" href=${`/orgs/${this.workflow?.oid}/workflows/crawl/${ this.workflow?.id - }#${this.workflow?.currCrawlId ? "watch" : "artifacts"}`} + }#${this.workflow?.isCrawlRunning ? "watch" : "artifacts"}`} @click=${async (e: MouseEvent) => { e.preventDefault(); await this.updateComplete; @@ -294,20 +294,19 @@ export class WorkflowListItem extends LitElement { (workflow) => html` ` )}
${this.safeRender((workflow) => { - if (workflow.currCrawlStartTime) { + if (workflow.lastCrawlStartTime) { const diff = new Date().valueOf() - - new Date(`${workflow.currCrawlStartTime}Z`).valueOf(); + new Date(`${workflow.lastCrawlStartTime}Z`).valueOf(); if (diff < 1000) { return ""; } @@ -333,7 +332,7 @@ export class WorkflowListItem extends LitElement {
${this.safeRender((workflow) => { - if (workflow.totalSize && workflow.currCrawlSize) { + if (workflow.isCrawlRunning && workflow.totalSize && workflow.lastCrawlSize) { return html` + `; } - if (workflow.currCrawlSize) { + if (workflow.totalSize && workflow.lastCrawlSize) { + return html``; + } + if (workflow.isCrawlRunning && workflow.lastCrawlSize) { return html` `; diff --git a/frontend/src/pages/org/workflow-detail.ts b/frontend/src/pages/org/workflow-detail.ts index 4eb9c8b4..991372f9 100644 --- a/frontend/src/pages/org/workflow-detail.ts +++ b/frontend/src/pages/org/workflow-detail.ts @@ -63,13 +63,13 @@ export class WorkflowDetail extends LiteElement { private crawls?: APIPaginatedList; // Only inactive crawls @state() - private currentCrawlId: Workflow["currCrawlId"] = null; + private lastCrawlId: Workflow["lastCrawlId"] = null; @state() - private currentCrawlStartTime: Workflow["currCrawlStartTime"] = null; + private lastCrawlStartTime: Workflow["lastCrawlStartTime"] = null; @state() - private currentCrawlStats?: Crawl["stats"]; + private lastCrawlStats?: Crawl["stats"]; @state() private activePanel: Tab = SECTIONS[0]; @@ -150,8 +150,8 @@ export class WorkflowDetail extends LiteElement { this.stopPoll(); } if ( - changedProperties.get("currentCrawlId") && - !this.currentCrawlId && + changedProperties.get("lastCrawlId") && + !this.lastCrawlId && this.activePanel === "watch" ) { this.handleCrawlRunEnd(); @@ -244,9 +244,9 @@ export class WorkflowDetail extends LiteElement { try { this.getWorkflowPromise = this.getWorkflow(); this.workflow = await this.getWorkflowPromise; - this.currentCrawlId = this.workflow.currCrawlId; - this.currentCrawlStartTime = this.workflow.currCrawlStartTime; - if (this.currentCrawlId) { + this.lastCrawlId = this.workflow.lastCrawlId; + this.lastCrawlStartTime = this.workflow.lastCrawlStartTime; + if (this.lastCrawlId) { this.fetchCurrentCrawlStats(); } } catch (e: any) { @@ -417,7 +417,7 @@ export class WorkflowDetail extends 
LiteElement { ${this.renderTab("artifacts")} - ${this.renderTab("watch", { disabled: !this.currentCrawlId })} + ${this.renderTab("watch", { disabled: !this.lastCrawlId })} ${this.renderTab("settings")} html` ${when(this.activePanel === "watch", () => - this.currentCrawlId + this.lastCrawlId ? html`
${this.renderCurrentCrawl()}
@@ -455,7 +455,7 @@ export class WorkflowDetail extends LiteElement { () => html` (${this.crawls!.total.toLocaleString()}${this.currentCrawlId + >(${this.crawls!.total.toLocaleString()}${this.workflow?.isCrawlRunning ? html` + 1` : ""}) @@ -479,7 +479,7 @@ export class WorkflowDetail extends LiteElement { return html`

${this.tabLabels[this.activePanel]}

(this.openDialogName = "scale")} > @@ -545,15 +545,15 @@ export class WorkflowDetail extends LiteElement { return html` ${when( - this.currentCrawlId, + this.workflow?.isCrawlRunning, () => html` (this.openDialogName = "stop")} - ?disabled=${!this.currentCrawlId || + ?disabled=${!this.lastCrawlId || this.isCancelingOrStoppingCrawl || - this.workflow?.currCrawlStopping} + this.workflow?.lastCrawlStopping} > ${msg("Stop")} @@ -561,7 +561,7 @@ export class WorkflowDetail extends LiteElement { (this.openDialogName = "cancel")} - ?disabled=${!this.currentCrawlId || + ?disabled=${!this.lastCrawlId || this.isCancelingOrStoppingCrawl} > ${when( - this.currentCrawlId, + this.workflow?.isCrawlRunning, // HACK shoelace doesn't current have a way to override non-hover // color without resetting the --sl-color-neutral-700 variable () => html` (this.openDialogName = "stop")} - ?disabled=${workflow.currCrawlStopping || + ?disabled=${workflow.lastCrawlStopping || this.isCancelingOrStoppingCrawl} > @@ -624,7 +624,7 @@ export class WorkflowDetail extends LiteElement { ` )} ${when( - workflow.currCrawlState === "running", + workflow.isCrawlRunning, () => html` (this.openDialogName = "scale")}> @@ -660,7 +660,7 @@ export class WorkflowDetail extends LiteElement { ${msg("Duplicate Workflow")} - ${when(!this.currentCrawlId, () => { + ${when(!this.lastCrawlId, () => { const shouldDeactivate = workflow.crawlCount && !workflow.inactive; return html` @@ -693,10 +693,9 @@ export class WorkflowDetail extends LiteElement { msg("Status"), () => html` ` )} @@ -805,7 +804,7 @@ export class WorkflowDetail extends LiteElement {
${when( - this.currentCrawlId, + this.workflow?.isCrawlRunning, () => html`
${msg( @@ -887,28 +886,28 @@ export class WorkflowDetail extends LiteElement { return html`
${this.renderDetailItem(msg("Pages Crawled"), () => - this.currentCrawlStats + this.lastCrawlStats ? msg( str`${this.numberFormatter.format( - +(this.currentCrawlStats.done || 0) + +(this.lastCrawlStats.done || 0) )} / ${this.numberFormatter.format( - +(this.currentCrawlStats.found || 0) + +(this.lastCrawlStats.found || 0) )}` ) : html`` )} ${this.renderDetailItem(msg("Run Duration"), () => - this.currentCrawlStartTime + this.lastCrawlStartTime ? RelativeDuration.humanize( new Date().valueOf() - - new Date(`${this.currentCrawlStartTime}Z`).valueOf() + new Date(`${this.lastCrawlStartTime}Z`).valueOf() ) : skeleton )} ${this.renderDetailItem(msg("Crawl Size"), () => this.workflow ? html`` : skeleton @@ -923,12 +922,12 @@ export class WorkflowDetail extends LiteElement { }; private renderWatchCrawl = () => { - if (!this.authState || !this.workflow?.currCrawlState) return ""; + if (!this.authState || !(this.workflow?.lastCrawlState)) return ""; - const isStarting = this.workflow.currCrawlState === "starting"; - const isWaiting = this.workflow.currCrawlState === "waiting"; - const isRunning = this.workflow.currCrawlState === "running"; - const isStopping = this.workflow.currCrawlStopping; + const isStarting = this.workflow.lastCrawlState === "starting"; + const isWaiting = this.workflow.lastCrawlState === "waiting"; + const isRunning = this.workflow.lastCrawlState === "running"; + const isStopping = this.workflow.lastCrawlStopping; const authToken = this.authState.headers.Authorization.split(" ")[1]; return html` @@ -942,7 +941,7 @@ export class WorkflowDetail extends LiteElement { )}

` - : isActive(this.workflow.currCrawlState) + : isActive(this.workflow.lastCrawlState) ? html` ${isStopping ? html` @@ -962,7 +961,7 @@ export class WorkflowDetail extends LiteElement {
@@ -1048,11 +1047,11 @@ export class WorkflowDetail extends LiteElement { ${when( - this.currentCrawlId, + this.lastCrawlId, () => html` ` @@ -1069,10 +1068,10 @@ export class WorkflowDetail extends LiteElement { ${this.workflow && this.isDialogVisible ? html`` : ""} @@ -1159,12 +1158,12 @@ export class WorkflowDetail extends LiteElement { } private async scale(value: Crawl["scale"]) { - if (!this.currentCrawlId) return; + if (!this.lastCrawlId) return; this.isSubmittingUpdate = true; try { const data = await this.apiFetch( - `/orgs/${this.orgId}/crawls/${this.currentCrawlId}/scale`, + `/orgs/${this.orgId}/crawls/${this.lastCrawlId}/scale`, this.authState!, { method: "POST", @@ -1233,12 +1232,12 @@ export class WorkflowDetail extends LiteElement { } private async fetchCurrentCrawlStats() { - if (!this.currentCrawlId) return; + if (!this.lastCrawlId) return; try { // TODO see if API can pass stats in GET workflow - const { stats } = await this.getCrawl(this.currentCrawlId); - this.currentCrawlStats = stats; + const { stats } = await this.getCrawl(this.lastCrawlId); + this.lastCrawlStats = stats; } catch (e) { // TODO handle error console.debug(e); @@ -1349,13 +1348,13 @@ export class WorkflowDetail extends LiteElement { } private async cancel() { - if (!this.currentCrawlId) return; + if (!this.lastCrawlId) return; this.isCancelingOrStoppingCrawl = true; try { const data = await this.apiFetch( - `/orgs/${this.orgId}/crawls/${this.currentCrawlId}/cancel`, + `/orgs/${this.orgId}/crawls/${this.lastCrawlId}/cancel`, this.authState!, { method: "POST", @@ -1378,13 +1377,13 @@ export class WorkflowDetail extends LiteElement { } private async stop() { - if (!this.currentCrawlId) return; + if (!this.lastCrawlId) return; this.isCancelingOrStoppingCrawl = true; try { const data = await this.apiFetch( - `/orgs/${this.orgId}/crawls/${this.currentCrawlId}/stop`, + `/orgs/${this.orgId}/crawls/${this.lastCrawlId}/stop`, this.authState!, { method: "POST", @@ -1415,9 +1414,9 @@ export 
class WorkflowDetail extends LiteElement { method: "POST", } ); - this.currentCrawlId = data.started; + this.lastCrawlId = data.started; // remove 'Z' from timestamp to match API response - this.currentCrawlStartTime = new Date().toISOString().slice(0, -1); + this.lastCrawlStartTime = new Date().toISOString().slice(0, -1); this.fetchWorkflow(); this.goToTab("watch"); diff --git a/frontend/src/pages/org/workflows-list.ts b/frontend/src/pages/org/workflows-list.ts index 4550e77a..fb6e1071 100644 --- a/frontend/src/pages/org/workflows-list.ts +++ b/frontend/src/pages/org/workflows-list.ts @@ -355,20 +355,20 @@ export class WorkflowsList extends LiteElement { private renderMenuItems(workflow: Workflow) { return html` ${when( - workflow.currCrawlId, + workflow.isCrawlRunning, // HACK shoelace doesn't current have a way to override non-hover // color without resetting the --sl-color-neutral-700 variable () => html` this.stop(workflow.currCrawlId)} - ?disabled=${workflow.currCrawlStopping} + @click=${() => this.stop(workflow.lastCrawlId)} + ?disabled=${workflow.lastCrawlStopping} > ${msg("Stop Crawl")} this.cancel(workflow.currCrawlId)} + @click=${() => this.cancel(workflow.lastCrawlId)} > ${msg("Cancel & Discard Crawl")} @@ -385,7 +385,9 @@ export class WorkflowsList extends LiteElement { ` )} ${when( - workflow.currCrawlState === "running", + workflow.isCrawlRunning, + // HACK shoelace doesn't current have a way to override non-hover + // color without resetting the --sl-color-neutral-700 variable () => html` ${msg("Duplicate Workflow")} - ${when(!workflow.currCrawlId, () => { + ${when(!workflow.isCrawlRunning, () => { const shouldDeactivate = workflow.crawlCount && !workflow.inactive; return html` @@ -482,7 +484,6 @@ export class WorkflowsList extends LiteElement { return new Date( Math.max( ...[ - workflow.currCrawlStartTime, workflow.lastCrawlTime, workflow.lastCrawlStartTime, workflow.modified, @@ -597,7 +598,7 @@ export class WorkflowsList extends LiteElement { } }
- private async cancel(crawlId: Workflow["currCrawlId"]) { + private async cancel(crawlId: Workflow["lastCrawlId"]) { if (!crawlId) return; if (window.confirm(msg("Are you sure you want to cancel the crawl?"))) { const data = await this.apiFetch( @@ -619,7 +620,7 @@ export class WorkflowsList extends LiteElement { } } - private async stop(crawlId: Workflow["currCrawlId"]) { + private async stop(crawlId: Workflow["lastCrawlId"]) { if (!crawlId) return; if (window.confirm(msg("Are you sure you want to stop the crawl?"))) { const data = await this.apiFetch( diff --git a/frontend/src/types/crawler.ts b/frontend/src/types/crawler.ts index 9024d36e..879a3ddf 100644 --- a/frontend/src/types/crawler.ts +++ b/frontend/src/types/crawler.ts @@ -61,20 +61,17 @@ export type Workflow = CrawlConfig & { modified: string; // Date string crawlCount: number; crawlAttemptCount: number; - lastCrawlId: string; // last finished crawl - lastCrawlStartTime: string; - lastCrawlTime: string; // when last crawl finished + lastCrawlId: string | null; // last finished or current crawl + lastCrawlStartTime: string | null; + lastCrawlTime: string | null; // when last crawl finished lastCrawlState: CrawlState; lastCrawlSize: number | null; lastStartedByName: string | null; - currCrawlId: string | null; - currCrawlState: CrawlState | null; - currCrawlStartTime: string | null; - currCrawlSize: number | null; - currCrawlStopping: boolean | null; + lastCrawlStopping: boolean | null; totalSize: string | null; inactive: boolean; firstSeed: string; + isCrawlRunning: boolean | null; }; export type Profile = {