diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py
index 31ec001e..cc3ccc8d 100644
--- a/backend/btrixcloud/crawls.py
+++ b/backend/btrixcloud/crawls.py
@@ -520,7 +520,7 @@ class CrawlOps(BaseCrawlOps):
             total = await redis.llen(f"{crawl_id}:e")
         except exceptions.ConnectionError:
             # pylint: disable=raise-missing-from
-            raise HTTPException(status_code=503, detail="redis_connection_error")
+            raise HTTPException(status_code=503, detail="error_logs_not_available")
 
         parsed_errors = parse_jsonl_error_messages(errors)
         return parsed_errors, total
diff --git a/frontend/src/components/crawl-logs.ts b/frontend/src/components/crawl-logs.ts
index 81e20a29..c4a32a5f 100644
--- a/frontend/src/components/crawl-logs.ts
+++ b/frontend/src/components/crawl-logs.ts
@@ -84,6 +84,9 @@ export class CrawlLogs extends LitElement {
   @property({ type: Object })
   logs?: APIPaginatedList;
 
+  @property({ type: Boolean })
+  paginate = false;
+
   @state()
   private selectedLog:
     | (CrawlLog & {
@@ -145,14 +148,17 @@ export class CrawlLogs extends LitElement {
           `;
         })}
-
+      ${this.paginate
+        ? html``
+        : ""}
+

 {
               await this.fetchCrawlLogs({
                 page: e.detail.page,
@@ -917,7 +918,7 @@ ${this.crawl?.description}
       return;
     }
     try {
-      this.logs = await this.getCrawlLogs(params);
+      this.logs = await this.getCrawlErrors(params);
     } catch {
       this.notify({
         message: msg("Sorry, couldn't retrieve crawl logs at this time."),
@@ -927,7 +928,7 @@ ${this.crawl?.description}
     }
   }
 
-  private async getCrawlLogs(
+  private async getCrawlErrors(
     params: Partial<APIPaginationQuery>
   ): Promise<APIPaginatedList> {
     const page = params.page || this.logs?.page || 1;
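The markup stripped from the hunks above makes the new `paginate` contract hard to read. A minimal usage sketch, assuming the component is registered as `<btrix-crawl-logs>` (the tag name does not appear in this diff); only `logs`, `paginate`, and the `page-change` event's `detail.page` come from the changes above, everything else is illustrative:

```ts
import { html } from "lit";

type PageChangeEvent = CustomEvent<{ page: number }>;

// Renders the shared logs component with its own pagination enabled and lets the
// host view refetch a page of errors when the user navigates.
export const errorLogsSection = (
  logs: unknown,
  onPageChange: (page: number) => Promise<void>
) => html`
  <btrix-crawl-logs
    .logs=${logs}
    paginate
    @page-change=${async (e: PageChangeEvent) => {
      await onPageChange(e.detail.page);
    }}
  ></btrix-crawl-logs>
`;
```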
diff --git a/frontend/src/pages/org/workflow-detail.ts b/frontend/src/pages/org/workflow-detail.ts
index c8005607..1a7dc9bf 100644
--- a/frontend/src/pages/org/workflow-detail.ts
+++ b/frontend/src/pages/org/workflow-detail.ts
@@ -23,12 +23,14 @@ import { humanizeSchedule, humanizeNextDate } from "../../utils/cron";
 import { APIPaginatedList } from "../../types/api";
 import { inactiveCrawlStates, isActive } from "../../utils/crawler";
 import { SlSelect } from "@shoelace-style/shoelace";
+import type { PageChangeEvent } from "../../components/pagination";
 
-const SECTIONS = ["crawls", "watch", "settings"] as const;
+const SECTIONS = ["crawls", "watch", "settings", "logs"] as const;
 type Tab = (typeof SECTIONS)[number];
 const DEFAULT_SECTION: Tab = "crawls";
 const POLL_INTERVAL_SECONDS = 10;
 const ABORT_REASON_CANCLED = "canceled";
+const LOGS_PAGE_SIZE = 50;
 
 /**
  * Usage:
@@ -73,6 +75,9 @@ export class WorkflowDetail extends LiteElement {
   @state()
   private crawls?: APIPaginatedList; // Only inactive crawls
 
+  @state()
+  private logs?: APIPaginatedList;
+
   @state()
   private lastCrawlId: Workflow["lastCrawlId"] = null;
 
@@ -126,6 +131,7 @@ export class WorkflowDetail extends LiteElement {
   private readonly tabLabels: Record<Tab, string> = {
     crawls: msg("Crawls"),
     watch: msg("Watch Crawl"),
+    logs: msg("Error Logs"),
     settings: msg("Workflow Settings"),
   };
 
@@ -167,13 +173,6 @@ export class WorkflowDetail extends LiteElement {
     if (changedProperties.has("isEditing") && this.isEditing) {
       this.stopPoll();
     }
-    if (
-      changedProperties.get("lastCrawlId") &&
-      !this.lastCrawlId &&
-      this.activePanel === "watch"
-    ) {
-      this.handleCrawlRunEnd();
-    }
     if (
       !this.isEditing &&
       changedProperties.has("activePanel") &&
@@ -214,58 +213,25 @@ export class WorkflowDetail extends LiteElement {
     this.activePanel = tab;
   }
 
-  private async handleCrawlRunEnd() {
-    this.goToTab("crawls", { replace: true });
-    await this.fetchWorkflow();
-
-    let notifyOpts = {
-      message: msg("Crawl finished."),
-      variant: "info",
-      icon: "info-circle",
-    } as any;
-    // TODO consolidate with `CrawlStatus.getContent`
-    switch (this.workflow!.lastCrawlState) {
-      case "complete":
-        notifyOpts = {
-          message: msg("Crawl complete."),
-          variant: "success",
-          icon: "check-circle",
-        };
-        break;
-      case "canceled":
-        notifyOpts = {
-          message: msg("Crawl canceled."),
-          variant: "danger",
-          icon: "x-octagon",
-        };
-        break;
-      case "failed":
-        notifyOpts = {
-          message: msg("Crawl failed."),
-          variant: "danger",
-          icon: "exclamation-triangle",
-        };
-        break;
-      default:
-        break;
-    }
-    this.notify({
-      ...notifyOpts,
-      duration: 8000,
-    });
-  }
-
   private async fetchWorkflow() {
     this.stopPoll();
     this.isLoading = true;
     try {
+      const prevLastCrawlId = this.lastCrawlId;
       this.getWorkflowPromise = this.getWorkflow();
       this.workflow = await this.getWorkflowPromise;
       this.lastCrawlId = this.workflow.lastCrawlId;
       this.lastCrawlStartTime = this.workflow.lastCrawlStartTime;
+
       if (this.lastCrawlId) {
-        this.fetchCurrentCrawlStats();
+        if (this.workflow.isCrawlRunning) {
+          this.fetchCurrentCrawlStats();
+          this.fetchCrawlLogs();
+        } else if (this.lastCrawlId !== prevLastCrawlId) {
+          this.logs = undefined;
+          this.fetchCrawlLogs();
+        }
       }
       // TODO: Check if storage quota has been exceeded here by running
       // crawl??
@@ -428,9 +394,8 @@ export class WorkflowDetail extends LiteElement {
-          ${this.renderTab("crawls")}
-          ${this.renderTab("watch", { disabled: !this.lastCrawlId })}
-          ${this.renderTab("settings")}
+          ${this.renderTab("crawls")} ${this.renderTab("watch")}
+          ${this.renderTab("logs")} ${this.renderTab("settings")}
         ${this.renderCrawls()}
@@ -449,6 +414,7 @@ export class WorkflowDetail extends LiteElement {
           )
         )}
+        ${this.renderLogs()}
         ${this.renderSettings()}
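For context on why `tabLabels` gains a `logs` entry: the `Tab` union is derived from `SECTIONS`, so adding `"logs"` widens the union and (assuming `tabLabels` is typed as `Record<Tab, string>`, whose type arguments are not legible in this diff) the labels map must cover it. A standalone illustration, with plain strings standing in for the `msg()` calls used in the actual file:

```ts
const SECTIONS = ["crawls", "watch", "settings", "logs"] as const;
type Tab = (typeof SECTIONS)[number]; // "crawls" | "watch" | "settings" | "logs"

export const tabLabels: Record<Tab, string> = {
  crawls: "Crawls",
  watch: "Watch Crawl",
  logs: "Error Logs", // omitting this key would now be a compile-time error
  settings: "Workflow Settings",
};
```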

@@ -497,6 +463,31 @@ export class WorkflowDetail extends LiteElement {
         ${msg("Edit Crawler Instances")}
       `;
     }
+    if (this.activePanel === "logs") {
+      const authToken = this.authState!.headers.Authorization.split(" ")[1];
+      const isDownloadEnabled = Boolean(
+        this.logs?.total &&
+          this.workflow?.lastCrawlId &&
+          !this.workflow.isCrawlRunning
+      );
+      return html`
+        ${this.tabLabels[this.activePanel]}
+          ${msg("Download Logs")}
+      `;
+    }
     return html`
       ${this.tabLabels[this.activePanel]}
     `;
   }
@@ -933,7 +924,7 @@ export class WorkflowDetail extends LiteElement {
       case "waiting_capacity":
         waitingMsg = msg(
-          "Crawl waiting for available resources before it can start..."
+          "Crawl waiting for available resources before it can continue..."
         );
         break;
 
@@ -980,6 +971,7 @@ export class WorkflowDetail extends LiteElement {
         >
+          ${this.renderCrawlErrors()}
           ${this.renderExclusions()}
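The markup for the download button above is garbled in this diff; only the token extraction and the `isDownloadEnabled` condition survive. A standalone sketch of that gating, with the surrounding types reduced to the fields actually used:

```ts
type LogsState = { total: number } | undefined;
type WorkflowState =
  | { lastCrawlId: string | null; isCrawlRunning: boolean }
  | undefined;

// "Download Logs" is only meaningful once a finished crawl exists and at least
// one error was logged for it.
export function canDownloadLogs(
  logs: LogsState,
  workflow: WorkflowState
): boolean {
  return Boolean(
    logs?.total && workflow?.lastCrawlId && !workflow.isCrawlRunning
  );
}

// Example: a workflow whose latest crawl finished with 3 logged errors.
canDownloadLogs({ total: 3 }, { lastCrawlId: "crawl-123", isCrawlRunning: false }); // true
```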
+  private renderLogs() {
+    return html`
+      ${when(
+        this.workflow?.isCrawlRunning,
+        () => html`
+          ${msg(
+            html`Viewing error logs for currently running crawl.
+              Watch Crawl Progress`
+          )}
+        `
+      )}
+      ${when(
+        this.lastCrawlId,
+        () =>
+          this.logs?.total
+            ? html` {
+                await this.fetchCrawlLogs({
+                  page: e.detail.page,
+                });
+                // Scroll to top of list
+                this.scrollIntoView();
+              }}
+              >`
+            : html`
+                ${this.workflow?.lastCrawlState === "waiting_capacity"
+                  ? msg("Error logs currently not available.")
+                  : msg("No error logs found yet for latest crawl.")}
+              `,
+        () => this.renderNoCrawlLogs()
+      )}
+    `;
+  }
+
+  private renderNoCrawlLogs() {
+    return html`
+      ${msg("Logs will show here after you run a crawl.")}
+        this.runNow()}
+      >
+        ${msg("Run Crawl")}
+    `;
+  }
+

+  private renderCrawlErrors() {
+    return html`
+        ${msg("Error Logs")}
+        ${this.logs?.total
+          ? this.logs?.total.toLocaleString()
+          : 0}
+      ${when(
+        this.logs?.total && this.logs.total > LOGS_PAGE_SIZE,
+        () => html`
+          ${msg(
+            str`Displaying latest ${LOGS_PAGE_SIZE.toLocaleString()} errors of ${this.logs!.total.toLocaleString()}.`
+          )}
+        `
+      )}
+    `;
+  }
+
   private renderExclusions() {
     return html`
-
+
       ${msg("Crawl URLs")}
+  private async fetchCrawlLogs(
+    params: Partial<APIPaginationQuery> = {}
+  ): Promise<void> {
+    try {
+      this.logs = await this.getCrawlErrors(params);
+    } catch (e: any) {
+      if (e.isApiError && e.statusCode === 503) {
+        // do nothing, keep logs if previously loaded
+      } else {
+        this.notify({
+          message: msg(
+            "Sorry, couldn't retrieve crawl error logs at this time."
+          ),
+          variant: "danger",
+          icon: "exclamation-octagon",
+        });
+      }
+    }
+  }
+
+  private async getCrawlErrors(
+    params: Partial<APIPaginationQuery>
+  ): Promise<APIPaginatedList> {
+    const page = params.page || this.logs?.page || 1;
+    const pageSize = params.pageSize || this.logs?.pageSize || LOGS_PAGE_SIZE;
+
+    const data: APIPaginatedList = await this.apiFetch(
+      `/orgs/${this.orgId}/crawls/${
+        this.workflow!.lastCrawlId
+      }/errors?page=${page}&pageSize=${pageSize}`,
+      this.authState!
+    );
+
+    return data;
+  }
 }
 
 customElements.define("btrix-workflow-detail", WorkflowDetail);
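Taken together with the backend change (a 503 whose detail is now `error_logs_not_available` when Redis is unreachable), `fetchCrawlLogs()` deliberately keeps previously loaded logs on a 503. A self-contained sketch of that contract outside the Lit component; the `ApiError` shape mirrors only the `isApiError`/`statusCode` fields checked above:

```ts
interface ApiError extends Error {
  isApiError: true;
  statusCode: number;
}

// Refresh the error list from GET /orgs/:orgId/crawls/:crawlId/errors without
// clobbering logs that were already loaded when the endpoint is temporarily
// unavailable (503).
export async function refreshErrorLogs<T>(
  fetchErrors: () => Promise<T>,
  previous: T | undefined,
  notify: (message: string) => void
): Promise<T | undefined> {
  try {
    return await fetchErrors();
  } catch (e) {
    if ((e as ApiError).isApiError && (e as ApiError).statusCode === 503) {
      // Logs temporarily unavailable (e.g. Redis connection error): keep what we have.
      return previous;
    }
    notify("Sorry, couldn't retrieve crawl error logs at this time.");
    return previous;
  }
}
```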