From 2372f43c2cd3a1ea8d641ef535a6161d63502f52 Mon Sep 17 00:00:00 2001
From: Ilya Kreymer
Date: Mon, 10 Jul 2023 19:29:19 +0200
Subject: [PATCH] frontend: fix to collection editor with crawls and uploads
 (#971)

* frontend:
- follow up to #969, fixes crawl workflows by using crawl-specific endpoint
  and merging results

* get crawls and uploads concurrently

---------

Co-authored-by: sua yoo
---
 frontend/src/pages/org/collection-editor.ts | 48 +++++++++------------
 1 file changed, 21 insertions(+), 27 deletions(-)

diff --git a/frontend/src/pages/org/collection-editor.ts b/frontend/src/pages/org/collection-editor.ts
index 154c4f5e..0d98e1a8 100644
--- a/frontend/src/pages/org/collection-editor.ts
+++ b/frontend/src/pages/org/collection-editor.ts
@@ -1332,26 +1332,6 @@ export class CollectionEditor extends LiteElement {
     }
   }
 
-  private async getCrawlsAndUploads(
-    params: Partial<{
-      collectionId?: string;
-      state: CrawlState[];
-    }> &
-      APIPaginationQuery &
-      APISortQuery
-  ): Promise<APIPaginatedList> {
-    const query = queryString.stringify({
-      state: "complete",
-      ...params,
-    });
-    const data: APIPaginatedList = await this.apiFetch(
-      `/orgs/${this.orgId}/all-crawls?${query}`,
-      this.authState!
-    );
-
-    return data;
-  }
-
   private async getUploads(
     params: Partial<{
       collectionId?: string;
@@ -1376,14 +1356,27 @@
     if (!this.collectionId) return;
 
     try {
-      const { items: crawls } = await this.getCrawlsAndUploads({
-        collectionId: this.collectionId,
-        sortBy: "finished",
-        pageSize: WORKFLOW_CRAWL_LIMIT,
-      });
+      const [crawlsRes, uploadsRes] = await Promise.allSettled([
+        this.getCrawls({
+          collectionId: this.collectionId,
+          sortBy: "finished",
+          pageSize: WORKFLOW_CRAWL_LIMIT,
+        }),
+        this.getUploads({
+          collectionId: this.collectionId,
+          sortBy: "finished",
+          pageSize: WORKFLOW_CRAWL_LIMIT,
+        }),
+      ]);
+      const crawls =
+        crawlsRes.status === "fulfilled" ? crawlsRes.value.items : [];
+      const uploads =
+        uploadsRes.status === "fulfilled" ? uploadsRes.value.items : [];
+      const crawlsAndUploads = [...crawls, ...uploads];
+
       this.selectedCrawls = mergeDeep(
         this.selectedCrawls,
-        crawls.reduce(
+        crawlsAndUploads.reduce(
           (acc, crawl) => ({
             ...acc,
             [crawl.id]: crawl,
           }),
           {}
         )
       );
+
       // TODO remove omit once API removes errors
-      this.collectionCrawls = crawls.map(omit("errors")) as Crawl[];
+      this.collectionCrawls = crawlsAndUploads.map(omit("errors")) as Crawl[];
       // Store crawl IDs to compare later
       this.savedCollectionCrawlIds = this.collectionCrawls.map(({ id }) => id);
     } catch {