1.6.3 Fixes - Fix workflow sort order for Latest Crawl + 'Remove From Collection' action menu on archived items in collections (#1113)
* Fix latest crawl (lastRun) sort:
  - Don't cast the 'started' value to a string when setting it as the crawl start time (regression from #937). This caused incorrect sorting, since the finished crawl time was a datetime while the start crawl time was a string.
  - Consolidate updating the config's crawl info in one place; simplify by not returning the started time at all and setting it directly.
  - Pass the mdb crawlconfigs and crawls collections directly to the add_new_crawl() function.
  - Fixes #1108

* Add a dropdown menu containing 'Remove from Collection' to archived items in the collection view (#1110)
  - Enables users to remove an item from a collection from the collection detail view; this menu was previously missing.
  - Fixes #1102 (missing dropdown menu) by making use of the inactive menu trigger button.
  - Updates the collection items page size to match the "Archived Items" page size (20 items per page).

---------

Co-authored-by: sua yoo <sua@webrecorder.org>
parent 8b16124675
commit 2da6c1c905
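For context on the first fix, here is a minimal sketch of why mixing string and datetime timestamps breaks sorting. Illustrative only: plain Python refuses to compare the mixed types outright, while MongoDB sorts mixed BSON types by type rather than erroring, which is why lastRun ordering went wrong silently.

    from datetime import datetime, timedelta

    now = datetime.utcnow()
    finished_run = now - timedelta(hours=1)  # finished crawls stored a datetime
    started_run = str(now)                   # the regression: start time stored as str(started)

    try:
        sorted([finished_run, started_run])
    except TypeError as err:
        print("cannot sort mixed types:", err)

    # The fix keeps everything a datetime, so sorting is chronological:
    print(sorted([finished_run, now]))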
@@ -236,12 +236,12 @@ class CrawlOps(BaseCrawlOps):

     async def add_new_crawl(self, crawl_id: str, crawlconfig: CrawlConfig, user: User):
         """initialize new crawl"""
-        new_crawl = await add_new_crawl(self.crawls, crawl_id, crawlconfig, user.id)
-        return await set_config_current_crawl_info(
+        return await add_new_crawl(
+            self.crawls,
             self.crawl_configs.crawl_configs,
-            crawlconfig.id,
-            new_crawl["id"],
-            new_crawl["started"],
+            crawl_id,
+            crawlconfig,
+            user.id,
         )

     async def update_crawl_scale(
@@ -513,8 +513,14 @@ class CrawlOps(BaseCrawlOps):


 # ============================================================================
+# pylint: disable=too-many-arguments
 async def add_new_crawl(
-    crawls, crawl_id: str, crawlconfig: CrawlConfig, userid: UUID4, manual=True
+    crawls,
+    crawlconfigs,
+    crawl_id: str,
+    crawlconfig: CrawlConfig,
+    userid: UUID4,
+    manual=True,
 ):
     """initialize new crawl"""
     started = dt_now()
@@ -540,7 +546,14 @@ async def add_new_crawl(

     try:
         result = await crawls.insert_one(crawl.to_dict())
-        return {"id": str(result.inserted_id), "started": str(started)}
+
+        return await set_config_current_crawl_info(
+            crawlconfigs,
+            crawlconfig.id,
+            result.inserted_id,
+            started,
+        )
+
     except pymongo.errors.DuplicateKeyError:
         # print(f"Crawl Already Added: {crawl.id} - {crawl.state}")
         return False
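With this change, add_new_crawl() records the crawl on its config itself, handing set_config_current_crawl_info() the datetime `started` directly instead of round-tripping str(started) through a return value. A rough sketch of what that call plausibly does against the crawl_configs collection; the field names (currCrawlId, currCrawlStartTime) are assumptions for illustration, not necessarily the real schema:

    from datetime import datetime

    async def set_config_current_crawl_info(crawl_configs, cid, crawl_id, started: datetime):
        """Sketch: mark the config's current crawl, keeping the start time a datetime."""
        # Hypothetical field names; the point is that `started` stays a
        # datetime so lastRun values always compare as like types.
        result = await crawl_configs.find_one_and_update(
            {"_id": cid},
            {"$set": {"currCrawlId": crawl_id, "currCrawlStartTime": started}},
        )
        return result is not None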
@@ -9,7 +9,6 @@ from .db import init_db
 from .crawlconfigs import (
     get_crawl_config,
     inc_crawl_count,
-    set_config_current_crawl_info,
 )
 from .crawls import add_new_crawl
 from .utils import register_exit_handler
@@ -55,17 +54,14 @@ class ScheduledJob(K8sAPI):

         # db create
         await inc_crawl_count(self.crawlconfigs, crawlconfig.id)
-        new_crawl = await add_new_crawl(
-            self.crawls, crawl_id, crawlconfig, uuid.UUID(userid), manual=False
-        )
-        # pylint: disable=duplicate-code
-        await set_config_current_crawl_info(
-            self.crawlconfigs.crawl_configs,
-            crawlconfig.id,
-            new_crawl["id"],
-            new_crawl["started"],
+        await add_new_crawl(
+            self.crawls,
+            self.crawlconfigs,
+            crawl_id,
+            crawlconfig,
+            uuid.UUID(userid),
+            manual=False,
         )

         print("Crawl Created: " + crawl_id)
@@ -20,6 +20,7 @@ import type { PageChangeEvent } from "../../components/pagination";

 const ABORT_REASON_THROTTLE = "throttled";
 const DESCRIPTION_MAX_HEIGHT_PX = 200;
+const INITIAL_ITEMS_PAGE_SIZE = 20;
 const TABS = ["replay", "items"] as const;
 export type Tab = (typeof TABS)[number];
@@ -80,7 +81,7 @@ export class CollectionDetail extends LiteElement {
       this.fetchCollection();
     }
     if (changedProperties.has("collectionId")) {
-      this.fetchArchivedItems();
+      this.fetchArchivedItems({ page: 1 });
     }
   }
@@ -597,10 +598,18 @@ export class CollectionDetail extends LiteElement {
     `;
   }

-  private renderArchivedItem = (wc: Crawl | Upload) =>
+  private renderArchivedItem = (wc: Crawl | Upload, idx: number) =>
     html`
       <btrix-crawl-list-item .crawl=${wc}>
-        <div slot="menuTrigger" role="none"></div>
+        <sl-menu slot="menu">
+          <sl-menu-item
+            style="--sl-color-neutral-700: var(--warning)"
+            @click=${() => this.removeArchivedItem(wc.id, idx)}
+          >
+            <sl-icon name="folder-minus" slot="prefix"></sl-icon>
+            ${msg("Remove from Collection")}
+          </sl-menu-item>
+        </sl-menu>
       </btrix-crawl-list-item>
     `;
@@ -731,7 +740,7 @@ export class CollectionDetail extends LiteElement {
   private async fetchArchivedItems(params?: APIPaginationQuery): Promise<void> {
     this.cancelInProgressGetArchivedItems();
     try {
-      this.archivedItems = await this.getArchivedItems();
+      this.archivedItems = await this.getArchivedItems(params);
     } catch (e: any) {
       if (e === ABORT_REASON_THROTTLE) {
         console.debug("Fetch web captures aborted to throttle");
@@ -759,9 +768,19 @@ export class CollectionDetail extends LiteElement {
       APIPaginationQuery &
       APISortQuery
   ): Promise<APIPaginatedList> {
-    const query = queryString.stringify(params || {}, {
-      arrayFormat: "comma",
-    });
+    const query = queryString.stringify(
+      {
+        ...params,
+        page: params?.page || this.archivedItems?.page || 1,
+        pageSize:
+          params?.pageSize ||
+          this.archivedItems?.pageSize ||
+          INITIAL_ITEMS_PAGE_SIZE,
+      },
+      {
+        arrayFormat: "comma",
+      }
+    );
     const data: APIPaginatedList = await this.apiFetch(
       `/orgs/${this.orgId}/all-crawls?collectionId=${this.collectionId}&${query}`,
       this.authState!
@@ -769,5 +788,46 @@ export class CollectionDetail extends LiteElement {

     return data;
   }
+
+  private async removeArchivedItem(id: string, pageIndex: number) {
+    try {
+      const data: Crawl | Upload = await this.apiFetch(
+        `/orgs/${this.orgId}/collections/${this.collectionId}/remove`,
+        this.authState!,
+        {
+          method: "POST",
+          body: JSON.stringify({ crawlIds: [id] }),
+        }
+      );
+
+      const { page, items, total } = this.archivedItems!;
+      // Update state for immediate feedback while retrieving list
+      this.archivedItems = {
+        ...this.archivedItems!,
+        total: total - 1,
+        items: [...items.slice(0, pageIndex), ...items.slice(pageIndex + 1)],
+      };
+
+      this.notify({
+        message: msg(str`Successfully removed item from Collection.`),
+        variant: "success",
+        icon: "check2-circle",
+      });
+      this.fetchCollection();
+      this.fetchArchivedItems({
+        // Update page if last item
+        page: items.length === 1 && page > 1 ? page - 1 : page,
+      });
+    } catch (e: any) {
+      console.debug(e?.message);
+      this.notify({
+        message: msg(
+          "Sorry, couldn't remove item from Collection at this time."
+        ),
+        variant: "danger",
+        icon: "exclamation-octagon",
+      });
+    }
+  }
 }
 customElements.define("btrix-collection-detail", CollectionDetail);
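The `page: items.length === 1 && page > 1 ? page - 1 : page` guard in removeArchivedItem() steps back a page when the removed item was the last one on it. The same rule as a self-contained sketch (shown in Python for consistency with the earlier examples):

    def page_after_removal(items_on_page: int, page: int) -> int:
        """Return the page to fetch after removing one item from the current page."""
        return page - 1 if items_on_page == 1 and page > 1 else page

    assert page_after_removal(1, 3) == 2  # removed the only item on page 3
    assert page_after_removal(5, 3) == 3  # page still has items; stay put
    assert page_after_removal(1, 1) == 1  # never go below page 1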