restrict QA runs to a 'min_qa_crawler_image' if set in the chart (#1685)
- fixes #1684 — can be used to optionally restrict QA to only some crawls (e.g. those run with browsertrix-crawler >= 1.0.0); enforces the restriction on the backend (returns 400) and handles the resulting special error on the frontend
This commit is contained in:
parent
c800da1732
commit
835014d829
@ -3,6 +3,7 @@
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import contextlib
|
||||
import urllib.parse
|
||||
@ -64,6 +65,8 @@ class CrawlOps(BaseCrawlOps):
|
||||
self.colls.set_crawl_ops(self)
|
||||
self.event_webhook_ops.set_crawl_ops(self)
|
||||
|
||||
self.min_qa_crawler_image = os.environ.get("MIN_QA_CRAWLER_IMAGE")
|
||||
|
||||
async def init_index(self):
|
||||
"""init index for crawls db collection"""
|
||||
await self.crawls.create_index([("type", pymongo.HASHED)])
|
||||
@ -734,6 +737,14 @@ class CrawlOps(BaseCrawlOps):
|
||||
if crawl.state not in SUCCESSFUL_STATES:
|
||||
raise HTTPException(status_code=400, detail="crawl_did_not_succeed")
|
||||
|
||||
# if set, can only QA if crawl image is >= min_qa_crawler_image
|
||||
if (
|
||||
self.min_qa_crawler_image
|
||||
and crawl.image
|
||||
and crawl.image < self.min_qa_crawler_image
|
||||
):
|
||||
raise HTTPException(status_code=400, detail="qa_not_supported_for_crawl")
|
||||
|
||||
# can only run one QA at a time
|
||||
if crawl.qa:
|
||||
raise HTTPException(status_code=400, detail="qa_already_running")
|
||||
|
@ -52,6 +52,8 @@ data:
|
||||
|
||||
CRAWLER_CHANNELS_JSON: "/ops-configs/crawler_channels.json"
|
||||
|
||||
MIN_QA_CRAWLER_IMAGE: "{{ .Values.min_qa_crawler_image }}"
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
|
@ -209,6 +209,9 @@ crawler_pull_policy: "Always"
|
||||
|
||||
crawler_namespace: "crawlers"
|
||||
|
||||
# if set, will restrict QA to crawls whose image name is >= this value
|
||||
# min_qa_crawler_image: ""
|
||||
|
||||
# optional: enable to use a persist volume claim for all crawls
|
||||
# can be enabled to use a multi-write shared filesystem
|
||||
# crawler_pv_claim: "nfs-shared-crawls"
|
||||
|
@ -1217,10 +1217,16 @@ ${this.crawl?.description}
|
||||
icon: "check2-circle",
|
||||
});
|
||||
} catch (e: unknown) {
|
||||
let message = msg("Sorry, couldn't start QA run at this time.");
|
||||
if (e instanceof Error && e.message === "qa_not_supported_for_crawl") {
|
||||
message = msg(
|
||||
"Sorry, QA analysis is not supported for this crawl as it was run with an older crawler version. Please run a new crawl with the latest crawler and QA should be available.",
|
||||
);
|
||||
}
|
||||
console.debug(e);
|
||||
|
||||
this.notify.toast({
|
||||
message: msg("Sorry, couldn't start QA run at this time."),
|
||||
message,
|
||||
variant: "danger",
|
||||
icon: "exclamation-octagon",
|
||||
});
|
||||
|
Loading…
Reference in New Issue
Block a user