diff --git a/backend/btrixcloud/basecrawls.py b/backend/btrixcloud/basecrawls.py
index 7cd2f30c..8d4983d9 100644
--- a/backend/btrixcloud/basecrawls.py
+++ b/backend/btrixcloud/basecrawls.py
@@ -200,9 +200,6 @@ class BaseCrawlOps:
         if not crawl.name:
             crawl.name = config.name
 
-        if not crawl.description:
-            crawl.description = config.description
-
         if config.config.seeds:
             if add_first_seed:
                 first_seed = config.config.seeds[0]
diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py
index 13f19908..afeb5b5a 100644
--- a/backend/btrixcloud/crawls.py
+++ b/backend/btrixcloud/crawls.py
@@ -136,11 +136,6 @@ class CrawlOps(BaseCrawlOps):
                 },
             },
             {"$set": {"name": {"$arrayElemAt": ["$crawlConfig.name", 0]}}},
-            {
-                "$set": {
-                    "description": {"$arrayElemAt": ["$crawlConfig.description", 0]}
-                }
-            },
         ]
 
         if not resources:
diff --git a/backend/btrixcloud/db.py b/backend/btrixcloud/db.py
index be425b1b..8151445b 100644
--- a/backend/btrixcloud/db.py
+++ b/backend/btrixcloud/db.py
@@ -15,7 +15,7 @@ from pymongo.errors import InvalidName
 
 from .migrations import BaseMigration
 
-CURR_DB_VERSION = "0011"
+CURR_DB_VERSION = "0012"
 
 
 # ============================================================================
diff --git a/backend/btrixcloud/migrations/migration_0012_notes_to_description.py b/backend/btrixcloud/migrations/migration_0012_notes_to_description.py
new file mode 100644
index 00000000..b6fb6d70
--- /dev/null
+++ b/backend/btrixcloud/migrations/migration_0012_notes_to_description.py
@@ -0,0 +1,27 @@
+"""
+Migration 0012 - Notes to description
+"""
+from btrixcloud.migrations import BaseMigration
+
+
+MIGRATION_VERSION = "0012"
+
+
+class Migration(BaseMigration):
+    """Migration class."""
+
+    def __init__(self, mdb, migration_version=MIGRATION_VERSION):
+        super().__init__(mdb, migration_version)
+
+    async def migrate_up(self):
+        """Perform migration up.
+
+        Rename crawl notes field to description.
+        """
+        # pylint: disable=duplicate-code
+        crawls = self.mdb["crawls"]
+        try:
+            await crawls.update_many({}, {"$rename": {"notes": "description"}})
+        # pylint: disable=broad-exception-caught
+        except Exception as err:
+            print(f"Error renaming crawl notes to description: {err}", flush=True)
diff --git a/backend/btrixcloud/models.py b/backend/btrixcloud/models.py
index a5929ef6..c6f07eda 100644
--- a/backend/btrixcloud/models.py
+++ b/backend/btrixcloud/models.py
@@ -302,7 +302,7 @@ class BaseCrawl(BaseMongoModel):
 
     files: Optional[List[CrawlFile]] = []
 
-    notes: Optional[str]
+    description: Optional[str]
 
     errors: Optional[List[str]] = []
 
@@ -342,8 +342,6 @@ class CrawlOut(BaseMongoModel):
 
     tags: Optional[List[str]] = []
 
-    notes: Optional[str]
-
     errors: Optional[List[str]]
 
     collections: Optional[List[UUID4]] = []
@@ -351,7 +349,6 @@
     # automated crawl fields
     cid: Optional[UUID4]
     name: Optional[str]
-    description: Optional[str]
     firstSeed: Optional[str]
     seedCount: Optional[int]
     profileName: Optional[str]
@@ -372,7 +369,7 @@ class UpdateCrawl(BaseModel):
     """Update crawl"""
 
     tags: Optional[List[str]] = []
-    notes: Optional[str]
+    description: Optional[str]
 
 
 # ============================================================================
diff --git a/backend/btrixcloud/uploads.py b/backend/btrixcloud/uploads.py
index 090c2f47..a22e41ba 100644
--- a/backend/btrixcloud/uploads.py
+++ b/backend/btrixcloud/uploads.py
@@ -46,7 +46,7 @@ class UploadOps(BaseCrawlOps):
         stream,
         filename: str,
         name: Optional[str],
-        notes: Optional[str],
+        description: Optional[str],
         collections: Optional[List[UUID4]],
         tags: Optional[List[str]],
         org: Organization,
@@ -96,7 +96,7 @@ class UploadOps(BaseCrawlOps):
                 print("replace file deletion failed", exc)
 
         return await self._create_upload(
-            files, name, notes, collections, tags, id_, org, user
+            files, name, description, collections, tags, id_, org, user
         )
 
     # pylint: disable=too-many-arguments, too-many-locals
@@ -104,7 +104,7 @@ class UploadOps(BaseCrawlOps):
         self,
         uploads: List[UploadFile],
         name: Optional[str],
-        notes: Optional[str],
+        description: Optional[str],
         collections: Optional[List[UUID4]],
         tags: Optional[List[str]],
         org: Organization,
@@ -125,11 +125,11 @@ class UploadOps(BaseCrawlOps):
            files.append(file_reader.file_prep.get_crawl_file())
 
        return await self._create_upload(
-            files, name, notes, collections, tags, id_, org, user
+            files, name, description, collections, tags, id_, org, user
        )
 
    async def _create_upload(
-        self, files, name, notes, collections, tags, id_, org, user
+        self, files, name, description, collections, tags, id_, org, user
    ):
        now = dt_now()
        # ts_now = now.strftime("%Y%m%d%H%M%S")
@@ -145,7 +145,7 @@ class UploadOps(BaseCrawlOps):
         uploaded = UploadedCrawl(
             id=crawl_id,
             name=name or "New Upload @ " + str(now),
-            notes=notes,
+            description=description,
             collections=collection_uuids,
             tags=tags,
             userid=user.id,
@@ -240,14 +240,14 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
     async def upload_formdata(
         uploads: List[UploadFile] = File(...),
         name: Optional[str] = "",
-        notes: Optional[str] = "",
+        description: Optional[str] = "",
         collections: Optional[str] = "",
         tags: Optional[str] = "",
         org: Organization = Depends(org_crawl_dep),
         user: User = Depends(user_dep),
     ):
         name = unquote(name)
-        notes = unquote(notes)
+        description = unquote(description)
         colls_list = []
         if collections:
             colls_list = unquote(collections).split(",")
@@ -257,7 +257,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
             tags_list = unquote(tags).split(",")
 
         return await ops.upload_formdata(
-            uploads, name, notes, colls_list, tags_list, org, user
+            uploads, name, description, colls_list, tags_list, org, user
         )
 
     @app.put("/orgs/{oid}/uploads/stream", tags=["uploads"])
@@ -265,7 +265,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
         request: Request,
         filename: str,
         name: Optional[str] = "",
-        notes: Optional[str] = "",
+        description: Optional[str] = "",
         collections: Optional[str] = "",
         tags: Optional[str] = "",
         replaceId: Optional[str] = "",
@@ -273,7 +273,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
         user: User = Depends(user_dep),
     ):
         name = unquote(name)
-        notes = unquote(notes)
+        description = unquote(description)
         colls_list = []
         if collections:
             colls_list = unquote(collections).split(",")
@@ -286,7 +286,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
             request.stream(),
             filename,
             name,
-            notes,
+            description,
             colls_list,
             tags_list,
             org,
diff --git a/backend/test/test_filter_sort_results.py b/backend/test/test_filter_sort_results.py
index a92e08d7..f01b5f3a 100644
--- a/backend/test/test_filter_sort_results.py
+++ b/backend/test/test_filter_sort_results.py
@@ -165,14 +165,23 @@ def test_get_crawls_by_description(
     crawler_auth_headers, default_org_id, crawler_crawl_id
 ):
     description = "crawler test crawl"
+
+    # Give crawl a description
+    r = requests.patch(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawler_crawl_id}",
+        headers=crawler_auth_headers,
+        json={"description": description},
+    )
+    assert r.status_code == 200
+
     encoded_description = urllib.parse.quote(description)
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls?description={encoded_description}",
         headers=crawler_auth_headers,
     )
-    assert r.json()["total"] >= 1
-    for crawl in r.json()["items"]:
-        assert crawl["description"] == description
+    data = r.json()
+    assert data["total"] == 1
+    assert data["items"][0]["description"] == description
 
 
 def test_get_crawls_by_collection_id(
diff --git a/backend/test/test_run_crawl.py b/backend/test/test_run_crawl.py
index 198488e2..e4fb89f1 100644
--- a/backend/test/test_run_crawl.py
+++ b/backend/test/test_run_crawl.py
@@ -81,7 +81,6 @@ def test_crawl_info(admin_auth_headers, default_org_id, admin_crawl_id):
     data = r.json()
     assert data["fileSize"] == wacz_size
     assert data["fileCount"] == 1
-    assert data["description"] == "Admin Test Crawl description"
 
 
 def test_crawls_include_seed_info(admin_auth_headers, default_org_id, admin_crawl_id):
@@ -188,19 +187,14 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == ["wr-test-1", "wr-test-2"]
-    # Add exception handling for old crawls without notes field
-    try:
-        assert not data["notes"]
-    except KeyError:
-        pass
 
-    # Submit patch request to update tags and notes
+    # Submit patch request to update tags and description
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
-    UPDATED_NOTES = "Lorem ipsum test note."
+    UPDATED_DESC = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES},
+        json={"tags": UPDATED_TAGS, "description": UPDATED_DESC},
     )
     assert r.status_code == 200
     data = r.json()
@@ -214,13 +208,13 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
-    assert data["notes"] == UPDATED_NOTES
+    assert data["description"] == UPDATED_DESC
 
     # Verify deleting works as well
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": [], "notes": None},
+        json={"tags": [], "description": None},
     )
     assert r.status_code == 200
 
@@ -231,7 +225,7 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert data["tags"] == []
-    assert not data["notes"]
+    assert not data["description"]
 
 
 def test_delete_crawls_crawler(
diff --git a/backend/test/test_uploads.py b/backend/test/test_uploads.py
index 95a671c5..2a46206a 100644
--- a/backend/test/test_uploads.py
+++ b/backend/test/test_uploads.py
@@ -16,7 +16,7 @@ curr_dir = os.path.dirname(os.path.realpath(__file__))
 def test_upload_stream(admin_auth_headers, default_org_id, uploads_collection_id):
     with open(os.path.join(curr_dir, "data", "example.wacz"), "rb") as fh:
         r = requests.put(
-            f"{API_PREFIX}/orgs/{default_org_id}/uploads/stream?filename=test.wacz&name=My%20Upload&notes=Testing%0AData&collections={uploads_collection_id}&tags=one%2Ctwo",
+            f"{API_PREFIX}/orgs/{default_org_id}/uploads/stream?filename=test.wacz&name=My%20Upload&description=Testing%0AData&collections={uploads_collection_id}&tags=one%2Ctwo",
             headers=admin_auth_headers,
             data=read_in_chunks(fh),
         )
@@ -45,7 +45,7 @@ def test_list_stream_upload(admin_auth_headers, default_org_id, uploads_collecti
 
     assert found
     assert found["name"] == "My Upload"
-    assert found["notes"] == "Testing\nData"
+    assert found["description"] == "Testing\nData"
     assert found["collections"] == [uploads_collection_id]
     assert sorted(found["tags"]) == ["one", "two"]
     assert "files" not in found
@@ -255,16 +255,16 @@ def test_update_upload_metadata(admin_auth_headers, default_org_id):
     data = r.json()
     assert data["name"] == "My Upload Updated"
     assert not data["tags"]
-    assert not data["notes"]
+    assert not data["description"]
 
-    # Submit patch request to update name, tags, and notes
+    # Submit patch request to update name, tags, and description
     UPDATED_NAME = "New Upload Name"
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
-    UPDATED_NOTES = "Lorem ipsum test note."
+    UPDATED_DESC = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/uploads/{upload_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES, "name": UPDATED_NAME},
+        json={"tags": UPDATED_TAGS, "description": UPDATED_DESC, "name": UPDATED_NAME},
     )
     assert r.status_code == 200
     data = r.json()
@@ -278,7 +278,7 @@ def test_update_upload_metadata(admin_auth_headers, default_org_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
-    assert data["notes"] == UPDATED_NOTES
+    assert data["description"] == UPDATED_DESC
     assert data["name"] == UPDATED_NAME
 
 
diff --git a/frontend/src/components/crawl-metadata-editor.ts b/frontend/src/components/crawl-metadata-editor.ts
index 73338665..58fe076b 100644
--- a/frontend/src/components/crawl-metadata-editor.ts
+++ b/frontend/src/components/crawl-metadata-editor.ts
@@ -59,7 +59,7 @@ export class CrawlMetadataEditor extends LiteElement {
     threshold: 0.2, // stricter; default is 0.6
   });
 
-  private validateCrawlNotesMax = maxLengthValidator(500);
+  private validateCrawlDescriptionMax = maxLengthValidator(500);
 
   willUpdate(changedProperties: Map) {
     if (changedProperties.has("open") && this.open) {
@@ -88,27 +88,26 @@ private renderEditMetadata() {
     if (!this.crawl) return;
 
-    const { helpText, validate } = this.validateCrawlNotesMax;
+    const { helpText, validate } = this.validateCrawlDescriptionMax;
     return html`
-      ${this.includeName ? html`
-
-
-
-      ` : ``}
+      ${this.includeName
+        ? html`
+
+
+
+          `
+        : ``}
       ${isWorkflowArtifact
         ? msg("Back to Crawl Workflow")
-        : (this.crawl?.type === "upload" ?
-            msg("Back to All Uploads") : msg("Back to All Crawls"))
-      }
@@ -701,14 +701,14 @@ export class CrawlDetail extends LiteElement {
     const noneText = html`${msg("None")}`;
 
     return html`
-
+
       ${when(
         this.crawl,
         () => when(
-          this.crawl!.notes?.length,
+          this.crawl!.description?.length,
          () => html`
 
-${this.crawl?.notes}
+${this.crawl?.description}
 
            `,
          () => noneText
diff --git a/frontend/src/types/crawler.ts b/frontend/src/types/crawler.ts
index 03a105c7..0556af81 100644
--- a/frontend/src/types/crawler.ts
+++ b/frontend/src/types/crawler.ts
@@ -119,7 +119,7 @@ export type Crawl = CrawlConfig & {
   fileCount?: number;
   fileSize?: number;
   completions?: number;
-  notes: string | null;
+  description: string | null;
   firstSeed: string;
   seedCount: number;
   stopping: boolean;