diff --git a/backend/btrixcloud/crawlconfigs.py b/backend/btrixcloud/crawlconfigs.py
index 091838bf..8930b23f 100644
--- a/backend/btrixcloud/crawlconfigs.py
+++ b/backend/btrixcloud/crawlconfigs.py
@@ -100,6 +100,8 @@ class CrawlConfigIn(BaseModel):
 
     name: str
 
+    description: Optional[str]
+
     jobType: Optional[JobType] = JobType.CUSTOM
     profileid: Optional[str]
 
@@ -156,6 +158,7 @@ class CrawlConfig(CrawlConfigCore):
     """Schedulable config"""
 
     name: Optional[str]
+    description: Optional[str]
 
     created: datetime
     createdBy: Optional[UUID4]
@@ -206,6 +209,7 @@ class UpdateCrawlConfig(BaseModel):
     # metadata: not revision tracked
     name: Optional[str]
     tags: Optional[List[str]]
+    description: Optional[str]
 
     # crawl data: revision tracked
     schedule: Optional[str]
@@ -375,6 +379,9 @@ class CrawlConfigOps:
         )
 
         metadata_changed = self.check_attr_changed(orig_crawl_config, update, "name")
+        metadata_changed = metadata_changed or self.check_attr_changed(
+            orig_crawl_config, update, "description"
+        )
         metadata_changed = metadata_changed or (
             update.tags is not None
             and ",".join(orig_crawl_config.tags) != ",".join(update.tags)
diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py
index 73ba09a5..1bf823c2 100644
--- a/backend/btrixcloud/crawls.py
+++ b/backend/btrixcloud/crawls.py
@@ -94,6 +94,7 @@ class CrawlOut(Crawl):
 
     userName: Optional[str]
     name: Optional[str]
+    description: Optional[str]
     profileName: Optional[str]
     resources: Optional[List[CrawlFileOut]] = []
     firstSeed: Optional[str]
@@ -112,6 +113,7 @@ class ListCrawlOut(BaseMongoModel):
     oid: UUID4
     cid: UUID4
     name: Optional[str]
+    description: Optional[str]
 
     manual: Optional[bool]
 
@@ -318,6 +320,8 @@ class CrawlOps:
         if not crawl.name:
             crawl.name = config.name
 
+        crawl.description = config.description
+
         if config.config.seeds:
             first_seed = config.config.seeds[0]
             if isinstance(first_seed, HttpUrl):
diff --git a/backend/test/conftest.py b/backend/test/conftest.py
index df2dda07..cc1ba9f4 100644
--- a/backend/test/conftest.py
+++ b/backend/test/conftest.py
@@ -81,6 +81,7 @@ def admin_crawl_id(admin_auth_headers, default_org_id):
     crawl_data = {
         "runNow": True,
         "name": "Admin Test Crawl",
+        "description": "Admin Test Crawl description",
        "tags": ["wr-test-1", "wr-test-2"],
         "config": {
             "seeds": ["https://webrecorder.net/"],
diff --git a/backend/test/test_crawlconfigs.py b/backend/test/test_crawlconfigs.py
index 4e351c8e..bf3fe3d6 100644
--- a/backend/test/test_crawlconfigs.py
+++ b/backend/test/test_crawlconfigs.py
@@ -5,6 +5,7 @@ from .conftest import API_PREFIX
 
 cid = None
 UPDATED_NAME = "Updated name"
+UPDATED_DESCRIPTION = "Updated description"
 UPDATED_TAGS = ["tag3", "tag4"]
 
 
@@ -37,12 +38,31 @@ def test_update_name_only(crawler_auth_headers, default_org_id):
     assert data["settings_changed"] == False
 
 
-def test_update_crawl_config_name_and_tags(crawler_auth_headers, default_org_id):
+def test_update_description_only(crawler_auth_headers, default_org_id):
+    # update description only
+    r = requests.patch(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/{cid}/",
+        headers=crawler_auth_headers,
+        json={"description": "updated description"},
+    )
+    assert r.status_code == 200
+
+    data = r.json()
+    assert data["success"]
+    assert data["metadata_changed"] == True
+    assert data["settings_changed"] == False
+
+
+def test_update_crawl_config_metadata(crawler_auth_headers, default_org_id):
     # Update crawl config
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/{cid}/",
         headers=crawler_auth_headers,
-        json={"name": UPDATED_NAME, "tags": UPDATED_TAGS},
+        json={
+            "name": UPDATED_NAME,
+            "description": UPDATED_DESCRIPTION,
+            "tags": UPDATED_TAGS,
+        },
     )
     assert r.status_code == 200
 
@@ -62,6 +82,7 @@ def test_verify_update(crawler_auth_headers, default_org_id):
 
     data = r.json()
     assert data["name"] == UPDATED_NAME
+    assert data["description"] == UPDATED_DESCRIPTION
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
 
 
diff --git a/backend/test/test_run_crawl.py b/backend/test/test_run_crawl.py
index 88c34114..9cec8bf2 100644
--- a/backend/test/test_run_crawl.py
+++ b/backend/test/test_run_crawl.py
@@ -74,6 +74,7 @@ def test_crawl_info(admin_auth_headers, default_org_id, admin_crawl_id):
     )
     data = r.json()
     assert data["fileSize"] == wacz_size
+    assert data["description"] == "Admin Test Crawl description"
 
 
 def test_crawls_include_seed_info(admin_auth_headers, default_org_id, admin_crawl_id):
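
Usage note (not part of the patch): a minimal sketch of how a client could exercise the new `description` field against the crawl config PATCH endpoint shown in the tests above. The base URL, org id, config id, and auth token below are placeholders, not values from this diff; the endpoint path and response fields mirror those used in `test_crawlconfigs.py`.

```python
import requests

# Placeholder values -- substitute real ones for your deployment.
API_PREFIX = "https://btrix.example.org/api"   # assumed base URL (tests import API_PREFIX from conftest)
ORG_ID = "<org-uuid>"
CID = "<crawlconfig-uuid>"
HEADERS = {"Authorization": "Bearer <access-token>"}

# Update only the description of an existing crawl config.
# Per the patch, this should be reported as a metadata-only change.
r = requests.patch(
    f"{API_PREFIX}/orgs/{ORG_ID}/crawlconfigs/{CID}/",
    headers=HEADERS,
    json={"description": "updated description"},
)
r.raise_for_status()
data = r.json()
assert data["metadata_changed"] is True
assert data["settings_changed"] is False
```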