diff --git a/backend/btrixcloud/crawlconfigs.py b/backend/btrixcloud/crawlconfigs.py
index bf7e9353..b95ecefc 100644
--- a/backend/btrixcloud/crawlconfigs.py
+++ b/backend/btrixcloud/crawlconfigs.py
@@ -178,12 +178,13 @@ class CrawlConfigsResponse(BaseModel):
 
 # ============================================================================
 class UpdateCrawlConfig(BaseModel):
-    """Update crawl config name or crawl schedule"""
+    """Update crawl config name, crawl schedule, or tags"""
 
     name: Optional[str]
     schedule: Optional[str]
     profileid: Optional[str]
     scale: Optional[conint(ge=1, le=MAX_CRAWL_SCALE)]
+    tags: Optional[List[str]] = []
 
 
 # ============================================================================
@@ -312,7 +313,7 @@ class CrawlConfigOps:
         await asyncio.gather(inc, add)
 
     async def update_crawl_config(self, cid: uuid.UUID, update: UpdateCrawlConfig):
-        """Update name, scale and/or schedule for an existing crawl config"""
+        """Update name, scale, schedule, and/or tags for an existing crawl config"""
 
         # set update query
         query = update.dict(
@@ -578,7 +579,7 @@ class CrawlConfigOps:
         return result.inserted_id
 
     async def get_crawl_config_tags(self, org):
-        """get distinct tags from all crawl configs for this orge"""
+        """get distinct tags from all crawl configs for this org"""
         return await self.crawl_configs.distinct("tags", {"oid": org.id})
 
 
diff --git a/backend/test/conftest.py b/backend/test/conftest.py
index dc1f5e85..f40c79f0 100644
--- a/backend/test/conftest.py
+++ b/backend/test/conftest.py
@@ -182,3 +182,13 @@ def crawler_crawl_id(crawler_auth_headers, default_org_id):
 @pytest.fixture(scope="session")
 def crawler_config_id(crawler_crawl_id):
     return _crawler_config_id
+
+
+@pytest.fixture(scope="session")
+def sample_crawl_data():
+    return {
+        "runNow": False,
+        "name": "Test Crawl",
+        "config": {"seeds": ["https://example.com/"]},
+        "tags": ["tag1", "tag2"],
+    }
diff --git a/backend/test/test_crawlconfigs.py b/backend/test/test_crawlconfigs.py
new file mode 100644
index 00000000..1fc23173
--- /dev/null
+++ b/backend/test/test_crawlconfigs.py
@@ -0,0 +1,42 @@
+import requests
+
+from .conftest import API_PREFIX
+
+
+def test_add_update_crawl_config(
+    crawler_auth_headers, default_org_id, sample_crawl_data
+):
+    # Create crawl config
+    r = requests.post(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
+        headers=crawler_auth_headers,
+        json=sample_crawl_data,
+    )
+    assert r.status_code == 200
+
+    data = r.json()
+    cid = data["added"]
+
+    # Update crawl config
+    UPDATED_NAME = "Updated name"
+    UPDATED_TAGS = ["tag3", "tag4"]
+    r = requests.patch(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/{cid}/",
+        headers=crawler_auth_headers,
+        json={"name": UPDATED_NAME, "tags": UPDATED_TAGS},
+    )
+    assert r.status_code == 200
+
+    data = r.json()
+    assert data["success"]
+
+    # Verify update was successful
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/{cid}/",
+        headers=crawler_auth_headers,
+    )
+    assert r.status_code == 200
+
+    data = r.json()
+    assert data["name"] == UPDATED_NAME
+    assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
diff --git a/backend/test/test_filter_results.py b/backend/test/test_filter_results.py
index 48a00f6e..d8be89bc 100644
--- a/backend/test/test_filter_results.py
+++ b/backend/test/test_filter_results.py
@@ -3,29 +3,8 @@ import requests
 from .conftest import API_PREFIX
 
 
-def get_sample_crawl_data():
-    return {
-        "runNow": False,
-        "name": "Test Crawl",
-        "config": {"seeds": ["https://example.com/"]},
-    }
-
-
-def test_create_new_config_crawler_user(crawler_auth_headers, default_org_id):
-    r = requests.post(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
-        headers=crawler_auth_headers,
-        json=get_sample_crawl_data(),
-    )
-
-    assert r.status_code == 200
-
-    data = r.json()
-    assert data["added"]
-    assert data["run_now_job"] == None
-
-
 def test_get_config_by_user(crawler_auth_headers, default_org_id, crawler_userid):
+    """Crawlconfig already created for user in test_crawlconfigs."""
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?userid={crawler_userid}",
         headers=crawler_auth_headers,
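
Note: the update_crawl_config hunk above ends at "query = update.dict(", so the keyword arguments that build the update query fall outside this diff. The sketch below shows the general pattern, assuming the handler uses Pydantic's exclude_unset/exclude_none flags to turn a partial PATCH body into a MongoDB $set; the trimmed-down UpdateCrawlConfig model and the commented find_one_and_update call are illustrative, not the actual backend code.

# Hypothetical sketch, not the actual Browsertrix backend code.
from typing import List, Optional

from pydantic import BaseModel


class UpdateCrawlConfig(BaseModel):
    # Trimmed subset of the real model; profileid and scale are omitted
    # so the example stays self-contained.
    name: Optional[str]
    schedule: Optional[str]
    tags: Optional[List[str]] = []


# Simulate the PATCH body sent by test_add_update_crawl_config.
update = UpdateCrawlConfig(name="Updated name", tags=["tag3", "tag4"])

# exclude_unset drops fields the client never sent and exclude_none drops
# explicit nulls, so "schedule" is not clobbered by this partial update.
query = update.dict(exclude_unset=True, exclude_none=True)
assert query == {"name": "Updated name", "tags": ["tag3", "tag4"]}

# The resulting dict would then feed a Mongo update, e.g. (illustrative):
# await self.crawl_configs.find_one_and_update({"_id": cid}, {"$set": query})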