browsertrix/backend/test/test_crawl_config_search_values.py
Commit 4724754efc by Tessa Walsh, 2023-03-28:
Filter and sort crawl and workflow list API endpoints in backend (#724)
* Re-implement pagination and paginate crawlconfig revs

First step toward simplifying pagination to set us up for sorting
and filtering of list endpoints. This commit removes fastapi-pagination
as a dependency.
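
As a rough sketch (not the actual Browsertrix code), a hand-rolled
replacement can be a small helper that wraps query results in a fixed
response envelope; the helper name, parameter names, and default page
size below are illustrative assumptions:

# Hedged sketch of hand-rolled pagination; paginated_format, page,
# and pageSize are assumed names, not necessarily the real API.
DEFAULT_PAGE_SIZE = 1000


def calc_skip(page, page_size=DEFAULT_PAGE_SIZE):
    """Convert a 1-indexed page number into a database skip offset."""
    return (page - 1) * page_size


def paginated_format(items, total, page=1, page_size=DEFAULT_PAGE_SIZE):
    """Wrap a list of items in a consistent paginated response body."""
    return {"items": items, "total": total, "page": page, "pageSize": page_size}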

* Migrate all HttpUrl seeds to Seeds

This commit also updates the frontend to always use Seeds and to
fix display issues resulting from the change.
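
A hedged sketch of the shape of that migration, assuming a pydantic
Seed model with a url field (the scopeType field and the migrate_seed
helper are illustrative, not the actual migration code):

from typing import Optional, Union

from pydantic import BaseModel, HttpUrl


class Seed(BaseModel):
    url: HttpUrl
    scopeType: Optional[str] = None


def migrate_seed(seed: Union[str, dict]) -> Seed:
    """Normalize a bare URL string into a Seed, passing dicts through."""
    if isinstance(seed, str):
        return Seed(url=seed)
    return Seed(**seed)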

* Filter and sort crawls and workflows (see the example request after the lists below)

Crawls:
- Filter by createdBy (via userid param)
- Filter by state (comma-separated string for multiple values)
- Filter by first_seed, name, description
- Sort by started, finished, fileSize, firstSeed
- Sort descending by default to match frontend

Workflows:
- Filter by createdBy (formerly userid) and modifiedBy
- Filter by first_seed, name, description
- Sort by created, modified, firstSeed, lastCrawlTime
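
For example, a client request against the crawls list endpoint might
look like the following; the userid, state, and first_seed parameter
names come from the lists above, while sortBy and sortDirection (and
the placeholder base URL, org id, and token) are assumptions:

import requests

API_PREFIX = "http://localhost:8000/api"  # placeholder base URL
org_id = "<org-uuid>"  # placeholder org id
auth_headers = {"Authorization": "Bearer <token>"}  # placeholder auth

r = requests.get(
    f"{API_PREFIX}/orgs/{org_id}/crawls",
    headers=auth_headers,
    params={
        "state": "complete,failed",  # comma-separated multi-value filter
        "first_seed": "https://one.example.com",
        "sortBy": "finished",  # assumed sort parameter name
        "sortDirection": -1,  # descending by default, matching the frontend
    },
)
assert r.status_code == 200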

* Add crawlconfigs search-values API endpoint and test
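
The tests below exercise that endpoint from the client side. On the
backend, a minimal sketch of such an endpoint could gather unique
values with MongoDB distinct queries; the collection name, field
names, and connection URI here are assumptions, not the actual
implementation:

from motor.motor_asyncio import AsyncIOMotorClient


async def get_search_values(org_id):
    """Return unique names, descriptions, and first seed URLs for an org."""
    client = AsyncIOMotorClient("mongodb://localhost:27017")  # placeholder URI
    configs = client["browsertrix"]["crawl_configs"]  # assumed collection
    query = {"oid": org_id}
    return {
        "names": await configs.distinct("name", query),
        "descriptions": await configs.distinct("description", query),
        "firstSeeds": await configs.distinct("config.seeds.0.url", query),
    }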

"""Tests for the crawlconfigs search-values API endpoint."""

import requests

from .conftest import API_PREFIX

NAME_1 = "Workflow 1"
NAME_2 = "Workflow 2"

DESCRIPTION_1 = "Description 1"
DESCRIPTION_2 = "Description 2"

FIRST_SEED_1 = "https://one.example.com"
FIRST_SEED_2 = "https://two.example.com"


def get_sample_crawl_data(name, description, first_seed):
    """Build a minimal workflow creation payload with a single seed."""
    return {
        "runNow": False,
        "name": name,
        "config": {"seeds": [{"url": first_seed}]},
        "description": description,
    }


def test_create_new_config_1(admin_auth_headers, default_org_id):
    r = requests.post(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
        headers=admin_auth_headers,
        json=get_sample_crawl_data(NAME_1, DESCRIPTION_1, FIRST_SEED_1),
    )
    assert r.status_code == 200
    data = r.json()
    assert data["added"]
    # runNow was False, so no crawl job should have been started
    assert data["run_now_job"] is None


def test_get_search_values_1(admin_auth_headers, default_org_id):
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/search-values",
        headers=admin_auth_headers,
    )
    data = r.json()
    assert data["names"] == [NAME_1]
    assert data["descriptions"] == [DESCRIPTION_1]
    assert data["firstSeeds"] == [FIRST_SEED_1]


def test_create_new_config_2(admin_auth_headers, default_org_id):
    r = requests.post(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
        headers=admin_auth_headers,
        json=get_sample_crawl_data(NAME_2, DESCRIPTION_2, FIRST_SEED_2),
    )
    assert r.status_code == 200
    data = r.json()
    assert data["added"]
    assert data["run_now_job"] is None


def test_get_search_values_2(admin_auth_headers, default_org_id):
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/search-values",
        headers=admin_auth_headers,
    )
    data = r.json()
    assert sorted(data["names"]) == [NAME_1, NAME_2]
    assert sorted(data["descriptions"]) == [DESCRIPTION_1, DESCRIPTION_2]
    assert sorted(data["firstSeeds"]) == [FIRST_SEED_1, FIRST_SEED_2]


def test_create_new_config_3_duplicates(admin_auth_headers, default_org_id):
    """Add duplicate values to ensure they aren't repeated in the response."""
    r = requests.post(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
        headers=admin_auth_headers,
        json=get_sample_crawl_data(NAME_1, DESCRIPTION_2, FIRST_SEED_1),
    )
    assert r.status_code == 200
    data = r.json()
    assert data["added"]
    assert data["run_now_job"] is None


def test_get_search_values_3(admin_auth_headers, default_org_id):
    """Test that the search values still contain only unique entries."""
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/search-values",
        headers=admin_auth_headers,
    )
    data = r.json()
    assert sorted(data["names"]) == [NAME_1, NAME_2]
    assert sorted(data["descriptions"]) == [DESCRIPTION_1, DESCRIPTION_2]
    assert sorted(data["firstSeeds"]) == [FIRST_SEED_1, FIRST_SEED_2]