Add option to list workflows API endpoint to filter by schedule (#822)
* Add option to filter workflows by empty or non-empty schedule
* Add tests
parent 542ad7a24a
commit 48d34bc3c4
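The new filter is exposed as a boolean `schedule` query parameter on the workflow list endpoint. A minimal usage sketch, assuming a deployed instance; the host, org id, and token below are placeholders, not values from this commit:

```python
# Sketch of calling the list endpoint with the new filter.
# Host, org id, and auth token are placeholders.
import requests

API_PREFIX = "https://app.example.com/api"  # assumed deployment URL
HEADERS = {"Authorization": "Bearer <token>"}

# Only workflows with a non-empty cron schedule:
r = requests.get(
    f"{API_PREFIX}/orgs/<org-id>/crawlconfigs?schedule=true",
    headers=HEADERS,
)
print(r.json()["total"])

# Only workflows with no schedule (omit the parameter to skip filtering):
r = requests.get(
    f"{API_PREFIX}/orgs/<org-id>/crawlconfigs?schedule=false",
    headers=HEADERS,
)
print(r.json()["total"])
```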
@@ -469,6 +469,7 @@ class CrawlConfigOps:
         name: str = None,
         description: str = None,
         tags: Optional[List[str]] = None,
+        schedule: Optional[bool] = None,
         sort_by: str = None,
         sort_direction: int = -1,
     ):
@@ -495,6 +496,12 @@ class CrawlConfigOps:
         if description:
             match_query["description"] = description
 
+        if schedule is not None:
+            if schedule:
+                match_query["schedule"] = {"$nin": ["", None]}
+            else:
+                match_query["schedule"] = {"$in": ["", None]}
+
         # pylint: disable=duplicate-code
         aggregate = [
             {"$match": match_query},
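The two branches lean on MongoDB's null semantics: `$in` with `None` also matches documents where the field is missing entirely, and `$nin` with `None` excludes them. A quick sketch of the same queries run directly with pymongo; the database and collection names are assumptions, not taken from this commit:

```python
# Sketch of the two match queries run directly with pymongo.
# Database and collection names are hypothetical.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")
configs = client["btrix"]["crawl_configs"]  # assumed names

# schedule=True: field present and not "" / null.
scheduled = list(configs.find({"schedule": {"$nin": ["", None]}}))

# schedule=False: field is "", null, or absent entirely,
# since $in with null also matches documents missing the field.
unscheduled = list(configs.find({"schedule": {"$in": ["", None]}}))
```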
@@ -977,6 +984,7 @@ def init_crawl_config_api(
         name: Optional[str] = None,
         description: Optional[str] = None,
         tag: Union[List[str], None] = Query(default=None),
+        schedule: Optional[bool] = None,
         sortBy: str = None,
         sortDirection: int = -1,
     ):
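Declaring the parameter as `Optional[bool]` lets FastAPI handle string-to-bool coercion, so both `?schedule=True` (as used in the tests below) and `?schedule=true` parse to `True`, and omitting the parameter leaves it `None`. A standalone sketch; the app and path here are illustrative, not the project's actual router:

```python
# Minimal FastAPI sketch of Optional[bool] query-parameter parsing.
# The app and path are illustrative only.
from typing import Optional
from fastapi import FastAPI

app = FastAPI()

@app.get("/crawlconfigs")
async def list_configs(schedule: Optional[bool] = None):
    # (no param)      -> None  (no schedule filtering)
    # ?schedule=true  -> True  ("True", "1", "yes", "on" also parse as true)
    # ?schedule=false -> False
    return {"schedule": schedule}
```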
@@ -998,6 +1006,7 @@ def init_crawl_config_api(
             name=name,
             description=description,
             tags=tag,
+            schedule=schedule,
             page_size=pageSize,
             page=page,
             sort_by=sortBy,
@@ -11,6 +11,7 @@ UPDATED_TAGS = ["tag3", "tag4"]
 
 
 def test_add_crawl_config(crawler_auth_headers, default_org_id, sample_crawl_data):
     # Create crawl config
+    sample_crawl_data["schedule"] = "0 0 * * *"
     r = requests.post(
         f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs/",
         headers=crawler_auth_headers,
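The `"0 0 * * *"` value is a standard five-field cron expression (daily at midnight), i.e. exactly the kind of non-empty string the `schedule=True` filter selects for. For sanity-checking such strings, the third-party croniter package works; using it here is an assumption, not a dependency confirmed by this commit:

```python
# Sketch: validating the cron string used in the test.
# croniter is an assumed third-party helper, not part of this codebase.
from croniter import croniter

assert croniter.is_valid("0 0 * * *")    # daily at midnight: valid
assert not croniter.is_valid("0 0 * *")  # only four fields: invalid
```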
@@ -67,6 +67,28 @@ def test_get_configs_by_description(
         assert config["description"] == description
 
 
+def test_get_configs_by_schedule_true(crawler_auth_headers, default_org_id, crawler_crawl_id):
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?schedule=True",
+        headers=crawler_auth_headers
+    )
+    data = r.json()
+    assert data["total"] == 1
+    workflow = data["items"][0]
+    assert workflow.get("schedule") not in ("", None)
+
+
+def test_get_configs_by_schedule_false(crawler_auth_headers, default_org_id, crawler_crawl_id):
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?schedule=False",
+        headers=crawler_auth_headers
+    )
+    data = r.json()
+    assert data["total"] >= 1
+    for config in data["items"]:
+        assert config.get("schedule") in ("", None)
+
+
 def test_ensure_crawl_and_admin_user_crawls(
     default_org_id, crawler_auth_headers, crawler_crawl_id, admin_crawl_id
 ):
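Note the asymmetry in the assertions: `test_add_crawl_config` above gives exactly one workflow a schedule, so the `schedule=True` case expects `total == 1`, while the `schedule=False` case only requires at least one unscheduled workflow. To run just these tests, a sketch assuming pytest is the runner, as the fixture-style signatures suggest:

```python
# Sketch: run only the schedule-filter tests from this suite via pytest's API.
import pytest

raise SystemExit(pytest.main(["-k", "schedule", "-v"]))
```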