From a640f58657422cd4d40d6d8551214c05f0f23745 Mon Sep 17 00:00:00 2001
From: Ilya Kreymer
Date: Sat, 8 Jul 2023 17:16:11 -0700
Subject: [PATCH] Tests: fix test get crawl loop (#967)

* tests: add sleep() between all looping get_crawl() calls to avoid tight
  request loop, also remove unneeded loop

will likely fix occasional '504 timeout' test failures where frontend is
overwhelmed with /replay.json requests
---
 backend/test/test_stop_cancel_crawl.py              |  3 +++
 backend/test_nightly/test_concurrent_crawl_limit.py | 13 ++++++-------
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/backend/test/test_stop_cancel_crawl.py b/backend/test/test_stop_cancel_crawl.py
index e4b98e75..225840fd 100644
--- a/backend/test/test_stop_cancel_crawl.py
+++ b/backend/test/test_stop_cancel_crawl.py
@@ -49,6 +49,7 @@ def test_cancel_crawl(default_org_id, crawler_auth_headers):
     data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
 
     while data["state"] in ("running", "waiting_capacity"):
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
 
     assert data["state"] == "canceled"
@@ -88,6 +89,7 @@ def test_start_crawl_and_stop_immediately(
     assert r.json()["lastCrawlStopping"] == True
 
     while data["state"] in ("starting", "running", "waiting_capacity"):
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
 
     assert data["state"] in ("canceled", "partial_complete")
@@ -148,6 +150,7 @@ def test_stop_crawl_partial(
     assert r.json()["lastCrawlStopping"] == True
 
     while data["state"] == "running":
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
 
     assert data["state"] in ("partial_complete", "complete")
diff --git a/backend/test_nightly/test_concurrent_crawl_limit.py b/backend/test_nightly/test_concurrent_crawl_limit.py
index 7a1fc4ec..3f0e680c 100644
--- a/backend/test_nightly/test_concurrent_crawl_limit.py
+++ b/backend/test_nightly/test_concurrent_crawl_limit.py
@@ -99,10 +99,9 @@ def run_crawl(org_id, headers):
 
 
 def get_crawl_status(org_id, crawl_id, headers):
-    while True:
-        r = requests.get(
-            f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
-            headers=headers,
-        )
-        data = r.json()
-        return data["state"]
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
+        headers=headers,
+    )
+    data = r.json()
+    return data["state"]
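
For context, the throttled polling pattern these changes converge on looks roughly like the sketch below. It is a minimal illustration only: get_crawl() mirrors the helper called in test_stop_cancel_crawl.py and API_PREFIX matches the constant used in the nightly test, but the wait_for_crawl_state() wrapper, the local URL value, and the 5-second default interval are assumptions for illustration, not code from the repository.

# Sketch of the throttled polling pattern used by the updated tests.
# Assumptions: API_PREFIX points at a local test backend; get_crawl()
# mirrors the test helper; wait_for_crawl_state() is a hypothetical
# wrapper added here for illustration.
import time

import requests

API_PREFIX = "http://localhost:30870/api"  # assumed local test endpoint


def get_crawl(org_id, headers, crawl_id):
    # Fetch the current crawl state from the /replay.json endpoint.
    r = requests.get(
        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
        headers=headers,
    )
    return r.json()


def wait_for_crawl_state(org_id, headers, crawl_id, active_states, interval=5):
    # Sleep between requests so the frontend is not hit with a tight loop
    # of /replay.json calls (the suspected cause of the 504 timeouts).
    data = get_crawl(org_id, headers, crawl_id)
    while data["state"] in active_states:
        time.sleep(interval)
        data = get_crawl(org_id, headers, crawl_id)
    return data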