Tests: fix test get crawl loop (#967)

* tests: add sleep() between all looping get_crawl() calls to avoid a tight request loop; also remove an unneeded loop.
This will likely fix occasional '504 timeout' test failures where the frontend is overwhelmed with /replay.json requests.
Ilya Kreymer 2023-07-08 17:16:11 -07:00 committed by GitHub
parent d9e73fcbc3
commit a640f58657
2 changed files with 9 additions and 7 deletions
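
All three tests changed below follow the same pattern after this commit: re-fetch the crawl record in a loop, but sleep between requests so /replay.json is not hit back-to-back. A minimal sketch of that pattern as a standalone helper (hypothetical; the tests inline the loop rather than defining such a helper):

    import time

    def wait_for_crawl_state(org_id, headers, crawl_id, active_states, poll_secs=5):
        # Poll get_crawl() until the crawl leaves the given active states,
        # sleeping between requests to avoid a tight request loop.
        data = get_crawl(org_id, headers, crawl_id)
        while data["state"] in active_states:
            time.sleep(poll_secs)
            data = get_crawl(org_id, headers, crawl_id)
        return data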


@@ -49,6 +49,7 @@ def test_cancel_crawl(default_org_id, crawler_auth_headers):
     data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
     while data["state"] in ("running", "waiting_capacity"):
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
     assert data["state"] == "canceled"
@@ -88,6 +89,7 @@ def test_start_crawl_and_stop_immediately(
     assert r.json()["lastCrawlStopping"] == True
     while data["state"] in ("starting", "running", "waiting_capacity"):
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
     assert data["state"] in ("canceled", "partial_complete")
@@ -148,6 +150,7 @@ def test_stop_crawl_partial(
     assert r.json()["lastCrawlStopping"] == True
     while data["state"] == "running":
+        time.sleep(5)
         data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)
     assert data["state"] in ("partial_complete", "complete")


@@ -99,10 +99,9 @@ def run_crawl(org_id, headers):
 def get_crawl_status(org_id, crawl_id, headers):
-    while True:
-        r = requests.get(
-            f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
-            headers=headers,
-        )
-        data = r.json()
-        return data["state"]
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
+        headers=headers,
+    )
+    data = r.json()
+    return data["state"]
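
With the while True loop removed, get_crawl_status() performs a single request and returns the current state. A caller that needs to wait for a terminal state would poll it explicitly, sleeping between requests, along the same lines as the tests above (a sketch, not code from this commit):

    import time

    state = get_crawl_status(org_id, crawl_id, headers)
    while state in ("starting", "running", "waiting_capacity"):
        time.sleep(5)
        state = get_crawl_status(org_id, crawl_id, headers)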