From b3b1e0d7d85e4c549f8c3abea4bb50416c27ec5f Mon Sep 17 00:00:00 2001
From: Tessa Walsh
Date: Thu, 21 Mar 2024 20:18:27 -0400
Subject: [PATCH] Fix intermittent crawl timeout test failure (#1621)

Fixes #1620

This replaces the fixed 60-second wait with polling, allowing up to 240
seconds (24 polls at 10-second intervals) for the crawl to complete,
which should be sufficient given how intermittently the failure has been
happening. Can increase it further if needed.
---
 backend/test_nightly/test_crawl_timeout.py | 23 ++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/backend/test_nightly/test_crawl_timeout.py b/backend/test_nightly/test_crawl_timeout.py
index 9e325123..51e7ec4c 100644
--- a/backend/test_nightly/test_crawl_timeout.py
+++ b/backend/test_nightly/test_crawl_timeout.py
@@ -15,17 +15,20 @@ def test_crawl_timeout(admin_auth_headers, default_org_id, timeout_crawl):
     data = r.json()
     assert data["state"] in ("starting", "running")
 
-    # Wait some time to let crawl start, hit timeout, and gracefully stop
-    time.sleep(60)
+    attempts = 0
+    while True:
+        # Try for up to 4 minutes before failing
+        if attempts > 24:
+            assert False
 
-    # Verify crawl was stopped
-    r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{timeout_crawl}/replay.json",
-        headers=admin_auth_headers,
-    )
-    assert r.status_code == 200
-    data = r.json()
-    assert data["state"] == "complete"
+        r = requests.get(
+            f"{API_PREFIX}/orgs/{default_org_id}/crawls/{timeout_crawl}/replay.json",
+            headers=admin_auth_headers,
+        )
+        if r.json()["state"] == "complete":
+            break
+        time.sleep(10)
+        attempts += 1
 
 
 def test_crawl_files_replicated(admin_auth_headers, default_org_id, timeout_crawl):