* Rename archives to orgs and aid to oid on backend
* Rename archive to org and aid to oid in frontend
* Remove translation artifact
* Rename team -> organization
* Add database migrations and run once on startup
* This commit also applies the new by_one_worker decorator to other asyncio tasks to prevent heavy tasks from being run in each worker (sketched below)
* Run black, pylint, and husky via pre-commit
* Set db version and use in migrations
* Update and prepare database in single task
* Migrate k8s configmaps
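The by_one_worker decorator itself is not shown in this change, so the following is only a minimal sketch of how a startup task could be gated to a single worker process, assuming a Unix file-lock approach; the lock path and the update_and_prepare_db helper are hypothetical names used for illustration, and the actual implementation may differ.

import asyncio
import fcntl
import functools


def by_one_worker(lock_path):
    """Run the decorated coroutine only in the worker that wins the file lock."""

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            # Keep the file handle open so the lock is held while the task runs.
            lock_file = open(lock_path, "w")
            try:
                # Non-blocking exclusive lock: exactly one process succeeds.
                fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except BlockingIOError:
                # Another worker already owns this task; skip it in this process.
                lock_file.close()
                return None
            try:
                return await func(*args, **kwargs)
            finally:
                lock_file.close()

        return wrapper

    return decorator


@by_one_worker("/tmp/btrix-db-update.lock")  # hypothetical lock path
async def update_and_prepare_db():
    # Placeholder for the single startup task that sets the db version
    # and runs migrations once, rather than once per worker.
    await asyncio.sleep(0)

The updated tests below exercise the renamed /orgs/{oid}/crawls endpoints and assert that each returned crawl carries the new oid field.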
import requests

from .conftest import API_PREFIX


def test_admin_get_org_crawls(admin_auth_headers, default_org_id, admin_crawl_id):
    """Admin can list all crawls in the org; each crawl reports the org's oid."""
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawls", headers=admin_auth_headers
    )
    data = r.json()
    crawls = data["crawls"]
    crawl_ids = []
    assert len(crawls) > 0
    for crawl in crawls:
        assert crawl["oid"] == default_org_id
        crawl_ids.append(crawl["id"])
    assert admin_crawl_id in crawl_ids


def test_viewer_get_org_crawls(viewer_auth_headers, default_org_id, admin_crawl_id):
    """Viewer can also list org crawls, including the admin's crawl."""
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawls", headers=viewer_auth_headers
    )
    data = r.json()
    crawls = data["crawls"]
    crawl_ids = []
    assert len(crawls) > 0
    for crawl in crawls:
        assert crawl["oid"] == default_org_id
        crawl_ids.append(crawl["id"])
    assert admin_crawl_id in crawl_ids


def test_viewer_get_crawl(viewer_auth_headers, default_org_id, admin_crawl_id):
    """Viewer can fetch a single crawl by id and sees the new oid field."""
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
        headers=viewer_auth_headers,
    )
    data = r.json()
    assert data["id"] == admin_crawl_id
    assert data["oid"] == default_org_id


def test_viewer_get_crawl_replay(viewer_auth_headers, default_org_id, admin_crawl_id):
    """Viewer can fetch replay.json for a crawl, which includes its resources."""
    r = requests.get(
        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}/replay.json",
        headers=viewer_auth_headers,
    )
    data = r.json()
    assert data["id"] == admin_crawl_id
    assert data["oid"] == default_org_id
    assert data["resources"]