Add notes to crawl and crawl updates (#587)

Tessa Walsh 2023-02-08 21:36:22 -05:00 committed by GitHub
parent 7463becdff
commit ce8f426978
2 changed files with 21 additions and 8 deletions


@@ -86,6 +86,8 @@ class Crawl(BaseMongoModel):
     colls: Optional[List[str]] = []
     tags: Optional[List[str]] = []
 
+    notes: Optional[str]
+
 
 # ============================================================================
 class CrawlOut(Crawl):
@@ -124,6 +126,8 @@ class ListCrawlOut(BaseMongoModel):
     colls: Optional[List[str]] = []
     tags: Optional[List[str]] = []
 
+    notes: Optional[str]
+
 
 # ============================================================================
 class ListCrawls(BaseModel):
@@ -149,9 +153,10 @@ class CrawlCompleteIn(BaseModel):
 
 # ============================================================================
 class UpdateCrawl(BaseModel):
-    """Update crawl tags"""
+    """Update crawl"""
 
     tags: Optional[List[str]] = []
+    notes: Optional[str]
 
 
 # ============================================================================
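With notes on UpdateCrawl, the existing PATCH endpoint now accepts both fields in a single body. A quick sketch of the two payload shapes the updated tests further below exercise (values illustrative, written as Python literals for requests' json= argument):

    # set or replace tags and notes together
    {"tags": ["wr-test-1-updated"], "notes": "Lorem ipsum test note."}

    # clear both; the explicit None for notes is what makes deletion possible
    {"tags": [], "notes": None}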
@@ -376,8 +381,8 @@ class CrawlOps:
         return False
 
     async def update_crawl(self, crawl_id: str, org: Organization, update: UpdateCrawl):
-        """Update existing crawl (tags only for now)"""
-        query = update.dict(exclude_unset=True, exclude_none=True)
+        """Update existing crawl (tags and notes only for now)"""
+        query = update.dict(exclude_unset=True)
 
         if len(query) == 0:
             raise HTTPException(status_code=400, detail="no_update_data")
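Dropping exclude_none=True here is what lets an explicit notes of None reach the database and clear the field. A minimal standalone sketch of the distinction, assuming Pydantic v1 semantics for .dict() (which this call suggests):

    from typing import List, Optional

    from pydantic import BaseModel


    class UpdateCrawl(BaseModel):
        tags: Optional[List[str]] = []
        notes: Optional[str]


    update = UpdateCrawl(notes=None)  # client explicitly clears notes

    # exclude_unset keeps every field the client actually sent, even if None
    print(update.dict(exclude_unset=True))
    # -> {'notes': None}

    # exclude_none would silently drop the clear request
    print(update.dict(exclude_unset=True, exclude_none=True))
    # -> {}

    # an empty body still yields an empty query, hitting the 400 no_update_data path
    print(UpdateCrawl().dict(exclude_unset=True))
    # -> {}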


@@ -98,7 +98,7 @@ def test_verify_wacz():
     assert '"https://webrecorder.net/"' in pages
 
 
-def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
+def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
@@ -106,13 +106,19 @@ def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == ["wr-test-1", "wr-test-2"]
+    # Add exception handling for old crawls without notes field
+    try:
+        assert not data["notes"]
+    except KeyError:
+        pass
 
-    # Submit patch request to update tags
+    # Submit patch request to update tags and notes
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
+    UPDATED_NOTES = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS},
+        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES},
     )
     assert r.status_code == 200
     data = r.json()
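The try/except above tolerates crawl documents created before the notes field existed, where the key is absent from the API response entirely. A hypothetical, more compact alternative would be dict.get, which returns None for missing keys:

    # covers both old crawls (key missing) and new crawls with empty notes
    assert not data.get("notes")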
@@ -126,12 +132,13 @@ def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
+    assert data["notes"] == UPDATED_NOTES
 
-    # Verify deleting all tags works as well
+    # Verify deleting works as well
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": []},
+        json={"tags": [], "notes": None},
     )
     assert r.status_code == 200
@@ -142,3 +149,4 @@ def test_update_tags(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert data["tags"] == []
+    assert not data["notes"]