Rename notes to description in frontend and backend (#1011)
- Rename crawl notes to description
- Add migration renaming notes -> description
- Stop inheriting workflow description in crawl
- Update frontend to replace crawl/upload notes with description
- Remove setting of config description from crawl list
- Adjust tests for changes
parent 4bea7565bc
commit c21153255a
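To make the API-visible effect of the rename concrete, here is a minimal sketch (not part of the diff below) of how a client updates a crawl after this change: the PATCH body now carries "description" where it previously carried "notes". The base URL, org ID, crawl ID, and token are placeholders; the request shape follows the updated tests further down.

import requests

API_PREFIX = "https://btrix.example.com/api"      # placeholder base URL
ORG_ID = "YOUR_ORG_ID"                            # placeholder
CRAWL_ID = "YOUR_CRAWL_ID"                        # placeholder
headers = {"Authorization": "Bearer YOUR_TOKEN"}  # placeholder token

# Previously: json={"notes": "..."}; after this change the field is "description".
r = requests.patch(
    f"{API_PREFIX}/orgs/{ORG_ID}/crawls/{CRAWL_ID}",
    headers=headers,
    json={"description": "Crawl of example.com for the Q3 archive"},
)
assert r.status_code == 200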
@@ -200,9 +200,6 @@ class BaseCrawlOps:
         if not crawl.name:
             crawl.name = config.name
 
-        if not crawl.description:
-            crawl.description = config.description
-
         if config.config.seeds:
             if add_first_seed:
                 first_seed = config.config.seeds[0]
@@ -136,11 +136,6 @@ class CrawlOps(BaseCrawlOps):
                 },
             },
             {"$set": {"name": {"$arrayElemAt": ["$crawlConfig.name", 0]}}},
-            {
-                "$set": {
-                    "description": {"$arrayElemAt": ["$crawlConfig.description", 0]}
-                }
-            },
         ]
 
         if not resources:
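For context on the hunk above: the crawl-list aggregation joins the workflow config into a one-element crawlConfig array (presumably via an earlier $lookup stage not shown here), and the removed $set stage copied the workflow's description onto each crawl. A rough, self-contained illustration of what that stage did, with made-up document values:

# Shape of a crawl document after the workflow config has been joined in.
crawl_after_lookup = {
    "_id": "crawl-1",
    "description": "Set directly on the crawl",
    "crawlConfig": [{"name": "Weekly crawl", "description": "Workflow description"}],
}

# The removed stage, as data; $arrayElemAt picks element 0 of the joined array.
removed_stage = {
    "$set": {"description": {"$arrayElemAt": ["$crawlConfig.description", 0]}}
}

# Roughly equivalent Python: the workflow's description overwrote the crawl's own.
crawl_after_lookup["description"] = crawl_after_lookup["crawlConfig"][0]["description"]
print(crawl_after_lookup["description"])  # "Workflow description"

Dropping the stage means a crawl's description is whatever was set on the crawl itself, matching the "stop inheriting workflow description" bullet in the commit message.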
@@ -15,7 +15,7 @@ from pymongo.errors import InvalidName
 from .migrations import BaseMigration
 
 
-CURR_DB_VERSION = "0011"
+CURR_DB_VERSION = "0012"
 
 
 # ============================================================================
@@ -0,0 +1,27 @@
+"""
+Migration 0012 - Notes to description
+"""
+from btrixcloud.migrations import BaseMigration
+
+
+MIGRATION_VERSION = "0012"
+
+
+class Migration(BaseMigration):
+    """Migration class."""
+
+    def __init__(self, mdb, migration_version=MIGRATION_VERSION):
+        super().__init__(mdb, migration_version)
+
+    async def migrate_up(self):
+        """Perform migration up.
+
+        Rename crawl notes field to description.
+        """
+        # pylint: disable=duplicate-code
+        crawls = self.mdb["crawls"]
+        try:
+            await crawls.update_many({}, {"$rename": {"notes": "description"}})
+        # pylint: disable=broad-exception-caught
+        except Exception as err:
+            print(f"Error renaming crawl notes to description: {err}", flush=True)
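The migration above relies on MongoDB's $rename update operator, which moves a field's value to a new key on every matched document. A minimal synchronous sketch of the same operation with pymongo (the migration itself uses Motor's async update_many); the connection URL, database name, and sample document are placeholders:

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # placeholder URL
crawls = client["btrix_example"]["crawls"]         # placeholder database name

crawls.insert_one({"_id": "crawl-1", "notes": "Hand-written crawl notes"})

# Same operator the migration applies to every crawl document.
crawls.update_many({}, {"$rename": {"notes": "description"}})

print(crawls.find_one({"_id": "crawl-1"}))
# {'_id': 'crawl-1', 'description': 'Hand-written crawl notes'}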
@@ -302,7 +302,7 @@ class BaseCrawl(BaseMongoModel):
 
     files: Optional[List[CrawlFile]] = []
 
-    notes: Optional[str]
+    description: Optional[str]
 
     errors: Optional[List[str]] = []
 
@@ -342,8 +342,6 @@ class CrawlOut(BaseMongoModel):
 
     tags: Optional[List[str]] = []
 
-    notes: Optional[str]
-
     errors: Optional[List[str]]
 
     collections: Optional[List[UUID4]] = []
@@ -351,7 +349,6 @@ class CrawlOut(BaseMongoModel):
     # automated crawl fields
     cid: Optional[UUID4]
     name: Optional[str]
-    description: Optional[str]
     firstSeed: Optional[str]
     seedCount: Optional[int]
     profileName: Optional[str]
@@ -372,7 +369,7 @@ class UpdateCrawl(BaseModel):
     """Update crawl"""
 
     tags: Optional[List[str]] = []
-    notes: Optional[str]
+    description: Optional[str]
 
 
 # ============================================================================
@@ -46,7 +46,7 @@ class UploadOps(BaseCrawlOps):
         stream,
         filename: str,
         name: Optional[str],
-        notes: Optional[str],
+        description: Optional[str],
         collections: Optional[List[UUID4]],
         tags: Optional[List[str]],
         org: Organization,
@@ -96,7 +96,7 @@ class UploadOps(BaseCrawlOps):
                 print("replace file deletion failed", exc)
 
         return await self._create_upload(
-            files, name, notes, collections, tags, id_, org, user
+            files, name, description, collections, tags, id_, org, user
         )
 
     # pylint: disable=too-many-arguments, too-many-locals
@@ -104,7 +104,7 @@ class UploadOps(BaseCrawlOps):
         self,
         uploads: List[UploadFile],
         name: Optional[str],
-        notes: Optional[str],
+        description: Optional[str],
         collections: Optional[List[UUID4]],
         tags: Optional[List[str]],
         org: Organization,
@@ -125,11 +125,11 @@ class UploadOps(BaseCrawlOps):
             files.append(file_reader.file_prep.get_crawl_file())
 
         return await self._create_upload(
-            files, name, notes, collections, tags, id_, org, user
+            files, name, description, collections, tags, id_, org, user
         )
 
     async def _create_upload(
-        self, files, name, notes, collections, tags, id_, org, user
+        self, files, name, description, collections, tags, id_, org, user
     ):
         now = dt_now()
         # ts_now = now.strftime("%Y%m%d%H%M%S")
@@ -145,7 +145,7 @@ class UploadOps(BaseCrawlOps):
         uploaded = UploadedCrawl(
             id=crawl_id,
             name=name or "New Upload @ " + str(now),
-            notes=notes,
+            description=description,
             collections=collection_uuids,
             tags=tags,
             userid=user.id,
@@ -240,14 +240,14 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
     async def upload_formdata(
         uploads: List[UploadFile] = File(...),
         name: Optional[str] = "",
-        notes: Optional[str] = "",
+        description: Optional[str] = "",
         collections: Optional[str] = "",
         tags: Optional[str] = "",
         org: Organization = Depends(org_crawl_dep),
         user: User = Depends(user_dep),
     ):
         name = unquote(name)
-        notes = unquote(notes)
+        description = unquote(description)
         colls_list = []
         if collections:
             colls_list = unquote(collections).split(",")
@@ -257,7 +257,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
             tags_list = unquote(tags).split(",")
 
         return await ops.upload_formdata(
-            uploads, name, notes, colls_list, tags_list, org, user
+            uploads, name, description, colls_list, tags_list, org, user
         )
 
     @app.put("/orgs/{oid}/uploads/stream", tags=["uploads"])
@@ -265,7 +265,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
         request: Request,
         filename: str,
         name: Optional[str] = "",
-        notes: Optional[str] = "",
+        description: Optional[str] = "",
         collections: Optional[str] = "",
         tags: Optional[str] = "",
         replaceId: Optional[str] = "",
@@ -273,7 +273,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
         user: User = Depends(user_dep),
     ):
         name = unquote(name)
-        notes = unquote(notes)
+        description = unquote(description)
         colls_list = []
         if collections:
             colls_list = unquote(collections).split(",")
@@ -286,7 +286,7 @@ def init_uploads_api(app, mdb, users, crawl_manager, crawl_configs, orgs, user_d
             request.stream(),
             filename,
             name,
-            notes,
+            description,
             colls_list,
             tags_list,
             org,
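Putting the uploads changes together, here is a sketch of calling the stream endpoint after this PR: the query string carries description instead of notes, and values are percent-encoded since the handler unquotes name and description. Base URL, token, and file path are placeholders; the shape mirrors the updated upload test below.

import requests

API_PREFIX = "https://btrix.example.com/api"      # placeholder base URL
ORG_ID = "YOUR_ORG_ID"                            # placeholder
headers = {"Authorization": "Bearer YOUR_TOKEN"}  # placeholder token

url = (
    f"{API_PREFIX}/orgs/{ORG_ID}/uploads/stream"
    "?filename=test.wacz"
    "&name=My%20Upload"
    "&description=Testing%0AData"   # was "notes=..." before this change
    "&tags=one%2Ctwo"
)

with open("example.wacz", "rb") as fh:  # placeholder local WACZ file
    r = requests.put(url, headers=headers, data=fh)
assert r.status_code == 200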
@@ -165,14 +165,23 @@ def test_get_crawls_by_description(
     crawler_auth_headers, default_org_id, crawler_crawl_id
 ):
     description = "crawler test crawl"
 
+    # Give crawl a description
+    r = requests.patch(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawler_crawl_id}",
+        headers=crawler_auth_headers,
+        json={"description": description},
+    )
+    assert r.status_code == 200
+
     encoded_description = urllib.parse.quote(description)
     r = requests.get(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls?description={encoded_description}",
         headers=crawler_auth_headers,
     )
-    assert r.json()["total"] >= 1
-    for crawl in r.json()["items"]:
-        assert crawl["description"] == description
+    data = r.json()
+    assert data["total"] == 1
+    assert data["items"][0]["description"] == description
 
 
 def test_get_crawls_by_collection_id(
@@ -81,7 +81,6 @@ def test_crawl_info(admin_auth_headers, default_org_id, admin_crawl_id):
     data = r.json()
     assert data["fileSize"] == wacz_size
     assert data["fileCount"] == 1
-    assert data["description"] == "Admin Test Crawl description"
 
 
 def test_crawls_include_seed_info(admin_auth_headers, default_org_id, admin_crawl_id):
@@ -188,19 +187,14 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == ["wr-test-1", "wr-test-2"]
-    # Add exception handling for old crawls without notes field
-    try:
-        assert not data["notes"]
-    except KeyError:
-        pass
 
-    # Submit patch request to update tags and notes
+    # Submit patch request to update tags and description
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
-    UPDATED_NOTES = "Lorem ipsum test note."
+    UPDATED_DESC = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES},
+        json={"tags": UPDATED_TAGS, "description": UPDATED_DESC},
     )
     assert r.status_code == 200
     data = r.json()
@@ -214,13 +208,13 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
-    assert data["notes"] == UPDATED_NOTES
+    assert data["description"] == UPDATED_DESC
 
     # Verify deleting works as well
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/crawls/{admin_crawl_id}",
         headers=admin_auth_headers,
-        json={"tags": [], "notes": None},
+        json={"tags": [], "description": None},
     )
     assert r.status_code == 200
 
@@ -231,7 +225,7 @@ def test_update_crawl(admin_auth_headers, default_org_id, admin_crawl_id):
     assert r.status_code == 200
     data = r.json()
     assert data["tags"] == []
-    assert not data["notes"]
+    assert not data["description"]
 
 
 def test_delete_crawls_crawler(
@@ -16,7 +16,7 @@ curr_dir = os.path.dirname(os.path.realpath(__file__))
 def test_upload_stream(admin_auth_headers, default_org_id, uploads_collection_id):
     with open(os.path.join(curr_dir, "data", "example.wacz"), "rb") as fh:
         r = requests.put(
-            f"{API_PREFIX}/orgs/{default_org_id}/uploads/stream?filename=test.wacz&name=My%20Upload&notes=Testing%0AData&collections={uploads_collection_id}&tags=one%2Ctwo",
+            f"{API_PREFIX}/orgs/{default_org_id}/uploads/stream?filename=test.wacz&name=My%20Upload&description=Testing%0AData&collections={uploads_collection_id}&tags=one%2Ctwo",
             headers=admin_auth_headers,
             data=read_in_chunks(fh),
         )
@@ -45,7 +45,7 @@ def test_list_stream_upload(admin_auth_headers, default_org_id, uploads_collecti
 
     assert found
     assert found["name"] == "My Upload"
-    assert found["notes"] == "Testing\nData"
+    assert found["description"] == "Testing\nData"
     assert found["collections"] == [uploads_collection_id]
     assert sorted(found["tags"]) == ["one", "two"]
     assert "files" not in found
@@ -255,16 +255,16 @@ def test_update_upload_metadata(admin_auth_headers, default_org_id):
     data = r.json()
     assert data["name"] == "My Upload Updated"
     assert not data["tags"]
-    assert not data["notes"]
+    assert not data["description"]
 
-    # Submit patch request to update name, tags, and notes
+    # Submit patch request to update name, tags, and description
     UPDATED_NAME = "New Upload Name"
     UPDATED_TAGS = ["wr-test-1-updated", "wr-test-2-updated"]
-    UPDATED_NOTES = "Lorem ipsum test note."
+    UPDATED_DESC = "Lorem ipsum test note."
     r = requests.patch(
         f"{API_PREFIX}/orgs/{default_org_id}/uploads/{upload_id}",
         headers=admin_auth_headers,
-        json={"tags": UPDATED_TAGS, "notes": UPDATED_NOTES, "name": UPDATED_NAME},
+        json={"tags": UPDATED_TAGS, "description": UPDATED_DESC, "name": UPDATED_NAME},
     )
     assert r.status_code == 200
     data = r.json()
@@ -278,7 +278,7 @@ def test_update_upload_metadata(admin_auth_headers, default_org_id):
     assert r.status_code == 200
     data = r.json()
     assert sorted(data["tags"]) == sorted(UPDATED_TAGS)
-    assert data["notes"] == UPDATED_NOTES
+    assert data["description"] == UPDATED_DESC
     assert data["name"] == UPDATED_NAME
 
 
@@ -59,7 +59,7 @@ export class CrawlMetadataEditor extends LiteElement {
     threshold: 0.2, // stricter; default is 0.6
   });
 
-  private validateCrawlNotesMax = maxLengthValidator(500);
+  private validateCrawlDescriptionMax = maxLengthValidator(500);
 
   willUpdate(changedProperties: Map<string, any>) {
     if (changedProperties.has("open") && this.open) {
@@ -88,27 +88,26 @@ export class CrawlMetadataEditor extends LiteElement {
   private renderEditMetadata() {
     if (!this.crawl) return;
 
-    const { helpText, validate } = this.validateCrawlNotesMax;
+    const { helpText, validate } = this.validateCrawlDescriptionMax;
     return html`
       <form
         id="crawlDetailsForm"
        @submit=${this.onSubmitMetadata}
        @reset=${this.requestClose}
       >
-        ${this.includeName ? html`
-        <div class="mb-3">
-          <sl-input
-            label="Name"
-            name="name"
-            value="${this.crawl.name}">
-          </sl-input>
-        </div>
-        ` : ``}
+        ${this.includeName
+          ? html`
+              <div class="mb-3">
+                <sl-input label="Name" name="name" value="${this.crawl.name}">
+                </sl-input>
+              </div>
+            `
+          : ``}
         <sl-textarea
           class="mb-3 with-max-help-text"
-          name="crawlNotes"
-          label=${msg("Notes")}
-          value=${this.crawl.notes || ""}
+          name="crawlDescription"
+          label=${msg("Description")}
+          value=${this.crawl.description || ""}
           rows="3"
           autocomplete="off"
           resize="auto"
@@ -172,10 +171,11 @@ export class CrawlMetadataEditor extends LiteElement {
 
     const formEl = e.target as HTMLFormElement;
     if (!(await this.checkFormValidity(formEl))) return;
-    const { crawlNotes, name } = serialize(formEl);
+    const { crawlDescription, name } = serialize(formEl);
 
-    if ((!this.includeName || name === this.crawl.name ) &&
-      crawlNotes === (this.crawl!.notes ?? "") &&
+    if (
+      (!this.includeName || name === this.crawl.name) &&
+      crawlDescription === (this.crawl!.description ?? "") &&
       JSON.stringify(this.tagsToSave) === JSON.stringify(this.crawl!.tags)
     ) {
       // No changes have been made
@@ -185,7 +185,7 @@ export class CrawlMetadataEditor extends LiteElement {
 
     const params = {
       tags: this.tagsToSave,
-      notes: crawlNotes,
+      description: crawlDescription,
       name,
     };
 
@@ -409,7 +409,7 @@ export class FileUploader extends LiteElement {
     const query = queryString.stringify({
       filename: file.name,
       name,
-      notes: description,
+      description: description,
       collections: this.collectionIds,
       tags: this.tagsToSave,
     });
@@ -245,9 +245,9 @@ export class CrawlDetail extends LiteElement {
             <span class="inline-block align-middle"
               >${isWorkflowArtifact
                 ? msg("Back to Crawl Workflow")
-                : (this.crawl?.type === "upload" ?
-                    msg("Back to All Uploads") : msg("Back to All Crawls"))
-                }</span
+                : this.crawl?.type === "upload"
+                ? msg("Back to All Uploads")
+                : msg("Back to All Crawls")}</span
             >
           </a>
         </div>
@@ -701,14 +701,14 @@ export class CrawlDetail extends LiteElement {
     const noneText = html`<span class="text-neutral-300">${msg("None")}</span>`;
     return html`
       <btrix-desc-list>
-        <btrix-desc-list-item label=${msg("Notes")}>
+        <btrix-desc-list-item label=${msg("Description")}>
           ${when(
             this.crawl,
             () =>
               when(
-                this.crawl!.notes?.length,
+                this.crawl!.description?.length,
                 () => html`<pre class="whitespace-pre-line font-sans">
-${this.crawl?.notes}
+${this.crawl?.description}
                 </pre
                 >`,
                 () => noneText
@@ -119,7 +119,7 @@ export type Crawl = CrawlConfig & {
   fileCount?: number;
   fileSize?: number;
   completions?: number;
-  notes: string | null;
+  description: string | null;
   firstSeed: string;
   seedCount: number;
   stopping: boolean;