browsertrix/backend/btrixcloud/migrations/migration_0018_usernames.py
Tessa Walsh 14189b7cfb
Add crawl pages and related API endpoints (#1516)
Fixes #1502 

- Adds pages to database as they get added to Redis during crawl
- Adds a migration that backfills pages into the database for older crawls
from the pages.jsonl and extraPages.jsonl files in each WACZ
- Adds GET, list GET, and PATCH update endpoints for pages
- Adds POST (add), PATCH (update), and POST (delete) endpoints for page notes;
each note carries its own id, timestamp, and user info in addition to its text
(see the model sketch after this list)
- Adds page_ops methods for (1) adding resources/URLs to a page and (2)
adding automated heuristics and supplemental info (mime, type, etc.) to a
page, for use in the crawl QA job
- Modifies the `Migration` class to accept kwargs so that ops classes can be
passed in as needed for migrations (a hypothetical runner sketch follows the
file below)
- Deletes WACZ files and pages from database for failed crawls during
crawl_finished process
- Deletes crawl pages when a crawl is deleted
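
A minimal sketch of what a page note document could look like given the fields
described above (own id, timestamp, user info, text); the model name and exact
field names are assumptions for illustration, not the actual btrixcloud schema:

```python
# Hypothetical page-note model; field names are assumptions based on the
# commit description, not the actual btrixcloud models.
from datetime import datetime
from uuid import UUID, uuid4

from pydantic import BaseModel, Field


class PageNote(BaseModel):
    """A single note attached to a page."""

    id: UUID = Field(default_factory=uuid4)  # each note gets its own id
    text: str
    created: datetime = Field(default_factory=datetime.utcnow)  # timestamp
    userid: UUID  # user info stored alongside the text
    userName: str
```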

Note: Requires crawler version 1.0.0 beta3 or later, with support for
`--writePagesToRedis` to populate pages at crawl completion (a hedged sketch
of the backend side of that flow follows). Beta 4 is configured in the test
chart, which should be upgraded to stable 1.0.0 when it's released.
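
For context, here is a hedged sketch of how the backend could consume page
records the crawler writes to Redis; the Redis key name, the page fields, and
the `drain_pages` helper are all assumptions, not the crawler's actual contract:

```python
# Illustrative only: drain crawler-written page records from a Redis list
# into a Mongo collection. Key name and page fields are assumptions.
import json

import redis.asyncio as aioredis


async def drain_pages(redis_url: str, crawl_id: str, pages_coll):
    redis = aioredis.from_url(redis_url, decode_responses=True)
    key = f"{crawl_id}:pages"  # hypothetical key the crawler pushes to
    while True:
        entry = await redis.lpop(key)
        if entry is None:
            break
        page = json.loads(entry)
        await pages_coll.insert_one(
            {
                "crawl_id": crawl_id,
                "url": page.get("url"),
                "title": page.get("title"),
            }
        )
```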

Connected to https://github.com/webrecorder/browsertrix-crawler/pull/464

---------

Co-authored-by: Ilya Kreymer <ikreymer@gmail.com>
2024-02-28 12:11:35 -05:00

91 lines
3.1 KiB
Python

"""
Migration 0018 - Store crawl and workflow userName directly in db
"""
from btrixcloud.migrations import BaseMigration
from btrixcloud.emailsender import EmailSender
from btrixcloud.invites import init_invites
from btrixcloud.users import init_user_manager
MIGRATION_VERSION = "0018"
# pylint: disable=too-many-locals, invalid-name
class Migration(BaseMigration):
"""Migration class."""
# pylint: disable=unused-argument
def __init__(self, mdb, **kwargs):
super().__init__(mdb, migration_version=MIGRATION_VERSION)
async def migrate_up(self):
"""Perform migration up.
Store userName in db for crawls and workflows
"""
mdb_configs = self.mdb["crawl_configs"]
mdb_crawls = self.mdb["crawls"]
email = EmailSender()
invites = init_invites(self.mdb, email)
user_manager = init_user_manager(self.mdb, email, invites)
async for crawl in mdb_crawls.find({}):
crawl_id = crawl["_id"]
if crawl.get("userName"):
continue
try:
user = await user_manager.get_by_id(crawl["userid"])
await mdb_crawls.find_one_and_update(
{"_id": crawl_id},
{"$set": {"userName": user.name}},
)
# pylint: disable=broad-exception-caught
except Exception as err:
print(
f"Unable to update userName for crawl {crawl_id}: {err}", flush=True
)
async for config in mdb_configs.find({}):
cid = config["_id"]
if config.get("createdByName") and config.get("modifiedByName"):
continue
try:
created_by_name = ""
modified_by_name = ""
last_started_by_name = ""
created_user = await user_manager.get_by_id(config["createdBy"])
if created_user:
created_by_name = created_user.name
modified_user = await user_manager.get_by_id(config["modifiedBy"])
if modified_user:
modified_by_name = modified_user.name
last_started_by = config.get("lastStartedBy")
if last_started_by:
last_started_user = await user_manager.get_by_id(last_started_by)
if last_started_user:
last_started_by_name = last_started_user.name
await mdb_configs.find_one_and_update(
{"_id": cid},
{
"$set": {
"createdByName": created_by_name,
"modifiedByName": modified_by_name,
"lastStartedByName": last_started_by_name,
}
},
)
# pylint: disable=broad-exception-caught
except Exception as err:
print(
f"Unable to update usernames for crawlconfig {cid}: {err}",
flush=True,
)
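
As a usage note, a migration runner could take advantage of the kwargs-accepting
constructor like this; `run_migrations`, `page_ops`, and the connection string
are placeholders, not the actual btrixcloud entrypoint (this particular
migration simply ignores extra kwargs):

```python
# Hedged sketch: instantiate and run the migration, passing ops classes
# through kwargs as the commit description allows.
import asyncio

from motor.motor_asyncio import AsyncIOMotorClient


async def run_migrations(page_ops=None):
    mdb = AsyncIOMotorClient("mongodb://localhost:27017")["browsertrix"]  # placeholder
    migration = Migration(mdb, page_ops=page_ops)  # extra kwargs are ignored here
    await migration.migrate_up()


asyncio.run(run_migrations())
```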