browsertrix/backend/btrixcloud/migrations/migration_0022_partial_complete.py
Tessa Walsh be41c48c27
Add extra and gifted execution minutes (#1361)
Fixes #1358 

- Adds `extraExecMinutes` and `giftedExecMinutes` org quotas, which are
not reset monthly but are updateable amounts that carry across months
- Adds `quotaUpdate` field to `Organization` to track when quotas were
updated with timestamp
- Adds `extraExecMinutesAvailable` and `giftedExecMinutesAvailable`
fields to `Organization` to help with tracking available time left
(includes tested migration to initialize these to 0)
- Modifies the org backend to track time across multiple categories, drawing down
monthlyExecSeconds first, then giftedExecSeconds, then extraExecSeconds
(see the sketch after this list). All time is also written into crawlExecSeconds,
which is now the monthly total and also includes any overage time above the quotas
- Updates Dashboard crawling meter to include all types of execution
time if `extraExecMinutes` and/or `giftedExecMinutes` are set above 0
- Updates Dashboard Usage History table to include all types of
execution time (only displaying columns that have data)
- Adds backend nightly test to check handling of quotas and execution
time
- Includes migration to add new fields and copy crawlExecSeconds to
monthlyExecSeconds for previous months
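
The ordering described above can be shown with a minimal sketch. This is not the actual org backend code: `allocate_exec_seconds`, the plain-dict org, and `monthly_quota_secs` are assumptions for illustration, and updating the `*MinutesAvailable` counters is omitted; it only shows which bucket each newly used second is charged to.

```python
def allocate_exec_seconds(org: dict, seconds: int, monthly_quota_secs: int) -> None:
    """Charge newly used execution seconds to the org's buckets in order:
    monthly allowance, then gifted time, then extra time."""
    # Everything counts toward the monthly total, including any overage.
    org["crawlExecSeconds"] += seconds

    # Fill the standard monthly allowance first.
    monthly_room = max(monthly_quota_secs - org["monthlyExecSeconds"], 0)
    used = min(seconds, monthly_room)
    org["monthlyExecSeconds"] += used
    seconds -= used

    # Then draw down gifted time, which carries across months.
    used = min(seconds, org["giftedExecMinutesAvailable"] * 60)
    org["giftedExecSeconds"] += used
    seconds -= used

    # Finally draw down extra time; anything still left is overage,
    # already captured in crawlExecSeconds above.
    used = min(seconds, org["extraExecMinutesAvailable"] * 60)
    org["extraExecSeconds"] += used
```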

Co-authored-by: emma <hi@emma.cafe>
2023-12-07 14:34:37 -05:00

"""
Migration 0022 -- Partial Complete
"""
from btrixcloud.migrations import BaseMigration
MIGRATION_VERSION = "0022"
class Migration(BaseMigration):
"""Migration class."""
def __init__(self, mdb, migration_version=MIGRATION_VERSION):
super().__init__(mdb, migration_version)
async def migrate_up(self):
"""Perform migration up.
Convert partial_complete -> complete, stopped_by_user or stopped_quota_reached
"""
# pylint: disable=duplicate-code
crawls = self.mdb["crawls"]
crawl_configs = self.mdb["crawl_configs"]
await crawls.update_many(
{"state": "partial_complete", "stopping": True},
{"$set": {"state": "stopped_by_user"}},
)
await crawls.update_many(
{"state": "partial_complete", "stopping": {"$ne": True}},
{"$set": {"state": "complete"}},
)
async for config in crawl_configs.find({"lastCrawlState": "partial_complete"}):
crawl = await crawls.find_one({"_id": config.get("lastCrawlId")})
if not crawl:
continue
await crawl_configs.find_one_and_update(
{"_id": config.get("_id")},
{"$set": {"lastCrawlState": crawl.get("state")}},
)