Fixes #1502

- Adds pages to database as they get added to Redis during crawl
- Adds migration to add pages to database for older crawls from `pages.jsonl` and `extraPages.jsonl` files in WACZ (a rough sketch of that file format appears below)
- Adds GET, list GET, and PATCH update endpoints for pages
- Adds POST (add), PATCH, and POST (delete) endpoints for page notes, each with their own id, timestamp, and user info in addition to text
- Adds page_ops methods for 1. adding resources/urls to a page, and 2. adding automated heuristics and supplemental info (mime, type, etc.) to a page (for use in the crawl QA job)
- Modifies the `Migration` class to accept kwargs so that we can pass in ops classes as needed for migrations (see the sketch after the migration file below)
- Deletes WACZ files and pages from the database for failed crawls during the crawl_finished process
- Deletes crawl pages when a crawl is deleted

Note: Requires crawler version 1.0.0 beta3 or later, with support for `--writePagesToRedis` to populate pages at crawl completion. Beta 4 is configured in the test chart, which should be upgraded to stable 1.0.0 when it's released.

Connected to https://github.com/webrecorder/browsertrix-crawler/pull/464

---------

Co-authored-by: Ilya Kreymer <ikreymer@gmail.com>
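As a rough illustration of what the backfill migration has to consume, here is a minimal sketch of reading page records out of a `pages.jsonl`/`extraPages.jsonl` payload. It assumes the crawler's JSON-lines pages format (a format header line followed by one JSON object per page); the field names mentioned in the docstring are assumptions based on the crawler's output, not the exact backend code:

```python
import json
from typing import Iterator


def iter_pages(jsonl_text: str) -> Iterator[dict]:
    """Yield one dict per page from a pages.jsonl/extraPages.jsonl payload.

    Assumes the first line is a format header (e.g.
    {"format": "json-pages-1.0", ...}) and that each later line is a
    page record with fields such as "id", "url", "title", and "ts".
    """
    for line in jsonl_text.splitlines():
        line = line.strip()
        if not line:
            continue
        record = json.loads(line)
        if "format" in record:  # skip the header line
            continue
        yield record
```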
One file changed (Python, 49 lines, 1.4 KiB):
| """
 | |
| Migration 0011 - Remove None CRAWL_TIMEOUT values from configmaps
 | |
| """
 | |
| 
 | |
| import os
 | |
| 
 | |
| from btrixcloud.k8sapi import K8sAPI
 | |
| 
 | |
| from btrixcloud.migrations import BaseMigration
 | |
| 
 | |
| 
 | |
| MIGRATION_VERSION = "0011"
 | |
| 
 | |
| 
 | |
| class Migration(BaseMigration):
 | |
|     """Migration class."""
 | |
| 
 | |
|     # pylint: disable=unused-argument
 | |
|     def __init__(self, mdb, **kwargs):
 | |
|         super().__init__(mdb, migration_version=MIGRATION_VERSION)
 | |
| 
 | |
|     async def migrate_up(self):
 | |
|         """Perform migration up.
 | |
| 
 | |
|         Replace any None values in configmaps for CRAWL_TIMEOUT with 0.
 | |
|         """
 | |
|         k8s_api_instance = K8sAPI()
 | |
|         crawler_namespace = os.environ.get("CRAWLER_NAMESPACE") or "crawlers"
 | |
|         config_maps = await k8s_api_instance.core_api.list_namespaced_config_map(
 | |
|             namespace=crawler_namespace
 | |
|         )
 | |
|         for item in config_maps.items:
 | |
|             try:
 | |
|                 crawl_timeout = item.data["CRAWL_TIMEOUT"]
 | |
|                 if crawl_timeout not in (None, "None"):
 | |
|                     continue
 | |
| 
 | |
|                 item.data["CRAWL_TIMEOUT"] = "0"
 | |
| 
 | |
|                 await k8s_api_instance.core_api.patch_namespaced_config_map(
 | |
|                     name=item.metadata.name, namespace=crawler_namespace, body=item
 | |
|                 )
 | |
| 
 | |
|             # pylint: disable=broad-exception-caught
 | |
|             except Exception as err:
 | |
|                 print(
 | |
|                     f"Error modifying configmap CRAWL_TIMEOUT value: {err}", flush=True
 | |
|                 )
 |
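The `**kwargs` added to `__init__` above is what lets the migration runner inject ops classes into migrations that need them. A minimal sketch of that pattern, where the `page_ops` kwarg and the placeholder version string are illustrative assumptions rather than the exact backend wiring:

```python
from btrixcloud.migrations import BaseMigration


class Migration(BaseMigration):
    """Example migration that consumes an injected ops class."""

    def __init__(self, mdb, **kwargs):
        super().__init__(mdb, migration_version="00XX")  # placeholder version
        # page_ops is a hypothetical injected dependency; the runner
        # would pass in whatever ops classes the migration needs.
        self.page_ops = kwargs.get("page_ops")

    async def migrate_up(self):
        """Backfill pages using the injected ops class, if available."""
        if self.page_ops is None:
            print("page_ops not passed, skipping migration", flush=True)
            return
        # ... call page_ops methods here to add pages for older crawls ...
```

With this shape, the runner can instantiate every migration as `Migration(mdb, **kwargs)` with one shared kwargs dict, and each migration picks out only the ops classes it actually uses, which is why migration 0011 takes `**kwargs` but ignores it.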