- Refactors the dashboard and org profile preview to use the private API endpoint, fixing public collections not showing when the org's visibility is hidden
- Adds additional sorting options for collections
- Adds unique page URL counts for archived items, collections, and organizations to the backend and exposes them in collections (see the sketch below)
- Shows the collection period (i.e. `dateEarliest` to `dateLatest`) in the collections list
- Shows the same collection metadata in private and public views, and updates the private view info bar
- Fixes the "Update Org Profile" action item showing for crawler roles

---------

Co-authored-by: sua yoo <sua@webrecorder.org>
Co-authored-by: sua yoo <sua@suayoo.com>
Co-authored-by: Ilya Kreymer <ikreymer@gmail.com>
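As a rough illustration of the unique page URL counts mentioned above, counting distinct URLs across the archived items in a collection could be done with a single aggregation. This is only a minimal sketch under assumed names: the `pages` collection, its `crawl_id`/`url` fields, and the `count_unique_collection_urls` helper are illustrative, not taken from this PR.

```python
from typing import List


async def count_unique_collection_urls(mdb, crawl_ids: List[str]) -> int:
    """Count distinct page URLs across the crawls in a collection.

    Hypothetical sketch: the "pages" collection and the "crawl_id"/"url"
    field names are assumptions, not confirmed by this diff.
    """
    cursor = mdb["pages"].aggregate(
        [
            {"$match": {"crawl_id": {"$in": crawl_ids}}},  # pages from these crawls
            {"$group": {"_id": "$url"}},  # one group per distinct URL
            {"$count": "uniqueUrls"},  # count the groups
        ]
    )
    results = await cursor.to_list(length=1)
    return results[0]["uniqueUrls"] if results else 0
```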
New migration file added in this PR (Python, 44 lines):
```python
"""
Migration 0040 -- archived item pageCount
"""

from btrixcloud.migrations import BaseMigration


MIGRATION_VERSION = "0040"


class Migration(BaseMigration):
    """Migration class."""

    # pylint: disable=unused-argument
    def __init__(self, mdb, **kwargs):
        super().__init__(mdb, migration_version=MIGRATION_VERSION)

        self.page_ops = kwargs.get("page_ops")

    async def migrate_up(self):
        """Perform migration up.

        Calculate and store pageCount for archived items that don't have it yet
        """
        crawls_mdb = self.mdb["crawls"]

        if self.page_ops is None:
            print(
                "Unable to set pageCount for archived items, missing page_ops",
                flush=True,
            )
            return

        async for crawl_raw in crawls_mdb.find({}):
            crawl_id = crawl_raw["_id"]
            try:
                await self.page_ops.set_archived_item_page_counts(crawl_id)
            # pylint: disable=broad-exception-caught
            except Exception as err:
                print(
                    f"Error saving page counts for archived item {crawl_id}: {err}",
                    flush=True,
                )
```
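The migration above delegates the actual counting to `page_ops.set_archived_item_page_counts`, which is not part of this diff. Below is a minimal sketch of what such a method might do, assuming a `pages` collection keyed by a `crawl_id` field with per-page `url` values, and a `uniquePageCount` field stored alongside `pageCount` on the crawl document; names other than `pageCount` are assumptions.

```python
async def set_archived_item_page_counts(mdb, crawl_id: str) -> None:
    """Compute and store page counts for one archived item (hypothetical sketch)."""
    pages = mdb["pages"]
    # Total number of captured pages in this crawl
    page_count = await pages.count_documents({"crawl_id": crawl_id})
    # Number of distinct page URLs in this crawl
    unique_urls = await pages.distinct("url", {"crawl_id": crawl_id})
    await mdb["crawls"].find_one_and_update(
        {"_id": crawl_id},
        {"$set": {"pageCount": page_count, "uniquePageCount": len(unique_urls)}},
    )
```

With something like this in place, the migration's `migrate_up` loop simply calls it once per crawl document.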