diff --git a/backend/btrixcloud/crawlconfigs.py b/backend/btrixcloud/crawlconfigs.py
index d2b81187..cfd88f52 100644
--- a/backend/btrixcloud/crawlconfigs.py
+++ b/backend/btrixcloud/crawlconfigs.py
@@ -191,6 +191,10 @@ class CrawlConfig(CrawlConfigCore):
     lastCrawlState: Optional[str]
     lastCrawlSize: Optional[int]
 
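+    # lastRun: start time of a crawl while one is running, replaced by the
+    # crawl's finish time once it completes; default sort key for workflow lists
+    # isCrawlRunning: set when a crawl starts, cleared once it finishes and
+    # stats are recalculated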
+    lastRun: Optional[datetime]
+
+    isCrawlRunning: Optional[bool] = False
+
     def get_raw_config(self):
         """serialize config for browsertrix-crawler"""
         return self.config.dict(exclude_unset=True, exclude_none=True)
@@ -198,13 +202,9 @@ class CrawlConfig(CrawlConfigCore):
 
 # ============================================================================
 class CrawlConfigOut(CrawlConfig):
-    """Crawl Config Output, includes currCrawlId of running crawl"""
+    """Crawl Config Output"""
 
-    currCrawlId: Optional[str]
-    currCrawlStartTime: Optional[datetime]
-    currCrawlState: Optional[str]
-    currCrawlSize: Optional[int] = 0
-    currCrawlStopping: Optional[bool] = False
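+    # stopping state of the currently running crawl, if any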
+    lastCrawlStopping: Optional[bool] = False
 
     profileName: Optional[str]
 
@@ -281,6 +281,10 @@ class CrawlConfigOps:
             [("oid", pymongo.ASCENDING), ("tags", pymongo.ASCENDING)]
         )
 
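+        # compound index backing the default lastRun (descending) sort, with
+        # modified as a secondary key so never-run workflows keep a stable order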
+        await self.crawl_configs.create_index(
+            [("lastRun", pymongo.DESCENDING), ("modified", pymongo.DESCENDING)]
+        )
+
         await self.config_revs.create_index([("cid", pymongo.HASHED)])
 
         await self.config_revs.create_index(
@@ -480,7 +484,7 @@ class CrawlConfigOps:
         description: str = None,
         tags: Optional[List[str]] = None,
         schedule: Optional[bool] = None,
-        sort_by: str = None,
+        sort_by: str = "lastRun",
         sort_direction: int = -1,
     ):
         """Get all crawl configs for an organization is a member of"""
@@ -525,12 +529,31 @@ class CrawlConfigOps:
             aggregate.extend([{"$match": {"firstSeed": first_seed}}])
 
         if sort_by:
-            if sort_by not in ("created", "modified", "firstSeed", "lastCrawlTime"):
+            if sort_by not in (
+                "created",
+                "modified",
+                "firstSeed",
+                "lastCrawlTime",
+                "lastCrawlStartTime",
+                "lastRun",
+            ):
                 raise HTTPException(status_code=400, detail="invalid_sort_by")
             if sort_direction not in (1, -1):
                 raise HTTPException(status_code=400, detail="invalid_sort_direction")
 
-            aggregate.extend([{"$sort": {sort_by: sort_direction}}])
+            sort_query = {sort_by: sort_direction}
+
+            # Add modified as a secondary sort key to give a consistent order
+            # to workflows that haven't been run yet.
+            if sort_by in (
+                "firstSeed",
+                "lastCrawlTime",
+                "lastCrawlStartTime",
+                "lastRun",
+            ):
+                sort_query = {sort_by: sort_direction, "modified": sort_direction}
+
+            aggregate.extend([{"$sort": sort_query}])
 
         aggregate.extend(
             [
@@ -641,11 +664,9 @@ class CrawlConfigOps:
         if not crawl:
             return
 
-        crawlconfig.currCrawlId = crawl.id
-        crawlconfig.currCrawlStartTime = crawl.started
-        crawlconfig.currCrawlState = crawl.state
-        crawlconfig.currCrawlSize = crawl.stats.get("size", 0) if crawl.stats else 0
-        crawlconfig.currCrawlStopping = crawl.stopping
+        crawlconfig.lastCrawlState = crawl.state
+        crawlconfig.lastCrawlSize = crawl.stats.get("size", 0) if crawl.stats else 0
+        crawlconfig.lastCrawlStopping = crawl.stopping
 
     async def get_crawl_config_out(self, cid: uuid.UUID, org: Organization):
         """Return CrawlConfigOut, including state of currently running crawl, if active
@@ -890,10 +911,37 @@ async def inc_crawl_count(crawl_configs, cid: uuid.UUID):
     )
 
 
+# ============================================================================
+async def set_config_current_crawl_info(
+    crawl_configs, cid: uuid.UUID, crawl_id: str, crawl_start: datetime
+):
+    """Set current crawl info in config when crawl begins"""
+    result = await crawl_configs.find_one_and_update(
+        {"_id": cid, "inactive": {"$ne": True}},
+        {
+            "$set": {
+                "lastCrawlId": crawl_id,
+                "lastCrawlStartTime": crawl_start,
+                "lastCrawlTime": None,
+                "lastRun": crawl_start,
+                "isCrawlRunning": True,
+            }
+        },
+        return_document=pymongo.ReturnDocument.AFTER,
+    )
+    if result:
+        return True
+    return False
+
+
 # ============================================================================
 # pylint: disable=too-many-locals
 async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID):
-    """re-calculate and update crawl statistics for config"""
+    """Re-calculate and update crawl statistics for config.
+
+    Should only be called from the operator when a crawl completes, or during
+    a migration when no crawls are running.
+    """
     update_query = {
         "crawlCount": 0,
         "totalSize": 0,
@@ -903,6 +951,8 @@ async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID):
         "lastCrawlTime": None,
         "lastCrawlState": None,
         "lastCrawlSize": None,
+        "lastCrawlStopping": False,
+        "isCrawlRunning": False,
     }
 
     match_query = {"cid": cid, "finished": {"$ne": None}}
@@ -912,15 +962,21 @@ async def update_config_crawl_stats(crawl_configs, crawls, cid: uuid.UUID):
         update_query["crawlCount"] = len(results)
 
         last_crawl = results[0]
+
+        last_crawl_finished = last_crawl.get("finished")
+
         update_query["lastCrawlId"] = str(last_crawl.get("_id"))
         update_query["lastCrawlStartTime"] = last_crawl.get("started")
         update_query["lastStartedBy"] = last_crawl.get("userid")
-        update_query["lastCrawlTime"] = last_crawl.get("finished")
+        update_query["lastCrawlTime"] = last_crawl_finished
         update_query["lastCrawlState"] = last_crawl.get("state")
         update_query["lastCrawlSize"] = sum(
             file_.get("size", 0) for file_ in last_crawl.get("files", [])
         )
 
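+        # once the crawl has finished, lastRun moves from its start time to its
+        # finish time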
+        if last_crawl_finished:
+            update_query["lastRun"] = last_crawl_finished
+
         total_size = 0
         for res in results:
             files = res.get("files", [])
diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py
index 362358b5..ee1ce874 100644
--- a/backend/btrixcloud/crawls.py
+++ b/backend/btrixcloud/crawls.py
@@ -18,7 +18,13 @@ from pydantic import BaseModel, UUID4, conint, HttpUrl
 from redis import asyncio as aioredis, exceptions
 import pymongo
 
-from .crawlconfigs import Seed, CrawlConfigCore, CrawlConfig, UpdateCrawlConfig
+from .crawlconfigs import (
+    Seed,
+    CrawlConfigCore,
+    CrawlConfig,
+    UpdateCrawlConfig,
+    set_config_current_crawl_info,
+)
 from .db import BaseMongoModel
 from .orgs import Organization, MAX_CRAWL_SCALE
 from .pagination import DEFAULT_PAGE_SIZE, paginated_format
@@ -536,7 +542,13 @@ class CrawlOps:
 
     async def add_new_crawl(self, crawl_id: str, crawlconfig: CrawlConfig, user: User):
         """initialize new crawl"""
-        return await add_new_crawl(self.crawls, crawl_id, crawlconfig, user.id)
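+        # creating the crawl also marks the workflow as running
+        # (lastCrawlId, lastRun, isCrawlRunning)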
+        new_crawl = await add_new_crawl(self.crawls, crawl_id, crawlconfig, user.id)
+        if not new_crawl:
+            # crawl already exists (duplicate id), nothing to record
+            return False
+        return await set_config_current_crawl_info(
+            self.crawl_configs.crawl_configs,
+            crawlconfig.id,
+            new_crawl["id"],
+            new_crawl["started"],
+        )
 
     async def update_crawl(self, crawl_id: str, org: Organization, update: UpdateCrawl):
         """Update existing crawl (tags and notes only for now)"""
@@ -835,6 +847,8 @@ async def add_new_crawl(
     crawls, crawl_id: str, crawlconfig: CrawlConfig, userid: UUID4, manual=True
 ):
     """initialize new crawl"""
+    started = ts_now()
+
     crawl = Crawl(
         id=crawl_id,
         state="starting",
@@ -849,13 +863,13 @@ async def add_new_crawl(
         schedule=crawlconfig.schedule,
         crawlTimeout=crawlconfig.crawlTimeout,
         manual=manual,
-        started=ts_now(),
+        started=started,
         tags=crawlconfig.tags,
     )
 
     try:
-        await crawls.insert_one(crawl.to_dict())
-        return True
+        result = await crawls.insert_one(crawl.to_dict())
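+        # return the new crawl's id and start time so callers can record them
+        # on the crawl config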
+        return {"id": str(result.inserted_id), "started": started}
     except pymongo.errors.DuplicateKeyError:
         # print(f"Crawl Already Added: {crawl.id} - {crawl.state}")
         return False
diff --git a/backend/btrixcloud/main_scheduled_job.py b/backend/btrixcloud/main_scheduled_job.py
index f4cb04bf..8cb88e6b 100644
--- a/backend/btrixcloud/main_scheduled_job.py
+++ b/backend/btrixcloud/main_scheduled_job.py
@@ -6,7 +6,11 @@ import uuid
 
 from .k8sapi import K8sAPI
 from .db import init_db
-from .crawlconfigs import get_crawl_config, inc_crawl_count
+from .crawlconfigs import (
+    get_crawl_config,
+    inc_crawl_count,
+    set_config_current_crawl_info,
+)
 from .crawls import add_new_crawl
 from .utils import register_exit_handler
 
@@ -43,9 +47,16 @@ class ScheduledJob(K8sAPI):
 
         # db create
         await inc_crawl_count(self.crawlconfigs, crawlconfig.id)
-        await add_new_crawl(
+        new_crawl = await add_new_crawl(
             self.crawls, crawl_id, crawlconfig, uuid.UUID(userid), manual=False
         )
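+        # as with manually-run crawls, record the new crawl on its workflow so
+        # it is reported as running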
+        if new_crawl:
+            # pylint: disable=duplicate-code
+            await set_config_current_crawl_info(
+                self.crawlconfigs.crawl_configs,
+                crawlconfig.id,
+                new_crawl["id"],
+                new_crawl["started"],
+            )
 
         print("Crawl Created: " + crawl_id)
 
diff --git a/backend/test/test_crawlconfigs.py b/backend/test/test_crawlconfigs.py
index d0dcbe09..ba4ec868 100644
--- a/backend/test/test_crawlconfigs.py
+++ b/backend/test/test_crawlconfigs.py
@@ -232,6 +232,7 @@ def test_workflow_total_size_and_last_crawl_stats(
             assert workflow["lastStartedByName"]
             assert workflow["lastCrawlTime"]
             assert workflow["lastCrawlState"]
+            assert workflow["lastRun"]
             assert workflow["lastCrawlSize"] > 0
 
             if last_crawl_id == admin_crawl_id:
@@ -254,4 +255,5 @@ def test_workflow_total_size_and_last_crawl_stats(
     assert data["lastStartedByName"]
     assert data["lastCrawlTime"]
     assert data["lastCrawlState"]
+    assert data["lastRun"]
     assert data["lastCrawlSize"] > 0
diff --git a/backend/test/test_filter_sort_results.py b/backend/test/test_filter_sort_results.py
index 49b2bdf2..d30585c0 100644
--- a/backend/test/test_filter_sort_results.py
+++ b/backend/test/test_filter_sort_results.py
@@ -433,11 +433,12 @@ def test_sort_crawl_configs(
 
     last_crawl_time = None
     for config in items:
-        if not config.get("lastCrawlTime"):
+        config_last_time = config.get("lastCrawlTime")
+        if not config_last_time:
             continue
-        if last_crawl_time:
-            assert config["lastCrawlTime"] <= last_crawl_time
-        last_crawl_time = config["lastCrawlTime"]
+        if last_crawl_time:
+            assert config_last_time <= last_crawl_time
+        last_crawl_time = config_last_time
 
     # Sort by lastCrawlTime, ascending
     r = requests.get(
@@ -449,11 +450,80 @@ def test_sort_crawl_configs(
 
     last_crawl_time = None
     for config in items:
-        if not config.get("lastCrawlTime"):
+        config_last_time = config.get("lastCrawlTime")
+        if not config_last_time:
             continue
-        if last_crawl_time:
-            assert config["lastCrawlTime"] >= last_crawl_time
-        last_crawl_time = config["lastCrawlTime"]
+        if last_crawl_time:
+            assert config_last_time >= last_crawl_time
+        last_crawl_time = config_last_time
+
+    # Sort by lastCrawlStartTime
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastCrawlStartTime",
+        headers=crawler_auth_headers,
+    )
+    data = r.json()
+    items = data["items"]
+
+    last_crawl_time = None
+    for config in items:
+        config_last_time = config.get("lastCrawlStartTime")
+        if not config_last_time:
+            continue
+        if last_crawl_time:
+            assert config_last_time <= last_crawl_time
+        last_crawl_time = config_last_time
+
+    # Sort by lastCrawlStartTime, ascending
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastCrawlStartTime&sortDirection=1",
+        headers=crawler_auth_headers,
+    )
+    data = r.json()
+    items = data["items"]
+
+    last_crawl_time = None
+    for config in items:
+        config_last_time = config.get("lastCrawlStartTime")
+        if not config_last_time:
+            continue
+        if last_crawl_time:
+            assert config_last_time >= last_crawl_time
+        last_crawl_time = config_last_time
+
+    # Sort by lastRun
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastRun",
+        headers=crawler_auth_headers,
+    )
+    data = r.json()
+    items = data["items"]
+
+    last_updated_time = None
+    for config in items:
+        config_last_updated = config.get("lastRun")
+        if not config_last_updated:
+            continue
+        if last_updated_time:
+            assert config_last_updated <= last_updated_time
+        last_updated_time = config_last_updated
+
+    # Sort by lastRun, ascending
+    r = requests.get(
+        f"{API_PREFIX}/orgs/{default_org_id}/crawlconfigs?sortBy=lastRun&sortDirection=1",
+        headers=crawler_auth_headers,
+    )
+    data = r.json()
+    items = data["items"]
+
+    last_updated_time = None
+    for config in items:
+        config_last_updated = config.get("lastRun")
+        if not config_last_updated:
+            continue
+        if last_updated_time:
+            assert config_last_updated >= last_updated_time
+        last_updated_time = config_last_updated
 
     # Invalid sort value
     r = requests.get(
diff --git a/frontend/src/components/workflow-list.ts b/frontend/src/components/workflow-list.ts
index bfae3365..4259ddfa 100644
--- a/frontend/src/components/workflow-list.ts
+++ b/frontend/src/components/workflow-list.ts
@@ -255,7 +255,7 @@ export class WorkflowListItem extends LitElement {
       role="button"
       href=${`/orgs/${this.workflow?.oid}/workflows/crawl/${
         this.workflow?.id
-      }#${this.workflow?.currCrawlId ? "watch" : "artifacts"}`}
+      }#${this.workflow?.isCrawlRunning ? "watch" : "artifacts"}`}
       @click=${async (e: MouseEvent) => {
         e.preventDefault();
         await this.updateComplete;
@@ -294,20 +294,19 @@ export class WorkflowListItem extends LitElement {
             (workflow) =>
               html`