backend: fix scaling api response, return error details if available

Ilya Kreymer 2022-06-29 17:26:32 -07:00
parent 9606d59c3d
commit df905682a5
4 changed files with 13 additions and 12 deletions


@@ -167,13 +167,16 @@ class CrawlJob(ABC):
     async def scale_to(self, scale):
         """ scale to 'scale' """
-        if not await self._do_scale(scale):
-            return False
+        try:
+            await self._do_scale(scale)
+        # pylint: disable=broad-except
+        except Exception as exc:
+            return {"success": False, "error": str(exc)}
 
         self.scale = scale
         await self.update_crawl(scale=scale)
-        return True
+        return {"success": True}
 
     async def fail_crawl(self):
         """ mark crawl as failed """
@@ -369,7 +372,7 @@ class CrawlJob(ABC):
         @app.post("/scale/{size}")
         async def scale(size: int):
-            return {"success": await self.scale_to(size)}
+            return await self.scale_to(size)
 
         @app.post("/stop")
         async def stop():
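
Since the in-pod endpoint now forwards the scale_to() result unchanged, POST /scale/{size} responds with either {"success": true} or {"success": false, "error": "..."}. A rough httpx sketch of exercising it; the base URL is a placeholder, not from this commit:

```python
import asyncio

import httpx

async def check_scale(size: int) -> None:
    # Placeholder address for the crawl job's internal FastAPI app.
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post(f"/scale/{size}")
        body = resp.json()
        # Expected shapes: {"success": True} or {"success": False, "error": "..."}
        if not body.get("success"):
            print("scale failed:", body.get("error"))

asyncio.run(check_scale(3))
```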


@@ -489,9 +489,11 @@ def init_crawls_api(
         scale: CrawlScale, crawl_id, archive: Archive = Depends(archive_crawl_dep)
     ):
-        error = await crawl_manager.scale_crawl(crawl_id, archive.id_str, scale.scale)
-        if error:
-            raise HTTPException(status_code=400, detail=error)
+        result = await crawl_manager.scale_crawl(crawl_id, archive.id_str, scale.scale)
+        if not result or not result.get("success"):
+            raise HTTPException(
+                status_code=400, detail=result.get("error") or "unknown"
+            )
 
         return {"scaled": scale.scale}


@@ -17,7 +17,7 @@ class K8SCrawlJob(K8SJobMixin, CrawlJob):
     async def _do_scale(self, new_scale):
         crawl = await self._get_crawl()
         if not crawl:
-            return False
+            raise Exception("crawl_not_found")
 
         # if making scale smaller, ensure existing crawlers saved their data
         pods = []
@@ -38,8 +38,6 @@ class K8SCrawlJob(K8SJobMixin, CrawlJob):
             name=crawl.metadata.name, namespace=self.namespace, body=crawl
         )
-        return True
-
     async def load_initial_scale(self, crawl=None):
         """ load scale from crawl, if available """
         if crawl:


@@ -48,8 +48,6 @@ class SwarmCrawlJob(SwarmJobMixin, CrawlJob):
             None, runner.run_service_stack, stack_id, data
         )
-        return True
-
     async def _get_crawl(self):
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(
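
With this change, both _do_scale() implementations signal failure by raising; their return value is ignored, which is why the trailing return True lines are dropped. A small self-contained sketch of the contract (the class name is illustrative, not part of the codebase):

```python
import asyncio

class FakeJob:
    """ Illustrative stand-in showing the raise-on-failure contract. """

    async def _do_scale(self, new_scale):
        # e.g. the k8s job raises when the crawl object cannot be found
        raise Exception("crawl_not_found")

    async def scale_to(self, scale):
        try:
            await self._do_scale(scale)
        # pylint: disable=broad-except
        except Exception as exc:
            return {"success": False, "error": str(exc)}
        return {"success": True}

print(asyncio.run(FakeJob().scale_to(2)))
# -> {'success': False, 'error': 'crawl_not_found'}
```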