backend: fix scaling api response, return error details if available
parent 9606d59c3d
commit df905682a5
@@ -167,13 +167,16 @@ class CrawlJob(ABC):
 
     async def scale_to(self, scale):
         """ scale to 'scale' """
-        if not await self._do_scale(scale):
-            return False
+        try:
+            await self._do_scale(scale)
+        # pylint: disable=broad-except
+        except Exception as exc:
+            return {"success": False, "error": str(exc)}
 
         self.scale = scale
         await self.update_crawl(scale=scale)
 
-        return True
+        return {"success": True}
 
     async def fail_crawl(self):
         """ mark crawl as failed """
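The hunk above replaces scale_to()'s bare boolean with a result dict so the caller can see why scaling failed. For reference, a minimal self-contained sketch of that contract; FakeJob and its failure condition are stand-ins for illustration, not the real CrawlJob:

# Minimal sketch of the new scale_to() return contract.
# FakeJob is a stand-in for CrawlJob and is not part of this commit.
import asyncio


class FakeJob:
    async def _do_scale(self, scale):
        # stand-in failure mode, mirroring how _do_scale now raises on error
        if scale > 3:
            raise Exception("max_scale_exceeded")

    async def scale_to(self, scale):
        """ scale to 'scale' """
        try:
            await self._do_scale(scale)
        # pylint: disable=broad-except
        except Exception as exc:
            return {"success": False, "error": str(exc)}
        return {"success": True}


async def main():
    job = FakeJob()
    print(await job.scale_to(2))   # {'success': True}
    print(await job.scale_to(10))  # {'success': False, 'error': 'max_scale_exceeded'}


asyncio.run(main())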
@@ -369,7 +372,7 @@ class CrawlJob(ABC):
 
         @app.post("/scale/{size}")
         async def scale(size: int):
-            return {"success": await self.scale_to(size)}
+            return await self.scale_to(size)
 
         @app.post("/stop")
         async def stop():
@@ -489,9 +489,11 @@ def init_crawls_api(
         scale: CrawlScale, crawl_id, archive: Archive = Depends(archive_crawl_dep)
     ):
 
-        error = await crawl_manager.scale_crawl(crawl_id, archive.id_str, scale.scale)
-        if error:
-            raise HTTPException(status_code=400, detail=error)
+        result = await crawl_manager.scale_crawl(crawl_id, archive.id_str, scale.scale)
+        if not result or not result.get("success"):
+            raise HTTPException(
+                status_code=400, detail=result.get("error") or "unknown"
+            )
 
         return {"scaled": scale.scale}
 
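With the change above, the API handler propagates the job's error string as the HTTP 400 detail instead of a bare failure flag. A small FastAPI sketch of the same pattern; the route path, request model, and fake_scale_crawl helper are placeholders, not the project's actual crawl_manager API:

# Hedged sketch: route path, model, and helper are illustrative placeholders.
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

app = FastAPI()


class ScaleRequest(BaseModel):
    # stand-in for the project's CrawlScale model
    scale: int = 1


async def fake_scale_crawl(crawl_id: str, scale: int) -> dict:
    # placeholder for crawl_manager.scale_crawl(); always fails here for demonstration
    return {"success": False, "error": "crawl_not_found"}


@app.post("/crawls/{crawl_id}/scale")
async def scale_crawl(crawl_id: str, req: ScaleRequest):
    result = await fake_scale_crawl(crawl_id, req.scale)
    if not result or not result.get("success"):
        # surface the job's error string in the 400 response body
        raise HTTPException(status_code=400, detail=result.get("error") or "unknown")
    return {"scaled": req.scale}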
@@ -17,7 +17,7 @@ class K8SCrawlJob(K8SJobMixin, CrawlJob):
     async def _do_scale(self, new_scale):
         crawl = await self._get_crawl()
         if not crawl:
-            return False
+            raise Exception("crawl_not_found")
 
         # if making scale smaller, ensure existing crawlers saved their data
         pods = []
@@ -38,8 +38,6 @@ class K8SCrawlJob(K8SJobMixin, CrawlJob):
             name=crawl.metadata.name, namespace=self.namespace, body=crawl
         )
 
-        return True
-
     async def load_initial_scale(self, crawl=None):
         """ load scale from crawl, if available """
         if crawl:
@@ -48,8 +48,6 @@ class SwarmCrawlJob(SwarmJobMixin, CrawlJob):
             None, runner.run_service_stack, stack_id, data
         )
 
-        return True
-
     async def _get_crawl(self):
         loop = asyncio.get_running_loop()
         return await loop.run_in_executor(
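End to end, a failed scale request now carries the raised message (e.g. "crawl_not_found") back to the client in the 400 detail. A hypothetical client call; host, path, and payload shape are assumptions for illustration and not taken from this commit:

# Hypothetical client call; host, path, and payload are not taken from this commit.
import httpx

resp = httpx.post(
    "http://localhost:8000/crawls/example-crawl/scale",
    json={"scale": 2},
)
if resp.status_code == 400:
    # the job's error string, e.g. "crawl_not_found", arrives in the detail field
    print("scale failed:", resp.json()["detail"])
else:
    print("scaled to:", resp.json()["scaled"])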