From 4428184aeadc022452248ad9f1199390f93be284 Mon Sep 17 00:00:00 2001
From: Ilya Kreymer
Date: Thu, 8 Jun 2023 11:26:26 -0700
Subject: [PATCH] frontend: configure running with a fixed 'replay.json', auth
 headers passed via separate config (#899)

wabac.js will reload the replay.json on 403 with new token (will be in next
version of wabac.js)

presign urls: make presign timeout configurable (in minutes), defaults to 60
mins

dockerfile: fix configuring RWP_BASE_URL
---
 backend/btrixcloud/crawls.py           |  8 +++++---
 chart/templates/configmap.yaml         |  2 ++
 frontend/Dockerfile                    |  7 +++----
 frontend/src/pages/org/crawl-detail.ts | 10 ++++++----
 4 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/backend/btrixcloud/crawls.py b/backend/btrixcloud/crawls.py
index 21a877dc..fd3e137a 100644
--- a/backend/btrixcloud/crawls.py
+++ b/backend/btrixcloud/crawls.py
@@ -220,7 +220,9 @@ class CrawlOps:
 
         self.crawl_configs.set_crawl_ops(self)
 
-        self.presign_duration = int(os.environ.get("PRESIGN_DURATION_SECONDS", 3600))
+        self.presign_duration_seconds = (
+            int(os.environ.get("PRESIGN_DURATION_MINUTES", 60)) * 60
+        )
 
     async def init_index(self):
         """init index for crawls db collection"""
@@ -463,7 +465,7 @@ class CrawlOps:
             print("no files")
             return
 
-        delta = timedelta(seconds=self.presign_duration)
+        delta = timedelta(seconds=self.presign_duration_seconds)
 
         updates = []
         out_files = []
@@ -475,7 +477,7 @@ class CrawlOps:
             if not presigned_url or now >= file_.expireAt:
                 exp = now + delta
                 presigned_url = await get_presigned_url(
-                    org, file_, self.crawl_manager, self.presign_duration
+                    org, file_, self.crawl_manager, self.presign_duration_seconds
                 )
                 updates.append(
                     (
diff --git a/chart/templates/configmap.yaml b/chart/templates/configmap.yaml
index e3361845..5474e6e7 100644
--- a/chart/templates/configmap.yaml
+++ b/chart/templates/configmap.yaml
@@ -62,6 +62,8 @@ data:
 
   RERUN_LAST_MIGRATION: "{{ .Values.rerun_last_migration }}"
 
+  PRESIGN_DURATION_MINUTES: "{{ .Values.storage_presign_duration_minutes | default 60 }}"
+
 ---
 
 apiVersion: v1
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
index 69b51ed7..3f842896 100644
--- a/frontend/Dockerfile
+++ b/frontend/Dockerfile
@@ -1,7 +1,4 @@
 # syntax=docker/dockerfile:1.4
-# central place to configure the production replayweb.page loading prefix
-ARG RWP_BASE_URL=https://cdn.jsdelivr.net/npm/replaywebpage/
-
 FROM docker.io/library/node:16 as build_deps
 
 WORKDIR /app
@@ -30,10 +27,12 @@ COPY --link src ./src/
 ARG GIT_COMMIT_HASH
 ARG GIT_BRANCH_NAME
 ARG VERSION
+ARG RWP_BASE_URL=https://cdn.jsdelivr.net/npm/replaywebpage/
 
 ENV GIT_COMMIT_HASH=${GIT_COMMIT_HASH} \
     GIT_BRANCH_NAME=${GIT_BRANCH_NAME} \
-    VERSION=${VERSION}
+    VERSION=${VERSION} \
+    RWP_BASE_URL=${RWP_BASE_URL}
 
 # Prevent Docker caching node_modules
 RUN yarn build && \
diff --git a/frontend/src/pages/org/crawl-detail.ts b/frontend/src/pages/org/crawl-detail.ts
index 69a65281..48a037c9 100644
--- a/frontend/src/pages/org/crawl-detail.ts
+++ b/frontend/src/pages/org/crawl-detail.ts
@@ -571,11 +571,12 @@ export class CrawlDetail extends LiteElement {
   }
 
   private renderReplay() {
-    const bearer = this.authState?.headers?.Authorization?.split(" ", 2)[1];
+    //const replaySource = `/api/orgs/${this.crawl?.oid}/crawls/${this.crawlId}/replay.json?auth_bearer=${bearer}`;
+    const replaySource = `/api/orgs/${this.crawl?.oid}/crawls/${this.crawlId}/replay.json`;
 
-    // for now, just use the first file until multi-wacz support is fully implemented
-    const replaySource = `/api/orgs/${this.crawl?.oid}/crawls/${this.crawlId}/replay.json?auth_bearer=${bearer}`;
-    //const replaySource = this.crawl?.resources?.[0]?.path;
+    const headers = this.authState?.headers;
+
+    const config = JSON.stringify({headers});
 
     const canReplay = replaySource && this.hasFiles;
 
@@ -587,6 +588,7 @@ export class CrawlDetail extends LiteElement {