- Use latest crawler image for tests - Due to the webrecorder/browsertrix-crawler#861 change, a crawl with no successful pages should be treated as failed. Update the fixture to allow either a failed or complete state for backwards compatibility for now.
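A minimal sketch of how a test could accept either terminal state after this change; the `api_get` helper and `crawl_id` fixture below are hypothetical stand-ins for illustration, not names from the Browsertrix test suite:

```python
# Sketch only: api_get and crawl_id are hypothetical pytest fixtures,
# not actual helpers from the Browsertrix backend tests.
import time


def wait_for_crawl_finished(api_get, crawl_id, timeout=300):
    """Poll the crawl until it reaches a terminal state, then return it."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = api_get(f"/crawls/{crawl_id}")["state"]
        if state in ("complete", "failed"):
            return state
        time.sleep(5)
    raise TimeoutError(f"crawl {crawl_id} did not finish within {timeout}s")


def test_crawl_reaches_terminal_state(api_get, crawl_id):
    # Accept either state: per crawler#861, a crawl with no successful
    # pages now ends as "failed" rather than "complete".
    assert wait_for_crawl_finished(api_get, crawl_id) in ("complete", "failed")
```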
56 lines
1.1 KiB
YAML
# test overrides
# --------------

# use local images built to :latest tag
backend_image: docker.io/webrecorder/browsertrix-backend:latest
frontend_image: docker.io/webrecorder/browsertrix-frontend:latest

backend_pull_policy: "Never"
frontend_pull_policy: "Never"

default_crawl_filename_template: "@ts-testing-@hostsuffix.wacz"

operator_resync_seconds: 3

qa_scale: 2

# for testing only
crawler_extra_cpu_per_browser: 300m

crawler_extra_memory_per_browser: 256Mi

crawler_channels:
  - id: default
    image: "docker.io/webrecorder/browsertrix-crawler:latest"

  - id: test
    image: "docker.io/webrecorder/browsertrix-crawler:latest"

mongo_auth:
  # specify either username + password (for local mongo)
  username: root
  password: PASSWORD@


superuser:
  # set this to enable a superuser admin
  email: admin@example.com

  # optional: if not set, automatically generated
  # change or remove this
  password: PASSW0RD!


# test max pages per crawl global limit
max_pages_per_crawl: 4

registration_enabled: "0"

billing_enabled: true

# log failed crawl pods to operator backend
log_failed_crawl_lines: 200

# disable for tests
disk_utilization_threshold: 0