* misc backend fixes:
  - fix running w/o local minio
  - ensure crawler image pull policy is configurable, loaded via chart value
  - use digitalocean repo for main backend image (for now)
  - add bucket_name to config only if using default bucket
* enable all behaviors, support 'access_endpoint_url' for default storages
* debugging: add 'no_delete_jobs' setting for k8s and docker to disable deletion of completed jobs
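For reference, a sketch of the chart values this template consumes (a hypothetical values.yaml excerpt: the keys match the .Values references in the template below, but the sample values are illustrative, not the chart's actual defaults):

    name: browsertrix-cloud
    mongo_host: mongo
    crawler_namespace: crawlers
    crawler_image: webrecorder/browsertrix-crawler:latest
    crawler_pull_policy: IfNotPresent
    crawl_timeout: 90000
    crawl_retries: 3
    redis_url: redis://local-redis.default:6379/1
    crawler_args: "--workers 2"
    no_delete_jobs: "0"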
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ .Values.name }}-env-config
  namespace: {{ .Release.Namespace }}

data:
  MONGO_HOST: {{ .Values.mongo_host }}

  CRAWLER_NAMESPACE: {{ .Values.crawler_namespace }}
  CRAWLER_IMAGE: {{ .Values.crawler_image }}
  CRAWLER_PULL_POLICY: {{ .Values.crawler_pull_policy }}

  CRAWL_TIMEOUT: "{{ .Values.crawl_timeout }}"
  CRAWL_RETRIES: "{{ .Values.crawl_retries }}"

  REDIS_URL: "{{ .Values.redis_url }}"

  REDIS_CRAWLS_DONE_KEY: "crawls-done"
  # debugging: set to "1" to keep completed crawl jobs instead of deleting them
  NO_DELETE_JOBS: "{{ .Values.no_delete_jobs | default "0" }}"
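
# Illustrative output: with the sample values above and a release installed into
# the "default" namespace, `helm template` would render this first ConfigMap
# roughly as follows (only selected keys shown):
#
#   apiVersion: v1
#   kind: ConfigMap
#   metadata:
#     name: browsertrix-cloud-env-config
#     namespace: default
#   data:
#     MONGO_HOST: mongo
#     NO_DELETE_JOBS: "0"   # the default applies when no_delete_jobs is unset
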
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: shared-crawler-config
  namespace: {{ .Values.crawler_namespace }}

data:
  CRAWL_ARGS: "{{ .Values.crawler_args }} --redisStoreUrl {{ .Values.redis_url }}"
  #WEBHOOK_URL: "http://browsertrix-cloud.default/_crawls/done"
  WEBHOOK_URL: "{{ .Values.redis_url }}/crawls-done"
  STORE_USER: ""
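
# A sketch of how a crawler pod in the crawler namespace could consume this
# ConfigMap via envFrom (the container spec below is an assumption for
# illustration, not part of this chart):
#
#   spec:
#     containers:
#       - name: crawler
#         image: {{ .Values.crawler_image }}
#         imagePullPolicy: {{ .Values.crawler_pull_policy }}
#         envFrom:
#           - configMapRef:
#               name: shared-crawler-config
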
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: nginx-config
  namespace: {{ .Release.Namespace }}

data:
{{ (.Files.Glob "*.conf").AsConfig | indent 2 }}
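
# Illustrative: .Files.Glob matches files relative to the chart root, and
# AsConfig emits each match as a "filename: file contents" entry. Assuming a
# single nginx.conf at the chart root, the data section above would render
# roughly as:
#
#   data:
#     nginx.conf: |-
#       server {
#         listen 80;
#       }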