browsertrix/chart/templates/configmap.yaml
Last commit 49460bb070 by Tessa Walsh: Add default organization + invite to default org (#465), #455
- Add default switch to Archive (org) model
- Set default org name via values.yaml
- Add check to ensure only one org with default org name exists
- Stop creating new orgs for new users
- Add new API endpoints for creating and renaming orgs (part of #457)
- Make Archive.name unique via index
- Wait for db connection on init, log if waiting
- Make archive-less invites invite user to default org with Owner role
- Rename default org from chart value if changed
- Don't create new org for invited users
Committed 2023-01-12 16:44:18 -08:00
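
The default org name called out in the commit summary is a plain chart value, surfaced to the backend as DEFAULT_ORG in the first ConfigMap below. A minimal values.yaml sketch (the org name itself is only illustrative; per the commit notes, changing this value renames the default org):

    default_org: "My Organization"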

---
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ .Values.name }}-env-config
  namespace: {{ .Release.Namespace }}
data:
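  # Environment variables for the backend API; every entry is populated from the
  # chart's values.yaml, and numeric values are quoted because ConfigMap data
  # entries must be strings.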
  APP_ORIGIN: {{ .Values.ingress.scheme }}://{{ .Values.ingress.host | default "localhost:9870" }}
  CRAWLER_NAMESPACE: {{ .Values.crawler_namespace }}
  CRAWLER_IMAGE: {{ .Values.crawler_image }}
  CRAWLER_PULL_POLICY: {{ .Values.crawler_pull_policy }}
  CRAWLER_FQDN_SUFFIX: ".{{ .Values.crawler_namespace }}.svc.cluster.local"
  CRAWLER_TIMEOUT: "{{ .Values.crawl_timeout }}"
  CRAWLER_RETRIES: "{{ .Values.crawl_retries }}"
  CRAWLER_REQUESTS_CPU: "{{ .Values.crawler_requests_cpu }}"
  CRAWLER_LIMITS_CPU: "{{ .Values.crawler_limits_cpu }}"
  CRAWLER_REQUESTS_MEM: "{{ .Values.crawler_requests_memory }}"
  CRAWLER_LIMITS_MEM: "{{ .Values.crawler_limits_memory }}"
  CRAWLER_LIVENESS_PORT: "{{ .Values.crawler_liveness_port | default 0 }}"
  DEFAULT_ORG: "{{ .Values.default_org }}"
  JOB_IMAGE: "{{ .Values.api_image }}"
  {{- if .Values.crawler_pv_claim }}
  CRAWLER_PV_CLAIM: "{{ .Values.crawler_pv_claim }}"
  {{- end }}
  CRAWLER_NODE_TYPE: "{{ .Values.crawler_node_type }}"
  REDIS_URL: "{{ .Values.redis_url }}"
  REDIS_CRAWLS_DONE_KEY: "crawls-done"
  NO_DELETE_JOBS: "{{ .Values.no_delete_jobs | default 0 }}"
  GRACE_PERIOD_SECS: "{{ .Values.grace_period_secs | default 600 }}"
  REGISTRATION_ENABLED: "{{ .Values.registration_enabled | default 0 }}"
  JWT_TOKEN_LIFETIME_MINUTES: "{{ .Values.jwt_token_lifetime_minutes | default 60 }}"
  WEB_CONCURRENCY: "{{ .Values.backend_workers | default 4 }}"
  IDLE_TIMEOUT: "{{ .Values.profile_browser_idle_seconds | default 60 }}"
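  # Illustrative override at deploy time (release name and values here are examples only):
  #   helm upgrade --install btrix ./chart --set registration_enabled=1 --set backend_workers=8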
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: shared-crawler-config
  namespace: {{ .Values.crawler_namespace }}
data:
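  # Shared arguments for crawler pods in the crawler namespace, populated from
  # crawler_args in values.yaml; the commented lines preserve the earlier
  # Redis-based wiring for reference.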
  #CRAWL_ARGS: "{{ .Values.crawler_args }} --redisStoreUrl {{ .Values.redis_url }}"
  CRAWL_ARGS: "{{ .Values.crawler_args }}"
  #WEBHOOK_URL: "{{ .Values.redis_url }}/crawls-done"
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: shared-job-config
  namespace: {{ .Values.crawler_namespace }}
data:
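  # config.yaml carries the per-crawl sizing (Redis and crawler images, CPU,
  # memory, storage) consumed by the crawl jobs (see JOB_IMAGE above);
  # redis.conf enables append-only persistence for the per-crawl Redis under /data.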
  config.yaml: |
    namespace: {{ .Values.crawler_namespace }}
    termination_grace_secs: "{{ .Values.grace_period_secs | default 600 }}"
    volume_storage_class: "{{ .Values.volume_storage_class }}"
    requests_hd: "{{ .Values.crawler_requests_storage }}"
    # redis
    redis_image: {{ .Values.redis_image }}
    redis_image_pull_policy: {{ .Values.redis_pull_policy }}
    redis_requests_cpu: "{{ .Values.redis_requests_cpu }}"
    redis_limits_cpu: "{{ .Values.redis_limits_cpu }}"
    redis_requests_memory: "{{ .Values.redis_requests_memory }}"
    redis_limits_memory: "{{ .Values.redis_limits_memory }}"
    # crawler
    crawler_image: {{ .Values.crawler_image }}
    crawler_image_pull_policy: {{ .Values.crawler_pull_policy }}
    crawler_requests_cpu: "{{ .Values.crawler_requests_cpu }}"
    crawler_limits_cpu: "{{ .Values.crawler_limits_cpu }}"
    crawler_requests_memory: "{{ .Values.crawler_requests_memory }}"
    crawler_limits_memory: "{{ .Values.crawler_limits_memory }}"
    crawler_liveness_port: "{{ .Values.crawler_liveness_port | default 0 }}"
    crawler_node_type: "{{ .Values.crawler_node_type }}"
    redis_node_type: "{{ .Values.redis_node_type }}"
  redis.conf: |
    appendonly yes
    dir /data
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: nginx-config
  namespace: {{ .Release.Namespace }}
data:
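  # Globs every *.conf file from the chart directory into this ConfigMap;
  # "indent 2" aligns the injected keys under data. The commented frontend
  # glob below is left disabled.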
{{ (.Files.Glob "*.conf").AsConfig | indent 2 }}
#{{ (.Files.Glob "frontend/*.*").AsConfig | indent 2 }}