browsertrix/backend/btrixcloud/swarm/templates/crawler.yaml
Ilya Kreymer 418c07bf0d
Local swarm + podman support (#261)
* backend: refactor swarm support to also support podman (#260)
- implement podman support as subclass of swarm deployment
- podman is used when 'RUNTIME=podman' env var is set
- podman socket is mapped instead of docker socket
- podman-compose is used instead of docker-compose (docker-compose does work with podman, but it does not support secrets; podman-compose does)
- separate cli utils into SwarmRunner and a PodmanRunner subclass that extends it (see the sketch below)
- using config.yaml and config.env, both copied from sample versions
- work on simplifying config: add docker-compose.podman.yml and docker-compose.swarm.yml, plus signing and debug configs, in ./configs
- add {build,run,stop}-{swarm,podman}.sh in scripts dir
- add init-configs, which copies the sample configs only if configs don't already exist
- build local image using the current version of podman, to support both podman 3.x and 4.x
- additional fixes after testing podman on CentOS
- docs: update Deployment.md to cover swarm, podman, and k8s deployment
2022-06-14 00:13:49 -07:00
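
As a rough sketch of the runner split described above: the SwarmRunner/PodmanRunner names and the RUNTIME env var come from this commit message, but the method names, CLI invocations, and selection helper below are illustrative assumptions, not the actual btrixcloud code.

import os
import subprocess

class SwarmRunner:
    """Drives crawl stacks through the docker swarm CLI."""

    def run_cli(self, *args):
        # Run the container CLI and return captured stdout
        return subprocess.run(
            args, capture_output=True, text=True, check=True
        ).stdout

    def deploy_stack(self, name, compose_file):
        # swarm natively understands compose files and secrets
        return self.run_cli("docker", "stack", "deploy", "-c", compose_file, name)

    def delete_stack(self, name):
        return self.run_cli("docker", "stack", "rm", name)


class PodmanRunner(SwarmRunner):
    """Same interface, but shells out to podman-compose: docker-compose
    can talk to the podman socket but lacks secrets support, which
    podman-compose provides."""

    def deploy_stack(self, name, compose_file):
        return self.run_cli(
            "podman-compose", "-p", name, "-f", compose_file, "up", "-d"
        )

    def delete_stack(self, name):
        return self.run_cli("podman-compose", "-p", name, "down")


# Per the commit message, the runtime is chosen via the RUNTIME env var
def get_runner():
    return PodmanRunner() if os.environ.get("RUNTIME") == "podman" else SwarmRunner()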


version: '3.9'

services:
  crawler:
    image: {{ crawler_image }}
    command:
      - crawl
      - --config
      - /var/run/secrets/crawl-config-{{ cid }}
      - --redisStoreUrl
      - {{ redis_url }}
{%- if profile_filename %}
      - --profile
      - "@profiles/{{ profile_filename }}"
{%- endif %}

    hostname: "crawl-{{ id }}-{{ index }}_crawler"
    container_name: "crawl-{{ id }}-{{ index }}_crawler"

    networks:
      - btrix

    secrets:
      - crawl-config-{{ cid }}

    volumes:
      - crawl-data:/crawls

    stop_grace_period: 1000s
    restart: always

    deploy:
      endpoint_mode: dnsrr
      replicas: 1
      labels:
        crawl: {{ id }}
        role: crawler

      resources:
        limits:
          cpus: "{{ crawler_limits_cpu }}"
          memory: "{{ crawler_limits_memory }}"
        reservations:
          cpus: "{{ crawler_requests_cpu }}"
          memory: "{{ crawler_requests_memory }}"

    environment:
      - CRAWL_ID={{ id }}
      - STORE_ENDPOINT_URL={{ storages[storage_name].endpoint_url }}
      - STORE_ACCESS_KEY={{ storages[storage_name].access_key }}
      - STORE_SECRET_KEY={{ storages[storage_name].secret_key }}
      - STORE_PATH={{ storage_path }}
      - STORE_FILENAME={{ storage_filename }}
      - STORE_USER={{ userid }}
{%- if auth_token %}
      - WACZ_SIGN_TOKEN={{ auth_token }}
      - WACZ_SIGN_URL=http://authsign:5053/sign
{%- endif %}
      - WEBHOOK_URL={{ redis_url }}/crawls-done
      - CRAWL_ARGS={{ crawler_args }}

{% if index == 0 %}
  redis:
    image: {{ redis_image }}
    command: ["redis-server", "--appendonly", "yes"]
    container_name: "crawl-{{ id }}-{{ index }}_redis"
    restart: always

    deploy:
      endpoint_mode: dnsrr
      replicas: 1
      labels:
        crawl: {{ id }}
        role: redis

    networks:
      - btrix
{% endif %}

networks:
  btrix:
    external:
      name: btrix-net

secrets:
  crawl-config-{{ cid }}:
    external: true

volumes:
  crawl-data:
    name: "crawl-{{ id }}-{{ index }}"
    labels:
      btrix.crawl: {{ id }}
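
For context on how the placeholders above get filled in, here is a minimal rendering sketch assuming plain Jinja2. The variable names match the template, but every value, plus the loader path, is an illustrative assumption. Note two things visible in the template itself: the redis service is only emitted when index is 0, so a single redis instance is shared across a crawl's replicas, and the crawl config is mounted as an external secret rather than passed through the environment.

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("btrixcloud/swarm/templates"))
template = env.get_template("crawler.yaml")

rendered = template.render(
    id="a1b2c3",
    index=0,                  # index 0 also emits the redis service
    cid="cfg-42",             # names the external crawl-config secret
    crawler_image="webrecorder/browsertrix-crawler:latest",
    redis_image="redis:6",
    redis_url="redis://crawl-a1b2c3-0_redis:6379/0",
    profile_filename=None,    # falsy: the --profile args are omitted
    auth_token=None,          # falsy: the WACZ signing vars are omitted
    storages={
        "default": {
            "endpoint_url": "http://minio:9000/bucket/",
            "access_key": "ACCESS",
            "secret_key": "SECRET",
        }
    },
    storage_name="default",
    storage_path="crawls/",
    storage_filename="crawl-a1b2c3.wacz",
    userid="user-1",
    crawler_args="--workers 2",
    crawler_limits_cpu="1.5",
    crawler_limits_memory="1G",
    crawler_requests_cpu="1.0",
    crawler_requests_memory="800M",
)
print(rendered)  # compose text, ready for docker stack deploy or podman-compose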