version: '3.9'

services:
  job:
    image: {{ job_image }}
    command: ["uvicorn", "btrixcloud.swarm.crawl_job:app", "--host", "0.0.0.0", "--access-log", "--log-level", "info"]
    container_name: job-{{ id }}_job

    security_opt:
      - "label=disable"

    secrets:
      - crawl-opts-{{ cid }}

    # container runtime socket, overridable via SOCKET_SRC / SOCKET_DEST
    volumes:
      - {{ env.SOCKET_SRC | default("/var/run/docker.sock", true) }}:{{ env.SOCKET_DEST | default("/var/run/docker.sock", true) }}:z

    networks:
      - btrix

    deploy:
      # manual crawls start immediately with one replica;
      # scheduled crawls start with zero and are launched via the swarm.cronjob labels below
      replicas: {{ 1 if not schedule else 0 }}
      labels:
        btrix.run.manual: "{{ manual }}"
        btrix.user: {{ userid }}
        btrix.archive: {{ aid }}
        btrix.crawlconfig: {{ cid }}

        # cron labels are only added for scheduled crawls
{% if schedule %}
        swarm.cronjob.enable: "true"
        swarm.cronjob.skip-running: "true"
        swarm.cronjob.schedule: "{{ schedule }}"
{% endif %}

      mode: replicated
      restart_policy:
        condition: none

    environment:
      CUSTOM_JOB_CONFIG: crawl-opts-{{ cid }}
      JOB_ID: "{{ id }}"
      STACK_PREFIX: "crawl-"

      CRAWLER_IMAGE: "{{ env.CRAWLER_IMAGE }}"
      REDIS_IMAGE: "{{ env.REDIS_IMAGE }}"
      CRAWL_ARGS: "{{ env.CRAWL_ARGS }}"

      CRAWLER_REQUESTS_CPU: "{{ env.CRAWLER_REQUESTS_CPU }}"
      CRAWLER_REQUESTS_MEMORY: "{{ env.CRAWLER_REQUESTS_MEMORY }}"
      CRAWLER_LIMITS_CPU: "{{ env.CRAWLER_LIMITS_CPU }}"
      CRAWLER_LIMITS_MEMORY: "{{ env.CRAWLER_LIMITS_MEMORY }}"

      STORE_ENDPOINT_URL: "{{ env.STORE_ENDPOINT_URL }}"
      STORE_ACCESS_KEY: "{{ env.STORE_ACCESS_KEY }}"
      STORE_SECRET_KEY: "{{ env.STORE_SECRET_KEY }}"
      STORE_PATH: "{{ storage_path }}"
      STORAGE_NAME: "{{ storage_name }}"
      PROFILE_PATH: "{{ profile_path }}"

      MONGO_DB_URL: "{{ mongo_db_url }}"
      RUN_MANUAL: "{{ manual }}"
      RUNTIME: "{{ env.RUNTIME }}"

      WACZ_SIGN_URL: "{{ env.WACZ_SIGN_URL }}"
      WACZ_SIGN_TOKEN: "{{ env.WACZ_SIGN_TOKEN }}"

networks:
  btrix:
    external:
      name: btrix-net

# crawl options are provided by a pre-existing (external) Swarm secret
secrets:
  crawl-opts-{{ cid }}:
    external: true