version: "3.9" networks: opsNet: driver: overlay attachable: true configs: prometheus.yml: file: ./monitoring/prometheus.yml alertmanager.yml: file: ./monitoring/alertmanager.yml rules.yaml: file: ./agent/rules.yaml secrets: openai_api_key: external: true services: ai-agent: build: context: ./agent image: burnserv/ai-agent:latest networks: [opsNet] ports: - "8080:8080" deploy: mode: replicated replicas: 1 placement: constraints: - node.role == manager labels: - "ai.agent=true" volumes: - /var/run/docker.sock:/var/run/docker.sock configs: - source: rules.yaml target: /app/rules.yaml prometheus: image: prom/prometheus:v2.55.0 networks: [opsNet] deploy: mode: replicated replicas: 1 command: - "--config.file=/etc/prometheus/prometheus.yml" configs: - source: prometheus.yml target: /etc/prometheus/prometheus.yml ports: - "9090:9090" alertmanager: image: prom/alertmanager:v0.27.0 networks: [opsNet] deploy: mode: replicated replicas: 1 command: - "--config.file=/etc/alertmanager/alertmanager.yml" configs: - source: alertmanager.yml target: /etc/alertmanager/alertmanager.yml ports: - "9093:9093" cadvisor: image: gcr.io/cadvisor/cadvisor:v0.49.1 networks: [opsNet] deploy: mode: global placement: constraints: - node.platform.os == linux volumes: - /:/rootfs:ro - /var/run:/var/run:ro - /sys:/sys:ro - /var/lib/docker/:/var/lib/docker:ro ports: - "8081:8080" node-exporter: image: prom/node-exporter:v1.8.2 networks: [opsNet] deploy: mode: global placement: constraints: - node.platform.os == linux command: - --path.rootfs=/host volumes: - /:/host:ro,rslave relay: build: context: ./relay image: burnserv/ai-relay:latest networks: [opsNet] depends_on: [ai-agent] deploy: mode: replicated replicas: 1 placement: constraints: - node.role == manager environment: - OPENAI_MODEL=gpt-4o-mini - AGENT_URL=http://ai-agent:8080 - OPENAI_API_KEY_FILE=/run/secrets/openai_api_key secrets: - source: openai_api_key target: openai_api_key ports: - "8090:8090"