# include:
#   - compose.observability.yaml
services:
  speaches:
    extends:
      file: compose.yaml
      service: speaches
    build:
      context: ./
      dockerfile: Dockerfile # Optional if the Dockerfile is named 'Dockerfile'
      args:
        BASE_IMAGE: nvidia/cuda:12.6.2-cudnn-runtime-ubuntu24.04
    environment:
      - WHISPER__MODEL=Systran/faster-whisper-large-v3
    volumes:
      - hf-hub-cache:/home/ubuntu/.cache/huggingface/hub
    ports:
      - "54127:54127"
    deploy:
      resources:
        reservations:
          devices:
            - capabilities: ["gpu"]
volumes:
  hf-hub-cache:
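
# Minimal usage sketch (the filename compose.cuda.yaml is an assumption, not part of
# the original file): save this override next to the upstream compose.yaml it extends,
# then build and start it with:
#
#   docker compose -f compose.cuda.yaml up --detach --build
#
# Note: the GPU reservation under deploy.resources.reservations.devices requires the
# NVIDIA Container Toolkit to be installed on the Docker host.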