# ollama-docker/docker-compose.yml
services:
  ollama:
    volumes:
      - ./ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:latest
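    # Healthcheck: runs nvidia-smi inside the container and marks it unhealthy if
    # no GPU is visible. Assumes the image ships nvidia-smi and the host has the
    # NVIDIA Container Toolkit installed.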
    healthcheck:
      test: ["CMD-SHELL", "nvidia-smi > /dev/null"]
      interval: 10s
      timeout: 30s
      retries: 3
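    # Keep loaded models in memory for 60 minutes after the last request and allow
    # up to 4 requests per model to be served in parallel.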
    environment:
      - OLLAMA_KEEP_ALIVE=60m
      - OLLAMA_NUM_PARALLEL=4
    ports:
      - "11434:11434"
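    # Reserve all host NVIDIA GPUs for this container (requires the NVIDIA Container Toolkit).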
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
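
  # Open WebUI reaches the Ollama API over the default Compose network using the
  # service name "ollama"; the web UI itself is published on host port 3000.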
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    ports:
      - "3000:8080"
    environment:
      - OLLAMA_BASE_URL=http://ollama:11434
    volumes:
      - ./open-webui:/app/backend/data
    container_name: open-webui
    restart: unless-stopped # optional: restart automatically unless stopped manually
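
# Usage sketch (assumes Docker Compose v2 and an NVIDIA GPU host; model name is an example):
#   docker compose up -d
#   docker compose exec ollama ollama pull llama3    # pull an example model
#   curl http://localhost:11434/api/tags             # Ollama API on the host
# Open WebUI is then available at http://localhost:3000.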