# trading-platform/apps/llm-agent/docker-compose.ollama.yml
---
# Compose stack: local Ollama LLM server plus an optional web UI.
# NOTE(review): the `version` key is obsolete and ignored by Compose v2;
# kept for compatibility with older docker-compose clients.
version: '3.8'

services:
  ollama:
    image: ollama/ollama:latest
    container_name: orbiquant-ollama
    ports:
      - "11434:11434"  # Ollama HTTP API (quoted to avoid YAML sexagesimal trap)
    volumes:
      - ollama-data:/root/.ollama  # persists pulled models across restarts
    environment:
      - OLLAMA_HOST=0.0.0.0  # listen on all interfaces inside the container
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            # Reserve one NVIDIA GPU; requires the NVIDIA Container Toolkit
            # on the host.
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    networks:
      - orbiquant-network

  # Optional: Ollama Web UI for easy model management
  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: ollama-webui
    ports:
      - "3087:8080"  # host 3087 -> container 8080
    environment:
      # Reaches the ollama service via Compose's internal DNS name.
      - OLLAMA_BASE_URL=http://ollama:11434
    volumes:
      - ollama-webui-data:/app/backend/data
    depends_on:
      - ollama
    restart: unless-stopped
    networks:
      - orbiquant-network

volumes:
  ollama-data:
    driver: local
  ollama-webui-data:
    driver: local

networks:
  orbiquant-network:
    # Pre-existing network created by the trading-platform stack; this file
    # does not create it.
    external: true
    name: trading-platform_orbiquant-network