workspace/projects/trading-platform/docker-compose.personal.yml
rckrdmrd 789d1ab46b
Some checks failed
CI Pipeline / changes (push) Has been cancelled
CI Pipeline / core (push) Has been cancelled
CI Pipeline / trading-backend (push) Has been cancelled
CI Pipeline / trading-data-service (push) Has been cancelled
CI Pipeline / trading-frontend (push) Has been cancelled
CI Pipeline / erp-core (push) Has been cancelled
CI Pipeline / erp-mecanicas (push) Has been cancelled
CI Pipeline / gamilit-backend (push) Has been cancelled
CI Pipeline / gamilit-frontend (push) Has been cancelled
changes on workspace
2025-12-09 14:46:20 -06:00

268 lines
8.4 KiB
YAML

# =============================================================================
# OrbiQuant IA - Personal Trading Platform
# Docker Compose for single-user deployment
# =============================================================================
#
# Usage:
# cp apps/personal/.env.example apps/personal/.env
# # Edit .env with your credentials
# docker-compose -f docker-compose.personal.yml up -d
#
# Services included:
# - PostgreSQL 16 (database)
# - Redis 7 (caching)
# - ML Engine (Python/FastAPI)
# - Data Service (market data)
# - LLM Agent (AI copilot)
# - Backend API (Express.js)
# - Frontend (React)
#
# GPU support:
# Requires NVIDIA Container Toolkit for GPU acceleration
# https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html
# NOTE(review): the top-level `version` key is obsolete in Compose V2 and is
# ignored with a warning; kept for compatibility with legacy docker-compose v1.
version: '3.8'
services:
  # ===========================================================================
  # Database Layer
  # ===========================================================================
  postgres:
    image: postgres:16-alpine
    container_name: orbiquant-db
    restart: unless-stopped
    environment:
      POSTGRES_DB: ${POSTGRES_DB:-orbiquant}
      POSTGRES_USER: ${POSTGRES_USER:-orbiquant}
      # SECURITY: weak fallback password — override in .env for any
      # deployment reachable beyond localhost.
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-orbiquant123}
    volumes:
      - postgres_data:/var/lib/postgresql/data
      # Init scripts run once, only when the data directory is empty (read-only mount)
      - ./apps/database/init:/docker-entrypoint-initdb.d:ro
    ports:
      - "${POSTGRES_PORT:-5432}:5432"
    healthcheck:
      # Probe the actual application database: pg_isready without -d targets a
      # database named after the user, which may not be the one the app uses.
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-orbiquant} -d ${POSTGRES_DB:-orbiquant}"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - orbiquant-net
redis:
image: redis:7-alpine
container_name: orbiquant-redis
restart: unless-stopped
command: redis-server --appendonly yes
volumes:
- redis_data:/data
ports:
- "6379:6379"
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
networks:
- orbiquant-net
  # ===========================================================================
  # ML Engine (Python/FastAPI)
  # ===========================================================================
  ml-engine:
    build:
      context: ./apps/ml-engine
      dockerfile: Dockerfile
    container_name: orbiquant-ml
    restart: unless-stopped
    environment:
      # DB credentials mirror the postgres service so .env overrides stay in sync
      - POSTGRES_HOST=postgres
      - POSTGRES_PORT=5432
      - POSTGRES_DB=${POSTGRES_DB:-orbiquant}
      - POSTGRES_USER=${POSTGRES_USER:-orbiquant}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-orbiquant123}
      - REDIS_URL=redis://redis:6379
      # Internal service-to-service URL (resolved via Docker network DNS)
      - DATA_SERVICE_URL=http://data-service:8002
      - LOG_LEVEL=${LOG_LEVEL:-info}
    ports:
      - "${ML_ENGINE_PORT:-8001}:8001"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    volumes:
      # Persisted model artifacts (named volume, survives container recreation)
      - ml_models:/app/models
    # GPU reservation: requires the NVIDIA Container Toolkit on the host
    # (see header notes); `up` fails for this service without it.
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    networks:
      - orbiquant-net
  # ===========================================================================
  # Data Service (Market Data)
  # ===========================================================================
  data-service:
    build:
      context: ./apps/data-service
      dockerfile: Dockerfile
    container_name: orbiquant-data
    restart: unless-stopped
    environment:
      # DB credentials mirror the postgres service so .env overrides stay in sync
      - POSTGRES_HOST=postgres
      - POSTGRES_PORT=5432
      - POSTGRES_DB=${POSTGRES_DB:-orbiquant}
      - POSTGRES_USER=${POSTGRES_USER:-orbiquant}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-orbiquant123}
      - REDIS_URL=redis://redis:6379
      # Market-data provider credentials default to empty strings so the stack
      # still starts without them — presumably each provider is optional;
      # TODO(review): confirm the service tolerates missing keys at runtime.
      - POLYGON_API_KEY=${POLYGON_API_KEY:-}
      - METAAPI_TOKEN=${METAAPI_TOKEN:-}
      - METAAPI_ACCOUNT_ID=${METAAPI_ACCOUNT_ID:-}
      - BINANCE_API_KEY=${BINANCE_API_KEY:-}
      - BINANCE_SECRET_KEY=${BINANCE_SECRET_KEY:-}
      - LOG_LEVEL=${LOG_LEVEL:-info}
    ports:
      - "${DATA_SERVICE_PORT:-8002}:8002"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    networks:
      - orbiquant-net
# ===========================================================================
# LLM Agent (AI Trading Copilot)
# ===========================================================================
llm-agent:
build:
context: ./apps/llm-agent
dockerfile: Dockerfile
container_name: orbiquant-llm
restart: unless-stopped
environment:
- LLM_PROVIDER=${LLM_PROVIDER:-ollama}
- OLLAMA_URL=${OLLAMA_URL:-http://host.docker.internal:11434}
- OLLAMA_MODEL=${OLLAMA_MODEL:-llama3:8b}
- OPENAI_API_KEY=${OPENAI_API_KEY:-}
- OPENAI_MODEL=${OPENAI_MODEL:-gpt-4-turbo-preview}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
- CLAUDE_MODEL=${CLAUDE_MODEL:-claude-3-5-sonnet-20241022}
- ML_ENGINE_URL=http://ml-engine:8001
- DATA_SERVICE_URL=http://data-service:8002
- BACKEND_URL=http://backend:3000
- TRADE_MODE=${TRADE_MODE:-paper}
- AUTO_TRADE_ENABLED=${AUTO_TRADE_ENABLED:-false}
- AUTO_TRADE_REQUIRE_CONFIRMATION=${AUTO_TRADE_REQUIRE_CONFIRMATION:-true}
- LOG_LEVEL=${LOG_LEVEL:-info}
ports:
- "${LLM_AGENT_PORT:-8003}:8003"
depends_on:
- ml-engine
- data-service
extra_hosts:
- "host.docker.internal:host-gateway"
networks:
- orbiquant-net
  # ===========================================================================
  # Backend API (Express.js)
  # ===========================================================================
  backend:
    build:
      context: ./apps/backend
      dockerfile: Dockerfile
    container_name: orbiquant-backend
    restart: unless-stopped
    environment:
      - NODE_ENV=production
      - APP_MODE=${APP_MODE:-personal}
      - PORT=3000
      # Interpolates the same vars as the postgres service, so the connection
      # string stays in sync with .env overrides automatically.
      - DATABASE_URL=postgresql://${POSTGRES_USER:-orbiquant}:${POSTGRES_PASSWORD:-orbiquant123}@postgres:5432/${POSTGRES_DB:-orbiquant}
      - REDIS_URL=redis://redis:6379
      # SECURITY: the fallback value is a placeholder — set JWT_SECRET in .env
      # before exposing this service beyond localhost.
      - JWT_SECRET=${JWT_SECRET:-change-this-secret-in-production}
      # Internal service-to-service URLs (Docker network DNS)
      - ML_ENGINE_URL=http://ml-engine:8001
      - DATA_SERVICE_URL=http://data-service:8002
      - LLM_AGENT_URL=http://llm-agent:8003
      - METAAPI_TOKEN=${METAAPI_TOKEN:-}
      - METAAPI_ACCOUNT_ID=${METAAPI_ACCOUNT_ID:-}
      - LOG_LEVEL=${LOG_LEVEL:-info}
    ports:
      - "${BACKEND_PORT:-3000}:3000"
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      # ml-engine defines no healthcheck, so only startup (not readiness) can be awaited
      ml-engine:
        condition: service_started
    networks:
      - orbiquant-net
# ===========================================================================
# Frontend (React)
# ===========================================================================
frontend:
build:
context: ./apps/frontend
dockerfile: Dockerfile
args:
- VITE_API_URL=http://localhost:${BACKEND_PORT:-3000}
- VITE_WS_URL=ws://localhost:${BACKEND_PORT:-3000}
- VITE_LLM_URL=http://localhost:${LLM_AGENT_PORT:-8003}
container_name: orbiquant-frontend
restart: unless-stopped
ports:
- "${FRONTEND_PORT:-5173}:80"
depends_on:
- backend
networks:
- orbiquant-net
# ===========================================================================
# Ollama (Local LLM - Optional, run separately if needed)
# ===========================================================================
# Uncomment if you want to run Ollama inside Docker
# Note: It's often better to run Ollama directly on host for GPU access
#
# ollama:
# image: ollama/ollama:latest
# container_name: orbiquant-ollama
# restart: unless-stopped
# ports:
# - "11434:11434"
# volumes:
# - ollama_data:/root/.ollama
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]
# networks:
# - orbiquant-net
# ===========================================================================
# Volumes
# ===========================================================================
volumes:
  # PostgreSQL data directory (survives container recreation)
  postgres_data:
    driver: local
  # Redis AOF persistence files
  redis_data:
    driver: local
  # Trained-model artifacts mounted into ml-engine at /app/models
  ml_models:
    driver: local
  # Uncomment together with the optional ollama service above
  # ollama_data:
  #   driver: local
# ===========================================================================
# Networks
# ===========================================================================
networks:
  # Single bridge network shared by all services; service names resolve via
  # Docker's embedded DNS (e.g. http://backend:3000).
  orbiquant-net:
    driver: bridge