# LLM Agent Service Configuration
# OrbiQuant IA Trading Platform

# Service Configuration
SERVICE_NAME=llm-agent
ENVIRONMENT=development
DEBUG=true

# API Configuration
API_HOST=0.0.0.0
API_PORT=8003

# LLM Provider Configuration
# Options: ollama (local GPU), claude (cloud), openai (cloud)
LLM_PROVIDER=ollama

# Ollama Configuration (for local GPU inference)
OLLAMA_BASE_URL=http://localhost:11434
LLM_MODEL=llama3:8b
# Available models: llama3:8b, llama3:70b, mistral:7b, mixtral:8x7b

# Anthropic/Claude Configuration (optional fallback)
# ANTHROPIC_API_KEY=your_anthropic_api_key_here
CLAUDE_MODEL=claude-3-5-sonnet-20241022

# LLM Generation Settings
MAX_TOKENS=2048
TEMPERATURE=0.7

# Service URLs
BACKEND_URL=http://localhost:8000
DATA_SERVICE_URL=http://localhost:8001
ML_ENGINE_URL=http://localhost:8002

# Database Configuration
DATABASE_URL=postgresql://orbiquant:your_password@localhost:5432/orbiquant_trading
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20

# Redis Configuration
REDIS_URL=redis://localhost:6379/0
REDIS_CACHE_TTL=3600

# Vector DB Configuration (ChromaDB)
CHROMA_PERSIST_DIRECTORY=./chroma_db
CHROMA_COLLECTION_NAME=trading_knowledge

# Logging Configuration
LOG_LEVEL=INFO
LOG_FORMAT=json

# RAG Configuration
ENABLE_RAG=true
EMBEDDING_MODEL=text-embedding-3-small
MAX_CONTEXT_DOCUMENTS=5