local-llm-agent/apps/inference-engine/requirements.txt
Adrian Flores Cortes 3def230d58 Initial commit: local-llm-agent infrastructure project
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-02 16:42:45 -06:00

30 lines
460 B
Plaintext

# Local LLM Agent - Inference Engine
# Python 3.11+
# Web Framework
fastapi>=0.104.0
uvicorn[standard]>=0.24.0
pydantic>=2.5.0
pydantic-settings>=2.1.0
# HTTP Client (for Ollama)
httpx>=0.25.0
aiohttp>=3.9.0
# Utilities
python-dotenv>=1.0.0
structlog>=23.2.0
# Monitoring
prometheus-client>=0.19.0
# Optional: vLLM support (uncomment for production; NOTE(review): pin below may be stale — verify the current vLLM release before enabling)
# vllm>=0.2.7
# Development / test / lint tooling (consider moving these to a separate requirements-dev.txt so production installs stay minimal)
pytest>=7.4.0
pytest-asyncio>=0.21.0
black>=23.11.0
ruff>=0.1.6
mypy>=1.7.0