#!/bin/bash
# =============================================================================
# OrbiQuant IA - Personal Trading Platform Quick Start
#
# Usage:
#   quick-start.sh            start everything (default)
#   quick-start.sh stop       stop all services
#   quick-start.sh restart    restart all services
#   quick-start.sh logs       follow service logs
#   quick-start.sh status     show container status
#
# Required configuration lives in apps/personal/.env (METAAPI_TOKEN,
# METAAPI_ACCOUNT_ID; optionally OPENAI_API_KEY / ANTHROPIC_API_KEY and
# *_PORT overrides). A template is copied from .env.example on first run.
# =============================================================================
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
readonly COMPOSE_FILE="docker-compose.personal.yml"

# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Set by check_prerequisites().
GPU_AVAILABLE=false
OLLAMA_AVAILABLE=false

#######################################
# Run Docker Compose against our compose file using whichever flavor is
# installed: the v2 plugin ("docker compose") is preferred, legacy
# "docker-compose" is the fallback. Every compose invocation (including the
# stop/restart/logs/status subcommands) goes through here so the whole script
# works on hosts that only have one flavor.
# Arguments: compose subcommand and flags (e.g. "up -d --build")
#######################################
compose() {
  if docker compose version &> /dev/null; then
    docker compose -f "$COMPOSE_FILE" "$@"
  else
    docker-compose -f "$COMPOSE_FILE" "$@"
  fi
}

echo -e "${BLUE}"
echo "╔══════════════════════════════════════════════════════════════╗"
echo "║          OrbiQuant IA - Personal Trading Platform            ║"
echo "║                    Quick Start Script                        ║"
echo "╚══════════════════════════════════════════════════════════════╝"
echo -e "${NC}"

#######################################
# Verify required tooling (Docker + Compose) and detect optional extras
# (NVIDIA GPU, Ollama). Exits non-zero if a hard requirement is missing.
# Globals written: GPU_AVAILABLE, OLLAMA_AVAILABLE
#######################################
check_prerequisites() {
  echo -e "${YELLOW}Checking prerequisites...${NC}"

  # Docker is mandatory.
  if ! command -v docker &> /dev/null; then
    echo -e "${RED}Error: Docker is not installed${NC}" >&2
    echo "Please install Docker: https://docs.docker.com/get-docker/" >&2
    exit 1
  fi
  echo -e "  ${GREEN}✓${NC} Docker installed"

  # Either the v2 plugin or legacy docker-compose is acceptable.
  if ! command -v docker-compose &> /dev/null && ! docker compose version &> /dev/null; then
    echo -e "${RED}Error: Docker Compose is not installed${NC}" >&2
    exit 1
  fi
  echo -e "  ${GREEN}✓${NC} Docker Compose installed"

  # GPU is optional; ML falls back to CPU without it.
  if command -v nvidia-smi &> /dev/null; then
    echo -e "  ${GREEN}✓${NC} NVIDIA GPU detected"
    GPU_AVAILABLE=true
  else
    echo -e "  ${YELLOW}!${NC} No NVIDIA GPU detected (ML will use CPU)"
    GPU_AVAILABLE=false
  fi

  # Ollama is optional; accept either an installed binary or a responding
  # local API (e.g. Ollama running inside a container).
  if command -v ollama &> /dev/null || curl -s http://localhost:11434/api/tags &> /dev/null; then
    echo -e "  ${GREEN}✓${NC} Ollama available"
    OLLAMA_AVAILABLE=true
  else
    echo -e "  ${YELLOW}!${NC} Ollama not running (will use OpenAI/Claude if configured)"
    OLLAMA_AVAILABLE=false
  fi

  echo ""
}

#######################################
# Ensure apps/personal/.env exists, creating it from .env.example on first
# run and pausing so the user can fill in credentials.
#######################################
setup_env() {
  local env_file="$PROJECT_ROOT/apps/personal/.env"
  local env_example="$PROJECT_ROOT/apps/personal/.env.example"

  if [ ! -f "$env_file" ]; then
    # Fail with a clear message if the template is missing, instead of
    # letting cp abort the script under 'set -e' with a cryptic error.
    if [ ! -f "$env_example" ]; then
      echo -e "${RED}Error: template not found: $env_example${NC}" >&2
      exit 1
    fi
    echo -e "${YELLOW}Creating .env file from template...${NC}"
    cp "$env_example" "$env_file"
    echo -e "${YELLOW}Please edit $env_file with your credentials${NC}"
    echo ""
    echo "Required configurations:"
    echo "  1. METAAPI_TOKEN      - Get from https://metaapi.cloud"
    echo "  2. METAAPI_ACCOUNT_ID - Your MT4/MT5 account"
    echo "  3. (Optional) OPENAI_API_KEY or ANTHROPIC_API_KEY"
    echo ""
    read -rp "Press Enter after editing the .env file, or Ctrl+C to exit..."
  else
    echo -e "  ${GREEN}✓${NC} Environment file exists"
  fi
}

#######################################
# Pre-pull the default LLM model when a local Ollama is available.
# Best-effort: a failed pull must not abort startup.
#######################################
start_ollama() {
  if [ "$OLLAMA_AVAILABLE" = true ]; then
    echo -e "${YELLOW}Ensuring Ollama has llama3:8b model...${NC}"
    ollama pull llama3:8b 2>/dev/null || true
  fi
}

#######################################
# Load the .env configuration into the environment and bring up the stack.
#######################################
start_services() {
  echo -e "${YELLOW}Starting OrbiQuant services...${NC}"
  cd "$PROJECT_ROOT"

  # Export every assignment in the .env file. 'set -a' + source handles
  # quoted values and values containing spaces correctly, which the old
  # `export $(cat .env | grep -v '^#' | xargs)` approach corrupted.
  set -a
  # shellcheck disable=SC1091
  . apps/personal/.env
  set +a

  compose up -d --build
  echo ""
}

#######################################
# Poll a health endpoint until it answers, printing a dot every attempt.
# Arguments: $1 - human-readable service name
#            $2 - URL to poll
# Returns:   0 when the endpoint answered, 1 after ~2 minutes of silence.
#######################################
wait_for_endpoint() {
  local name=$1 url=$2 i
  echo -n "  Waiting for ${name}..."
  for i in {1..60}; do
    if curl -s "$url" > /dev/null 2>&1; then
      echo -e " ${GREEN}Ready${NC}"
      return 0
    fi
    sleep 2
    echo -n "."
  done
  # Previously the loop fell through silently; at least flag the timeout.
  echo -e " ${RED}timed out${NC}"
  return 1
}

#######################################
# Wait for the core services to report healthy. Timeouts are reported but
# non-fatal (matching the original behavior of continuing to the summary).
#######################################
wait_for_services() {
  echo -e "${YELLOW}Waiting for services to be ready...${NC}"
  wait_for_endpoint "Backend API" "http://localhost:${BACKEND_PORT:-3000}/health" || true
  wait_for_endpoint "ML Engine" "http://localhost:${ML_ENGINE_PORT:-8001}/health" || true
  echo ""
}

#######################################
# Print the post-startup summary: service URLs and handy commands.
#######################################
show_status() {
  echo -e "${GREEN}"
  echo "╔══════════════════════════════════════════════════════════════╗"
  echo "║                 OrbiQuant IA is Running!                     ║"
  echo "╚══════════════════════════════════════════════════════════════╝"
  echo -e "${NC}"
  echo "Access your trading platform:"
  echo ""
  echo -e "  ${BLUE}Frontend Dashboard:${NC} http://localhost:${FRONTEND_PORT:-5173}"
  echo -e "  ${BLUE}Backend API:${NC}        http://localhost:${BACKEND_PORT:-3000}"
  echo -e "  ${BLUE}ML Engine:${NC}          http://localhost:${ML_ENGINE_PORT:-8001}/docs"
  echo -e "  ${BLUE}LLM Agent:${NC}          http://localhost:${LLM_AGENT_PORT:-8003}/docs"
  echo -e "  ${BLUE}Data Service:${NC}       http://localhost:${DATA_SERVICE_PORT:-8002}/docs"
  echo ""
  echo "Quick commands:"
  echo ""
  echo "  View logs:  docker compose -f docker-compose.personal.yml logs -f"
  echo "  Stop:       docker compose -f docker-compose.personal.yml down"
  echo "  Restart:    docker compose -f docker-compose.personal.yml restart"
  echo ""
  echo -e "${YELLOW}Note: Make sure to connect your MT4 account via the dashboard.${NC}"
  echo ""
}

#######################################
# Default flow: prereqs -> env -> ollama -> up -> health wait -> summary.
#######################################
main() {
  check_prerequisites
  setup_env
  start_ollama
  start_services
  wait_for_services
  show_status
}

# Argument dispatch. Subcommands go through compose() so they work with
# either Compose flavor (the old code hardcoded the v2 plugin here).
case "${1:-}" in
  stop)
    cd "$PROJECT_ROOT"
    compose down
    echo "Services stopped."
    ;;
  restart)
    cd "$PROJECT_ROOT"
    compose restart
    echo "Services restarted."
    ;;
  logs)
    cd "$PROJECT_ROOT"
    compose logs -f
    ;;
  status)
    cd "$PROJECT_ROOT"
    compose ps
    ;;
  *)
    main
    ;;
esac