[DDL] feat: Sprint 4 - Add data_sources and ml_predictions schemas
- data_sources schema: - api_providers: Proveedores de datos de mercado - ticker_mapping: Mapeo de simbolos entre proveedores - data_sync_status: Estado de sincronizacion de datos - ml schema additions: - range_predictions: Predicciones de rango (particionada) - entry_signals: Señales de entrada con metodologias ICT/SMC/AMD - market_analysis: Analisis de mercado (sesgo, estructura, volatilidad) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
29dfb82d6c
commit
9f8bbb7494
305
ddl/schemas/data_sources/tables/001_api_providers.sql
Normal file
305
ddl/schemas/data_sources/tables/001_api_providers.sql
Normal file
@ -0,0 +1,305 @@
|
||||
-- ============================================================================
-- SCHEMA: data_sources
-- TABLE: api_providers
-- DESCRIPTION: Market data providers (Polygon, Alpha Vantage, etc.)
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
-- ============================================================================

-- Idempotent schema creation so the migration can be re-run safely.
CREATE SCHEMA IF NOT EXISTS data_sources;

COMMENT ON SCHEMA data_sources IS
    'Fuentes de datos de mercado y proveedores de API';
|
||||
|
||||
-- Provider category: what kind of data the provider serves.
DO $$ BEGIN
    CREATE TYPE data_sources.provider_type AS ENUM (
        'market_data',       -- market data (OHLCV, quotes)
        'news',              -- financial news
        'fundamental',       -- fundamental data
        'sentiment',         -- sentiment analysis
        'economic_calendar', -- economic calendar
        'alternative'        -- alternative data
    );
EXCEPTION
    WHEN duplicate_object THEN null; -- type already exists: no-op on re-run
END $$;

-- Operational state of a provider.
DO $$ BEGIN
    CREATE TYPE data_sources.provider_status AS ENUM (
        'active',       -- up and running
        'inactive',     -- temporarily inactive
        'degraded',     -- working with issues
        'rate_limited', -- throttled by rate limit
        'maintenance',  -- under maintenance
        'error',        -- erroring
        'deprecated'    -- deprecated
    );
EXCEPTION
    WHEN duplicate_object THEN null; -- type already exists: no-op on re-run
END $$;

-- Granularity of the data a provider can deliver.
DO $$ BEGIN
    CREATE TYPE data_sources.data_frequency AS ENUM (
        'tick',            -- tick by tick
        'second',          -- per second
        'minute',          -- per minute
        'five_minutes',    -- every 5 minutes
        'fifteen_minutes', -- every 15 minutes
        'hourly',          -- every hour
        'daily',           -- daily
        'weekly',          -- weekly
        'monthly'          -- monthly
    );
EXCEPTION
    WHEN duplicate_object THEN null; -- type already exists: no-op on re-run
END $$;
|
||||
|
||||
-- API provider registry, scoped per tenant.
-- FIX: lifetime request counters widened INTEGER -> BIGINT; a busy market-data
-- integration can exceed 2^31 requests, and BIGINT matches the convention used
-- by data_points_count / total_records_processed elsewhere in this sprint.
CREATE TABLE IF NOT EXISTS data_sources.api_providers (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,

    -- Provider information
    name VARCHAR(100) NOT NULL,
    code VARCHAR(50) NOT NULL,                      -- 'polygon', 'alpha_vantage', 'binance'
    type data_sources.provider_type NOT NULL,
    status data_sources.provider_status NOT NULL DEFAULT 'inactive',

    -- API configuration
    base_url TEXT NOT NULL,
    api_version VARCHAR(20),
    auth_type VARCHAR(50) NOT NULL DEFAULT 'api_key', -- 'api_key', 'oauth2', 'basic', 'none'

    -- Credentials (encrypted at the application layer)
    api_key_encrypted TEXT,                         -- encrypted with pgcrypto
    api_secret_encrypted TEXT,
    additional_credentials JSONB DEFAULT '{}'::JSONB, -- other encrypted credentials

    -- Rate limiting
    rate_limit_requests INTEGER,                    -- requests allowed per period
    rate_limit_period_seconds INTEGER DEFAULT 60,
    current_request_count INTEGER NOT NULL DEFAULT 0, -- requests in the current window
    rate_limit_reset_at TIMESTAMPTZ,                -- when the current window ends
    burst_limit INTEGER,                            -- burst allowance

    -- Capabilities
    supported_symbols JSONB DEFAULT '[]'::JSONB,    -- symbols the provider serves
    supported_frequencies data_sources.data_frequency[],
    supports_realtime BOOLEAN NOT NULL DEFAULT FALSE,
    supports_historical BOOLEAN NOT NULL DEFAULT TRUE,
    max_historical_days INTEGER,                    -- max days of history available

    -- Coverage
    asset_classes VARCHAR(50)[],                    -- ['forex', 'stocks', 'crypto']
    exchanges VARCHAR(50)[],                        -- ['NYSE', 'NASDAQ', 'BINANCE']
    regions VARCHAR(50)[],                          -- ['US', 'EU', 'LATAM']

    -- Costs
    cost_type VARCHAR(20) DEFAULT 'free',           -- 'free', 'paid', 'freemium'
    monthly_cost DECIMAL(10, 2),
    cost_per_request DECIMAL(10, 6),
    free_tier_limit INTEGER,                        -- free requests per month

    -- Priority and fallback
    priority INTEGER NOT NULL DEFAULT 100,          -- lower number = higher priority
    fallback_provider_id UUID REFERENCES data_sources.api_providers(id),
    is_primary BOOLEAN NOT NULL DEFAULT FALSE,

    -- Data quality
    data_quality_score DECIMAL(5, 2),               -- 0-100
    latency_avg_ms INTEGER,
    uptime_percent DECIMAL(5, 2),

    -- Health check
    health_check_url TEXT,
    last_health_check_at TIMESTAMPTZ,
    last_health_status VARCHAR(20),
    consecutive_failures INTEGER NOT NULL DEFAULT 0,

    -- Usage statistics (lifetime counters -> BIGINT, see header note)
    total_requests BIGINT NOT NULL DEFAULT 0,
    successful_requests BIGINT NOT NULL DEFAULT 0,
    failed_requests BIGINT NOT NULL DEFAULT 0,
    last_request_at TIMESTAMPTZ,
    last_error TEXT,
    last_error_at TIMESTAMPTZ,

    -- Documentation
    documentation_url TEXT,
    terms_of_service_url TEXT,
    support_email VARCHAR(255),

    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    notes TEXT,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    activated_at TIMESTAMPTZ,
    deactivated_at TIMESTAMPTZ,

    -- Constraints
    CONSTRAINT api_providers_code_unique UNIQUE (tenant_id, code),
    CONSTRAINT api_providers_rate_check CHECK (rate_limit_requests IS NULL OR rate_limit_requests > 0)
);

COMMENT ON TABLE data_sources.api_providers IS
    'Proveedores de datos de mercado configurados por tenant';

COMMENT ON COLUMN data_sources.api_providers.api_key_encrypted IS
    'API key encriptada - usar pgcrypto para encrypt/decrypt';

COMMENT ON COLUMN data_sources.api_providers.priority IS
    'Prioridad de uso - menor numero = mayor prioridad';
|
||||
|
||||
-- Indexes
CREATE INDEX IF NOT EXISTS idx_api_providers_tenant
    ON data_sources.api_providers(tenant_id);

CREATE INDEX IF NOT EXISTS idx_api_providers_code
    ON data_sources.api_providers(code);

CREATE INDEX IF NOT EXISTS idx_api_providers_status
    ON data_sources.api_providers(status);

CREATE INDEX IF NOT EXISTS idx_api_providers_type
    ON data_sources.api_providers(type);

-- Partial index covering the hot path: pick the active provider by priority.
CREATE INDEX IF NOT EXISTS idx_api_providers_active
    ON data_sources.api_providers(tenant_id, status, priority)
    WHERE status = 'active';

CREATE INDEX IF NOT EXISTS idx_api_providers_primary
    ON data_sources.api_providers(tenant_id, is_primary)
    WHERE is_primary = TRUE;

-- GIN indexes for containment search on asset_classes and exchanges.
-- FIX: the original only created the asset_classes index even though the
-- comment promised coverage of exchanges as well.
CREATE INDEX IF NOT EXISTS idx_api_providers_assets_gin
    ON data_sources.api_providers USING GIN (asset_classes);

CREATE INDEX IF NOT EXISTS idx_api_providers_exchanges_gin
    ON data_sources.api_providers USING GIN (exchanges);
|
||||
|
||||
-- Generic row trigger: stamps updated_at on every UPDATE.
-- Shared by all tables in the data_sources schema.
CREATE OR REPLACE FUNCTION data_sources.update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Keep api_providers.updated_at current.
DROP TRIGGER IF EXISTS api_provider_updated_at ON data_sources.api_providers;
CREATE TRIGGER api_provider_updated_at
    BEFORE UPDATE ON data_sources.api_providers
    FOR EACH ROW
    EXECUTE FUNCTION data_sources.update_timestamp();
|
||||
|
||||
-- Enforce at most one primary provider per (tenant, provider type):
-- promoting a row to primary demotes any previous primary of the same type.
CREATE OR REPLACE FUNCTION data_sources.ensure_single_primary_provider()
RETURNS TRIGGER AS $$
BEGIN
    IF NEW.is_primary = TRUE THEN
        -- Demote siblings; the trigger's WHEN clause (NEW.is_primary = TRUE)
        -- keeps this inner UPDATE from re-firing recursively.
        UPDATE data_sources.api_providers
        SET is_primary = FALSE
        WHERE tenant_id = NEW.tenant_id
          AND type = NEW.type
          AND id != NEW.id
          AND is_primary = TRUE;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS api_provider_single_primary ON data_sources.api_providers;
CREATE TRIGGER api_provider_single_primary
    BEFORE INSERT OR UPDATE OF is_primary ON data_sources.api_providers
    FOR EACH ROW
    WHEN (NEW.is_primary = TRUE)
    EXECUTE FUNCTION data_sources.ensure_single_primary_provider();
|
||||
|
||||
-- Returns the best active provider of a given type for a tenant:
-- primary first, then by ascending priority. When p_symbol is supplied the
-- provider must list it in supported_symbols. Returns an all-NULL record
-- when no provider matches.
CREATE OR REPLACE FUNCTION data_sources.get_active_provider(
    p_tenant_id UUID,
    p_type data_sources.provider_type,
    p_symbol VARCHAR DEFAULT NULL
)
RETURNS data_sources.api_providers AS $$
DECLARE
    v_result data_sources.api_providers;
BEGIN
    SELECT ap.* INTO v_result
    FROM data_sources.api_providers ap
    WHERE ap.tenant_id = p_tenant_id
      AND ap.type = p_type
      AND ap.status = 'active'
      -- symbol filter is optional; supported_symbols is a JSONB array of text
      AND (p_symbol IS NULL OR p_symbol = ANY(
              SELECT jsonb_array_elements_text(ap.supported_symbols)
          ))
    ORDER BY ap.is_primary DESC, ap.priority ASC
    LIMIT 1;

    RETURN v_result;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Records one API call against a provider: lifetime counters, the current
-- rate-limit window counter, and the consecutive-failure streak.
-- p_success = TRUE counts the call as successful, FALSE as failed.
-- FIX: current_request_count previously grew without bound and
-- rate_limit_reset_at was never maintained; the window counter now restarts
-- when the window has elapsed, using rate_limit_period_seconds (default 60).
CREATE OR REPLACE FUNCTION data_sources.increment_request_count(
    p_provider_id UUID,
    p_success BOOLEAN DEFAULT TRUE
)
RETURNS VOID AS $$
BEGIN
    UPDATE data_sources.api_providers
    SET total_requests = total_requests + 1,
        successful_requests = successful_requests + CASE WHEN p_success THEN 1 ELSE 0 END,
        failed_requests = failed_requests + CASE WHEN NOT p_success THEN 1 ELSE 0 END,
        -- restart the window counter when the previous window has expired
        current_request_count = CASE
            WHEN rate_limit_reset_at IS NOT NULL AND rate_limit_reset_at <= NOW()
                THEN 1
            ELSE current_request_count + 1
        END,
        -- open a new window when none is active or the old one has expired
        rate_limit_reset_at = CASE
            WHEN rate_limit_reset_at IS NULL OR rate_limit_reset_at <= NOW()
                THEN NOW() + (COALESCE(rate_limit_period_seconds, 60) || ' seconds')::INTERVAL
            ELSE rate_limit_reset_at
        END,
        last_request_at = NOW(),
        consecutive_failures = CASE WHEN p_success THEN 0 ELSE consecutive_failures + 1 END
    WHERE id = p_provider_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Convenience view: active providers only, primary/priority first.
CREATE OR REPLACE VIEW data_sources.v_active_providers AS
SELECT
    ap.id,
    ap.tenant_id,
    ap.name,
    ap.code,
    ap.type,
    ap.status,
    ap.is_primary,
    ap.priority,
    ap.rate_limit_requests,
    ap.current_request_count,
    ap.supports_realtime,
    ap.asset_classes,
    ap.data_quality_score,
    ap.uptime_percent,
    ap.total_requests,
    ap.last_request_at
FROM data_sources.api_providers AS ap
WHERE ap.status = 'active'
ORDER BY ap.is_primary DESC, ap.priority ASC;
|
||||
|
||||
-- Row-level security: each tenant only sees its own providers.
-- current_setting(..., true) returns NULL (no rows) when the GUC is unset.
ALTER TABLE data_sources.api_providers ENABLE ROW LEVEL SECURITY;

CREATE POLICY api_providers_tenant_isolation ON data_sources.api_providers
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants
GRANT USAGE ON SCHEMA data_sources TO trading_app;
GRANT SELECT, INSERT, UPDATE, DELETE ON data_sources.api_providers TO trading_app;
GRANT SELECT ON data_sources.api_providers TO trading_readonly;
GRANT SELECT ON data_sources.v_active_providers TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.get_active_provider TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.increment_request_count TO trading_app;
|
||||
196
ddl/schemas/data_sources/tables/002_ticker_mapping.sql
Normal file
196
ddl/schemas/data_sources/tables/002_ticker_mapping.sql
Normal file
@ -0,0 +1,196 @@
|
||||
-- ============================================================================
-- SCHEMA: data_sources
-- TABLE: ticker_mapping
-- DESCRIPTION: Symbol mapping between providers and the internal system
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
-- ============================================================================

-- Translates each provider's symbol notation into the internal symbol, with
-- per-instrument precision, lot sizing and price-conversion settings.
CREATE TABLE IF NOT EXISTS data_sources.ticker_mapping (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,

    -- References
    provider_id UUID NOT NULL REFERENCES data_sources.api_providers(id) ON DELETE CASCADE,
    ticker_id UUID REFERENCES market_data.tickers(id),
    symbol_id UUID REFERENCES trading.symbols(id),

    -- Symbols
    internal_symbol VARCHAR(20) NOT NULL,  -- internal symbol: EURUSD
    provider_symbol VARCHAR(50) NOT NULL,  -- provider symbol: EUR/USD, EURUSD=X
    provider_code VARCHAR(50) NOT NULL,    -- provider code

    -- Asset type
    asset_class VARCHAR(30) NOT NULL,      -- 'forex', 'crypto', 'stock', 'index', 'commodity'
    instrument_type VARCHAR(30),           -- 'spot', 'future', 'option', 'cfd'

    -- Precision settings
    price_precision INTEGER NOT NULL DEFAULT 5, -- price decimal places
    lot_precision INTEGER NOT NULL DEFAULT 2,   -- lot decimal places
    min_lot_size DECIMAL(10, 4) DEFAULT 0.01,
    max_lot_size DECIMAL(10, 4) DEFAULT 100,
    lot_step DECIMAL(10, 4) DEFAULT 0.01,

    -- Price adjustment
    price_multiplier DECIMAL(10, 6) DEFAULT 1.0,  -- converts provider price to internal
    inverse_quote BOOLEAN NOT NULL DEFAULT FALSE, -- provider quotes the inverse pair

    -- Currencies
    base_currency VARCHAR(10),   -- EUR
    quote_currency VARCHAR(10),  -- USD

    -- State
    is_active BOOLEAN NOT NULL DEFAULT TRUE,
    is_verified BOOLEAN NOT NULL DEFAULT FALSE, -- manually verified

    -- Trading hours
    trading_hours JSONB,  -- [{"day": 0, "open": "00:00", "close": "23:59"}]
    timezone VARCHAR(50) DEFAULT 'UTC',

    -- Usage statistics
    last_data_at TIMESTAMPTZ,
    data_points_count BIGINT NOT NULL DEFAULT 0,
    error_count INTEGER NOT NULL DEFAULT 0,
    last_error TEXT,

    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    notes TEXT,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    verified_at TIMESTAMPTZ,
    verified_by UUID REFERENCES users.users(id),

    -- Constraints
    CONSTRAINT ticker_mapping_unique UNIQUE (tenant_id, provider_id, internal_symbol),
    CONSTRAINT ticker_mapping_provider_symbol_unique UNIQUE (provider_id, provider_symbol)
);

COMMENT ON TABLE data_sources.ticker_mapping IS
    'Mapeo de simbolos entre proveedores externos y nomenclatura interna';

COMMENT ON COLUMN data_sources.ticker_mapping.price_multiplier IS
    'Factor para convertir precio del proveedor a precio interno. Ej: 1.0 para EURUSD, 0.01 para JPY pairs';

COMMENT ON COLUMN data_sources.ticker_mapping.inverse_quote IS
    'TRUE si el proveedor reporta USD/EUR en lugar de EUR/USD';
|
||||
|
||||
-- Indexes
CREATE INDEX IF NOT EXISTS idx_ticker_mapping_tenant
    ON data_sources.ticker_mapping(tenant_id);

CREATE INDEX IF NOT EXISTS idx_ticker_mapping_provider
    ON data_sources.ticker_mapping(provider_id);

CREATE INDEX IF NOT EXISTS idx_ticker_mapping_internal
    ON data_sources.ticker_mapping(internal_symbol);

CREATE INDEX IF NOT EXISTS idx_ticker_mapping_provider_symbol
    ON data_sources.ticker_mapping(provider_symbol);

-- Partial index for the common "active mappings of a provider" lookup.
CREATE INDEX IF NOT EXISTS idx_ticker_mapping_active
    ON data_sources.ticker_mapping(provider_id, is_active)
    WHERE is_active = TRUE;

CREATE INDEX IF NOT EXISTS idx_ticker_mapping_asset
    ON data_sources.ticker_mapping(asset_class);

-- ticker_id is optional; index only the rows that have one.
CREATE INDEX IF NOT EXISTS idx_ticker_mapping_ticker
    ON data_sources.ticker_mapping(ticker_id)
    WHERE ticker_id IS NOT NULL;
|
||||
|
||||
-- Keep ticker_mapping.updated_at current (shared schema-level trigger fn).
DROP TRIGGER IF EXISTS ticker_mapping_updated_at ON data_sources.ticker_mapping;
CREATE TRIGGER ticker_mapping_updated_at
    BEFORE UPDATE ON data_sources.ticker_mapping
    FOR EACH ROW
    EXECUTE FUNCTION data_sources.update_timestamp();
|
||||
|
||||
-- Looks up the active mapping row for (tenant, provider code, internal symbol).
-- At most one row can match: provider code is unique per tenant and the
-- mapping is unique per (tenant, provider, internal_symbol).
-- Returns an all-NULL record when no active mapping exists.
CREATE OR REPLACE FUNCTION data_sources.get_provider_symbol(
    p_tenant_id UUID,
    p_provider_code VARCHAR,
    p_internal_symbol VARCHAR
)
RETURNS data_sources.ticker_mapping AS $$
DECLARE
    v_row data_sources.ticker_mapping;
BEGIN
    SELECT tm.* INTO v_row
    FROM data_sources.ticker_mapping AS tm
    INNER JOIN data_sources.api_providers AS ap
        ON ap.id = tm.provider_id
    WHERE tm.tenant_id = p_tenant_id
      AND ap.code = p_provider_code
      AND tm.internal_symbol = p_internal_symbol
      AND tm.is_active = TRUE;

    RETURN v_row;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Converts a raw provider price into the internal price for a mapping:
-- applies price_multiplier, optionally inverts the quote, and rounds to the
-- mapping's price_precision. Unknown mapping ids pass the price through.
-- FIX: guards the inverse_quote path against division by zero, and treats a
-- NULL price_multiplier (column is nullable) as the neutral factor 1.0
-- instead of silently producing NULL.
CREATE OR REPLACE FUNCTION data_sources.convert_provider_price(
    p_mapping_id UUID,
    p_provider_price DECIMAL
)
RETURNS DECIMAL AS $$
DECLARE
    v_mapping data_sources.ticker_mapping;
    v_converted DECIMAL;
BEGIN
    SELECT * INTO v_mapping FROM data_sources.ticker_mapping WHERE id = p_mapping_id;

    -- Mapping not found (record is all-NULL): return the price unchanged.
    IF v_mapping IS NULL THEN
        RETURN p_provider_price;
    END IF;

    v_converted := p_provider_price * COALESCE(v_mapping.price_multiplier, 1.0);

    IF v_mapping.inverse_quote THEN
        -- a zero price cannot be inverted; signal "no convertible price"
        IF v_converted = 0 THEN
            RETURN NULL;
        END IF;
        v_converted := 1.0 / v_converted;
    END IF;

    RETURN ROUND(v_converted, v_mapping.price_precision);
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Active mappings joined with their provider, ordered for display.
CREATE OR REPLACE VIEW data_sources.v_ticker_mappings AS
SELECT
    tm.id,
    tm.tenant_id,
    ap.name AS provider_name,
    ap.code AS provider_code,
    tm.internal_symbol,
    tm.provider_symbol,
    tm.asset_class,
    tm.price_precision,
    tm.price_multiplier,
    tm.inverse_quote,
    tm.is_active,
    tm.is_verified,
    tm.last_data_at,
    tm.data_points_count
FROM data_sources.ticker_mapping AS tm
INNER JOIN data_sources.api_providers AS ap
    ON ap.id = tm.provider_id
WHERE tm.is_active = TRUE
ORDER BY tm.internal_symbol, ap.priority;
|
||||
|
||||
-- Row-level security: tenant isolation on ticker_mapping.
ALTER TABLE data_sources.ticker_mapping ENABLE ROW LEVEL SECURITY;

CREATE POLICY ticker_mapping_tenant_isolation ON data_sources.ticker_mapping
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants
GRANT SELECT, INSERT, UPDATE, DELETE ON data_sources.ticker_mapping TO trading_app;
GRANT SELECT ON data_sources.ticker_mapping TO trading_readonly;
GRANT SELECT ON data_sources.v_ticker_mappings TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.get_provider_symbol TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.convert_provider_price TO trading_app;
|
||||
394
ddl/schemas/data_sources/tables/003_data_sync_status.sql
Normal file
394
ddl/schemas/data_sources/tables/003_data_sync_status.sql
Normal file
@ -0,0 +1,394 @@
|
||||
-- ============================================================================
-- SCHEMA: data_sources
-- TABLE: data_sync_status
-- DESCRIPTION: Data synchronization state per ticker/provider
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
-- ============================================================================

-- Lifecycle state of a synchronization stream.
DO $$ BEGIN
    CREATE TYPE data_sources.sync_status AS ENUM (
        'idle',         -- no activity
        'pending',      -- waiting to sync
        'syncing',      -- sync in progress
        'completed',    -- finished successfully
        'partial',      -- partially completed
        'failed',       -- failed
        'rate_limited', -- throttled by rate limit
        'paused',       -- manually paused
        'disabled'      -- disabled
    );
EXCEPTION
    WHEN duplicate_object THEN null; -- type already exists: no-op on re-run
END $$;

-- What kind of synchronization run this is.
DO $$ BEGIN
    CREATE TYPE data_sources.sync_type AS ENUM (
        'realtime',     -- real-time data
        'historical',   -- historical data
        'backfill',     -- gap filling
        'correction',   -- data correction
        'initial_load'  -- initial load
    );
EXCEPTION
    WHEN duplicate_object THEN null; -- type already exists: no-op on re-run
END $$;
|
||||
|
||||
-- Per-(tenant, mapping, timeframe) synchronization state: schedule, cursor
-- position, gap tracking, error/backoff bookkeeping and a worker lock.
CREATE TABLE IF NOT EXISTS data_sources.data_sync_status (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,

    -- References
    provider_id UUID NOT NULL REFERENCES data_sources.api_providers(id) ON DELETE CASCADE,
    mapping_id UUID NOT NULL REFERENCES data_sources.ticker_mapping(id) ON DELETE CASCADE,
    ticker_id UUID REFERENCES market_data.tickers(id),

    -- Symbol
    symbol VARCHAR(20) NOT NULL,
    timeframe VARCHAR(10) NOT NULL DEFAULT '5m',  -- '1m', '5m', '15m', '1h', '1d'

    -- State and type
    status data_sources.sync_status NOT NULL DEFAULT 'idle',
    sync_type data_sources.sync_type NOT NULL DEFAULT 'realtime',

    -- Configuration
    is_enabled BOOLEAN NOT NULL DEFAULT TRUE,
    priority INTEGER NOT NULL DEFAULT 100,
    sync_interval_seconds INTEGER DEFAULT 300,  -- sync cadence

    -- Data range
    data_start_date DATE,        -- first date available
    data_end_date DATE,          -- last date available
    requested_start_date DATE,   -- requested start date
    requested_end_date DATE,     -- requested end date

    -- Last sync state
    last_sync_started_at TIMESTAMPTZ,
    last_sync_completed_at TIMESTAMPTZ,
    last_sync_duration_ms INTEGER,
    last_sync_status data_sources.sync_status,

    -- Last sync metrics
    last_records_fetched INTEGER DEFAULT 0,
    last_records_inserted INTEGER DEFAULT 0,
    last_records_updated INTEGER DEFAULT 0,
    last_records_skipped INTEGER DEFAULT 0,

    -- Running totals
    total_syncs INTEGER NOT NULL DEFAULT 0,
    successful_syncs INTEGER NOT NULL DEFAULT 0,
    failed_syncs INTEGER NOT NULL DEFAULT 0,
    total_records_processed BIGINT NOT NULL DEFAULT 0,

    -- Sync cursor
    last_synced_timestamp TIMESTAMPTZ,  -- last timestamp synced
    last_synced_id VARCHAR(100),        -- last ID (for pagination)
    watermark JSONB DEFAULT '{}'::JSONB, -- position markers

    -- Detected gaps
    has_gaps BOOLEAN NOT NULL DEFAULT FALSE,
    gap_periods JSONB DEFAULT '[]'::JSONB,  -- [{"start": "...", "end": "..."}]
    last_gap_check_at TIMESTAMPTZ,

    -- Errors
    consecutive_failures INTEGER NOT NULL DEFAULT 0,
    last_error_code VARCHAR(50),
    last_error_message TEXT,
    last_error_at TIMESTAMPTZ,
    error_count INTEGER NOT NULL DEFAULT 0,

    -- Rate limiting
    rate_limit_remaining INTEGER,
    rate_limit_reset_at TIMESTAMPTZ,
    backoff_until TIMESTAMPTZ,  -- wait until this time

    -- Scheduling
    next_sync_at TIMESTAMPTZ,
    cron_expression VARCHAR(100),  -- for scheduled syncs

    -- Locking
    locked_by VARCHAR(100),  -- ID of the worker holding the lock
    locked_at TIMESTAMPTZ,
    lock_expires_at TIMESTAMPTZ,

    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    notes TEXT,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Constraints
    CONSTRAINT sync_status_unique UNIQUE (tenant_id, mapping_id, timeframe)
);

COMMENT ON TABLE data_sources.data_sync_status IS
    'Estado de sincronizacion de datos de mercado por simbolo, proveedor y timeframe';

COMMENT ON COLUMN data_sources.data_sync_status.watermark IS
    'Marcadores de posicion para retomar sincronizacion. Estructura depende del proveedor';

COMMENT ON COLUMN data_sources.data_sync_status.gap_periods IS
    'Periodos sin datos detectados: [{"start": "2026-01-01T00:00:00Z", "end": "2026-01-01T05:00:00Z"}]';
|
||||
|
||||
-- Indexes
CREATE INDEX IF NOT EXISTS idx_sync_status_tenant
    ON data_sources.data_sync_status(tenant_id);

CREATE INDEX IF NOT EXISTS idx_sync_status_provider
    ON data_sources.data_sync_status(provider_id);

CREATE INDEX IF NOT EXISTS idx_sync_status_mapping
    ON data_sources.data_sync_status(mapping_id);

CREATE INDEX IF NOT EXISTS idx_sync_status_symbol
    ON data_sources.data_sync_status(symbol, timeframe);

CREATE INDEX IF NOT EXISTS idx_sync_status_status
    ON data_sources.data_sync_status(status);

-- Scheduler hot path: streams eligible for the next sync run.
CREATE INDEX IF NOT EXISTS idx_sync_status_pending
    ON data_sources.data_sync_status(next_sync_at, status)
    WHERE is_enabled = TRUE AND status IN ('idle', 'pending');

CREATE INDEX IF NOT EXISTS idx_sync_status_failed
    ON data_sources.data_sync_status(consecutive_failures DESC)
    WHERE status = 'failed';

CREATE INDEX IF NOT EXISTS idx_sync_status_gaps
    ON data_sources.data_sync_status(has_gaps, last_gap_check_at)
    WHERE has_gaps = TRUE;

CREATE INDEX IF NOT EXISTS idx_sync_status_locked
    ON data_sources.data_sync_status(locked_by, lock_expires_at)
    WHERE locked_by IS NOT NULL;
|
||||
|
||||
-- Keep data_sync_status.updated_at current (shared schema-level trigger fn).
DROP TRIGGER IF EXISTS sync_status_updated_at ON data_sources.data_sync_status;
CREATE TRIGGER sync_status_updated_at
    BEFORE UPDATE ON data_sources.data_sync_status
    FOR EACH ROW
    EXECUTE FUNCTION data_sources.update_timestamp();
|
||||
|
||||
-- Tries to take the worker lock on a sync row. Succeeds when the row is
-- unlocked or the previous lock has expired; the single UPDATE is atomic,
-- so concurrent workers cannot both acquire it. Returns TRUE on success.
CREATE OR REPLACE FUNCTION data_sources.acquire_sync_lock(
    p_sync_id UUID,
    p_worker_id VARCHAR,
    p_lock_duration_seconds INTEGER DEFAULT 300
)
RETURNS BOOLEAN AS $$
BEGIN
    UPDATE data_sources.data_sync_status
    SET locked_by = p_worker_id,
        locked_at = NOW(),
        lock_expires_at = NOW() + (p_lock_duration_seconds || ' seconds')::INTERVAL
    WHERE id = p_sync_id
      AND (locked_by IS NULL OR lock_expires_at < NOW());

    -- FOUND reflects whether the guarded UPDATE touched the row.
    RETURN FOUND;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Releases the worker lock on a sync row, but only if this worker holds it
-- (prevents one worker from clearing another's lock). Returns TRUE when the
-- lock was actually released.
CREATE OR REPLACE FUNCTION data_sources.release_sync_lock(
    p_sync_id UUID,
    p_worker_id VARCHAR
)
RETURNS BOOLEAN AS $$
BEGIN
    UPDATE data_sources.data_sync_status
    SET locked_by = NULL,
        locked_at = NULL,
        lock_expires_at = NULL
    WHERE id = p_sync_id
      AND locked_by = p_worker_id;

    RETURN FOUND;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Marks a sync run as started: flips status to 'syncing', records the start
-- time and zeroes the per-run record counters.
CREATE OR REPLACE FUNCTION data_sources.mark_sync_started(p_sync_id UUID)
RETURNS VOID AS $$
BEGIN
    UPDATE data_sources.data_sync_status
    SET status = 'syncing',
        last_sync_started_at = NOW(),
        last_records_fetched = 0,
        last_records_inserted = 0,
        last_records_updated = 0,
        last_records_skipped = 0
    WHERE id = p_sync_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Marks a sync run as successfully completed: records per-run metrics,
-- bumps lifetime counters, advances the sync cursor and schedules the next run.
-- FIX 1: if mark_sync_started was never called, last_sync_started_at is NULL
--        and NOW() - NULL made the duration silently NULL; now handled explicitly.
-- FIX 2: a NULL sync_interval_seconds made next_sync_at NULL, which
--        get_pending_syncs treats as due-now -> immediate tight resync loop;
--        fall back to the column's documented default of 300s.
CREATE OR REPLACE FUNCTION data_sources.mark_sync_completed(
    p_sync_id UUID,
    p_records_fetched INTEGER,
    p_records_inserted INTEGER,
    p_records_updated INTEGER,
    p_records_skipped INTEGER,
    p_last_timestamp TIMESTAMPTZ DEFAULT NULL
)
RETURNS VOID AS $$
DECLARE
    v_started_at TIMESTAMPTZ;
    v_duration_ms INTEGER;
BEGIN
    SELECT last_sync_started_at INTO v_started_at
    FROM data_sources.data_sync_status WHERE id = p_sync_id;

    IF v_started_at IS NOT NULL THEN
        v_duration_ms := (EXTRACT(EPOCH FROM (NOW() - v_started_at)) * 1000)::INTEGER;
    END IF;  -- otherwise leave the duration NULL deliberately

    UPDATE data_sources.data_sync_status
    SET status = 'completed',
        last_sync_completed_at = NOW(),
        last_sync_duration_ms = v_duration_ms,
        last_sync_status = 'completed',
        last_records_fetched = p_records_fetched,
        last_records_inserted = p_records_inserted,
        last_records_updated = p_records_updated,
        last_records_skipped = p_records_skipped,
        total_syncs = total_syncs + 1,
        successful_syncs = successful_syncs + 1,
        total_records_processed = total_records_processed + p_records_inserted + p_records_updated,
        -- only advance the cursor when the caller supplied a newer timestamp
        last_synced_timestamp = COALESCE(p_last_timestamp, last_synced_timestamp),
        consecutive_failures = 0,
        next_sync_at = NOW() + (COALESCE(sync_interval_seconds, 300) || ' seconds')::INTERVAL
    WHERE id = p_sync_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Marks a sync run as failed (or rate-limited), recording the error and
-- scheduling the retry with exponential backoff.
CREATE OR REPLACE FUNCTION data_sources.mark_sync_failed(
    p_sync_id UUID,
    p_error_code VARCHAR,
    p_error_message TEXT,
    p_is_rate_limited BOOLEAN DEFAULT FALSE
)
RETURNS VOID AS $$
DECLARE
    v_failures INTEGER;
    v_backoff INTEGER;
    v_new_status data_sources.sync_status;
BEGIN
    SELECT consecutive_failures + 1 INTO v_failures
    FROM data_sources.data_sync_status WHERE id = p_sync_id;

    -- Exponential backoff: 60s, 120s, 240s, 480s, ... capped at 3600s.
    v_backoff := LEAST(60 * POWER(2, v_failures - 1), 3600);

    v_new_status := CASE
        WHEN p_is_rate_limited THEN 'rate_limited'::data_sources.sync_status
        ELSE 'failed'::data_sources.sync_status
    END;

    UPDATE data_sources.data_sync_status
    SET status = v_new_status,
        last_sync_status = v_new_status,
        last_sync_completed_at = NOW(),
        total_syncs = total_syncs + 1,
        failed_syncs = failed_syncs + 1,
        consecutive_failures = v_failures,
        last_error_code = p_error_code,
        last_error_message = p_error_message,
        last_error_at = NOW(),
        error_count = error_count + 1,
        backoff_until = NOW() + (v_backoff || ' seconds')::INTERVAL,
        next_sync_at = NOW() + (v_backoff || ' seconds')::INTERVAL
    WHERE id = p_sync_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Returns up to p_limit sync streams that are due to run for a tenant:
-- enabled, in a runnable status, past their scheduled time and backoff
-- window, and not held by a live worker lock. Highest priority first
-- (lower number = higher priority).
CREATE OR REPLACE FUNCTION data_sources.get_pending_syncs(
    p_tenant_id UUID,
    p_limit INTEGER DEFAULT 10
)
RETURNS SETOF data_sources.data_sync_status AS $$
BEGIN
    RETURN QUERY
    SELECT ss.*
    FROM data_sources.data_sync_status AS ss
    WHERE ss.tenant_id = p_tenant_id
      AND ss.is_enabled = TRUE
      AND ss.status IN ('idle', 'pending', 'completed')
      AND (ss.next_sync_at IS NULL OR ss.next_sync_at <= NOW())
      AND (ss.backoff_until IS NULL OR ss.backoff_until <= NOW())
      AND (ss.locked_by IS NULL OR ss.lock_expires_at < NOW())
    ORDER BY ss.priority ASC, ss.next_sync_at ASC NULLS FIRST
    LIMIT p_limit;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Dashboard view: one row per sync stream with provider info and a derived
-- success rate (NULL while total_syncs = 0, via NULLIF).
CREATE OR REPLACE VIEW data_sources.v_sync_status_overview AS
SELECT
    ss.id,
    ss.tenant_id,
    ap.name AS provider_name,
    ap.code AS provider_code,
    ss.symbol,
    ss.timeframe,
    ss.status,
    ss.sync_type,
    ss.is_enabled,
    ss.last_sync_completed_at,
    ss.last_sync_duration_ms,
    ss.last_records_fetched,
    ss.total_syncs,
    ss.successful_syncs,
    ss.failed_syncs,
    ROUND((ss.successful_syncs::DECIMAL / NULLIF(ss.total_syncs, 0) * 100), 2) AS success_rate,
    ss.consecutive_failures,
    ss.has_gaps,
    ss.next_sync_at,
    ss.locked_by IS NOT NULL AS is_locked
FROM data_sources.data_sync_status AS ss
INNER JOIN data_sources.api_providers AS ap
    ON ap.id = ss.provider_id
ORDER BY ss.symbol, ss.timeframe;
|
||||
|
||||
-- Troubleshooting view: streams that are currently failed or carrying a
-- failure streak, worst first.
CREATE OR REPLACE VIEW data_sources.v_failed_syncs AS
SELECT
    ss.id,
    ap.name AS provider_name,
    ss.symbol,
    ss.timeframe,
    ss.consecutive_failures,
    ss.last_error_code,
    ss.last_error_message,
    ss.last_error_at,
    ss.backoff_until,
    ss.next_sync_at
FROM data_sources.data_sync_status AS ss
INNER JOIN data_sources.api_providers AS ap
    ON ap.id = ss.provider_id
WHERE ss.status = 'failed'
   OR ss.consecutive_failures > 0
ORDER BY ss.consecutive_failures DESC, ss.last_error_at DESC;
|
||||
|
||||
-- Row-level security: tenant isolation on data_sync_status.
ALTER TABLE data_sources.data_sync_status ENABLE ROW LEVEL SECURITY;

CREATE POLICY sync_status_tenant_isolation ON data_sources.data_sync_status
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants
GRANT SELECT, INSERT, UPDATE, DELETE ON data_sources.data_sync_status TO trading_app;
GRANT SELECT ON data_sources.data_sync_status TO trading_readonly;
GRANT SELECT ON data_sources.v_sync_status_overview TO trading_app;
GRANT SELECT ON data_sources.v_failed_syncs TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.acquire_sync_lock TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.release_sync_lock TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.mark_sync_started TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.mark_sync_completed TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.mark_sync_failed TO trading_app;
GRANT EXECUTE ON FUNCTION data_sources.get_pending_syncs TO trading_app;
|
||||
295
ddl/schemas/ml/003_range_predictions.sql
Normal file
295
ddl/schemas/ml/003_range_predictions.sql
Normal file
@ -0,0 +1,295 @@
|
||||
-- ============================================================================
|
||||
-- SCHEMA: ml
|
||||
-- TABLE: range_predictions
|
||||
-- DESCRIPTION: Predicciones de rango diario/semanal (High/Low esperados)
|
||||
-- VERSION: 1.0.0
|
||||
-- CREATED: 2026-01-16
|
||||
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
|
||||
-- NOTE: Tabla particionada por fecha para alto volumen
|
||||
-- ============================================================================
|
||||
|
||||
-- Enum: kind of price range a prediction covers.
-- Wrapped in DO so the migration is idempotent (duplicate_object is swallowed).
DO $$ BEGIN
    CREATE TYPE ml.range_type AS ENUM (
        'daily',           -- Daily range
        'weekly',          -- Weekly range
        'session_asia',    -- Asian session
        'session_london',  -- London session
        'session_ny',      -- New York session
        'kill_zone',       -- ICT kill zones
        'custom'           -- Custom range
    );
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- Enum: validation outcome of a range prediction.
-- Idempotent creation (duplicate_object swallowed on re-run).
DO $$ BEGIN
    CREATE TYPE ml.range_result AS ENUM (
        'pending',        -- Awaiting validation
        'hit_high',       -- Predicted high was reached
        'hit_low',        -- Predicted low was reached
        'hit_both',       -- Both extremes were reached
        'missed',         -- Neither extreme was reached
        'partial_high',   -- Price moved partially toward the high
        'partial_low',    -- Price moved partially toward the low
        'invalidated'     -- Prediction invalidated
    );
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- Range-prediction table, partitioned by prediction_date for high volume.
-- One row = one model prediction of a period's high/low plus, after the
-- period closes, the realized values and accuracy metrics.
CREATE TABLE IF NOT EXISTS ml.range_predictions (
    -- Identifiers
    id UUID DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,

    -- Symbol and range kind
    symbol VARCHAR(20) NOT NULL,
    asset_class ml.asset_class NOT NULL DEFAULT 'FOREX',
    range_type ml.range_type NOT NULL DEFAULT 'daily',

    -- Prediction window
    prediction_date DATE NOT NULL,             -- Partition key; date the prediction is for
    valid_from TIMESTAMPTZ NOT NULL,           -- Validity start
    valid_until TIMESTAMPTZ NOT NULL,          -- Validity end

    -- Predicted range
    predicted_high DECIMAL(15, 8) NOT NULL,    -- Expected high
    predicted_low DECIMAL(15, 8) NOT NULL,     -- Expected low
    predicted_open DECIMAL(15, 8),             -- Expected open (optional)
    predicted_close DECIMAL(15, 8),            -- Expected close (optional)

    -- Confidence bands around each extreme
    high_confidence_upper DECIMAL(15, 8),      -- Upper bound for the high
    high_confidence_lower DECIMAL(15, 8),      -- Lower bound for the high
    low_confidence_upper DECIMAL(15, 8),       -- Upper bound for the low
    low_confidence_lower DECIMAL(15, 8),       -- Lower bound for the low

    -- Expected magnitudes, in pips
    expected_range_pips DECIMAL(10, 2),        -- |high - low|
    expected_atr_pips DECIMAL(10, 2),          -- Expected ATR

    -- Classic key levels
    pivot_point DECIMAL(15, 8),
    resistance_1 DECIMAL(15, 8),
    resistance_2 DECIMAL(15, 8),
    support_1 DECIMAL(15, 8),
    support_2 DECIMAL(15, 8),

    -- Model confidence (normalized 0..1)
    confidence_score DECIMAL(5, 4) NOT NULL DEFAULT 0.5
        CHECK (confidence_score BETWEEN 0 AND 1),
    model_version VARCHAR(50) NOT NULL,
    model_id VARCHAR(100),

    -- Inputs snapshot (free-form JSON)
    features_used JSONB DEFAULT '{}'::JSONB,   -- Model features
    market_context JSONB DEFAULT '{}'::JSONB,  -- Market context

    -- Realized outcome (filled in during validation)
    result ml.range_result NOT NULL DEFAULT 'pending',
    actual_high DECIMAL(15, 8),
    actual_low DECIMAL(15, 8),
    actual_open DECIMAL(15, 8),
    actual_close DECIMAL(15, 8),

    -- Accuracy metrics
    high_error_pips DECIMAL(10, 2),            -- Error on the high prediction
    low_error_pips DECIMAL(10, 2),             -- Error on the low prediction
    range_error_percent DECIMAL(5, 2),         -- Percent error on the range size

    -- Timing of the realized extremes
    time_to_high_minutes INTEGER,              -- Minutes until the high was reached
    time_to_low_minutes INTEGER,               -- Minutes until the low was reached
    high_hit_first BOOLEAN,                    -- TRUE if the high was reached before the low

    -- Period statistics
    max_drawdown_pips DECIMAL(10, 2),          -- Max drawdown from the open
    volatility_actual DECIMAL(10, 4),          -- Realized volatility

    -- Metadata
    notes TEXT,
    metadata JSONB DEFAULT '{}'::JSONB,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    validated_at TIMESTAMPTZ,

    -- PK must include the partition column on a partitioned table
    PRIMARY KEY (id, prediction_date),

    -- Sanity constraint: the predicted range must be non-degenerate
    CONSTRAINT range_pred_high_gt_low CHECK (predicted_high > predicted_low)
) PARTITION BY RANGE (prediction_date);

COMMENT ON TABLE ml.range_predictions IS
'Daily/weekly range (high/low) predictions. Partitioned by date for high volume';
|
||||
|
||||
-- Quarterly partitions for 2026. Upper bounds are exclusive, so consecutive
-- quarters share a boundary without overlap. Partitions for later years must
-- be created before rows with those dates are inserted.
CREATE TABLE IF NOT EXISTS ml.range_predictions_2026_q1
    PARTITION OF ml.range_predictions
    FOR VALUES FROM ('2026-01-01') TO ('2026-04-01');

CREATE TABLE IF NOT EXISTS ml.range_predictions_2026_q2
    PARTITION OF ml.range_predictions
    FOR VALUES FROM ('2026-04-01') TO ('2026-07-01');

CREATE TABLE IF NOT EXISTS ml.range_predictions_2026_q3
    PARTITION OF ml.range_predictions
    FOR VALUES FROM ('2026-07-01') TO ('2026-10-01');

CREATE TABLE IF NOT EXISTS ml.range_predictions_2026_q4
    PARTITION OF ml.range_predictions
    FOR VALUES FROM ('2026-10-01') TO ('2027-01-01');
|
||||
|
||||
-- Indexes declared on the parent table (cascade to every partition).
CREATE INDEX IF NOT EXISTS idx_range_pred_tenant
    ON ml.range_predictions(tenant_id);

-- Per-symbol history, newest first
CREATE INDEX IF NOT EXISTS idx_range_pred_symbol
    ON ml.range_predictions(symbol, prediction_date DESC);

CREATE INDEX IF NOT EXISTS idx_range_pred_date
    ON ml.range_predictions(prediction_date DESC);

CREATE INDEX IF NOT EXISTS idx_range_pred_type
    ON ml.range_predictions(range_type, prediction_date DESC);

-- Partial index: only predictions still awaiting validation
CREATE INDEX IF NOT EXISTS idx_range_pred_pending
    ON ml.range_predictions(result, valid_until)
    WHERE result = 'pending';

-- Model performance lookups
CREATE INDEX IF NOT EXISTS idx_range_pred_model
    ON ml.range_predictions(model_version, prediction_date DESC);
|
||||
|
||||
-- Shared trigger function: stamp updated_at on every UPDATE.
-- CREATE OR REPLACE so re-running the migration is harmless.
CREATE OR REPLACE FUNCTION ml.update_ml_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Attach the updated_at trigger (DROP first for idempotency).
DROP TRIGGER IF EXISTS range_pred_updated_at ON ml.range_predictions;
CREATE TRIGGER range_pred_updated_at
    BEFORE UPDATE ON ml.range_predictions
    FOR EACH ROW
    EXECUTE FUNCTION ml.update_ml_timestamp();
|
||||
|
||||
-- Validate a range prediction against realized OHLC values and persist
-- the outcome.
--
-- Parameters:
--   p_prediction_id, p_prediction_date : composite PK of the prediction
--     (the table is partitioned by prediction_date, so both are required).
--   p_actual_high, p_actual_low        : realized extremes of the period.
--   p_actual_open, p_actual_close      : optional realized open/close.
--
-- Returns the computed ml.range_result, or NULL when no such prediction
-- exists.
--
-- NOTE(review): pip conversion hard-codes a 4-decimal quote (* 10000);
-- that is wrong for JPY pairs and non-forex asset classes — confirm
-- before relying on *_error_pips for those symbols.
-- NOTE(review): the partial_high/partial_low thresholds (10% of price)
-- are far looser than v_tolerance (5 pips); 'missed' is nearly
-- unreachable for most FX prices — confirm the intended semantics.
CREATE OR REPLACE FUNCTION ml.validate_range_prediction(
    p_prediction_id UUID,
    p_prediction_date DATE,
    p_actual_high DECIMAL,
    p_actual_low DECIMAL,
    p_actual_open DECIMAL DEFAULT NULL,
    p_actual_close DECIMAL DEFAULT NULL
)
RETURNS ml.range_result AS $$
DECLARE
    v_pred ml.range_predictions;
    v_result ml.range_result;
    v_tolerance DECIMAL := 0.0005; -- 5 pips tolerance
BEGIN
    -- Lock the row so a concurrent validation cannot interleave between
    -- this read and the UPDATE below (the original SELECT-then-UPDATE
    -- was race-prone).
    SELECT * INTO v_pred
    FROM ml.range_predictions
    WHERE id = p_prediction_id AND prediction_date = p_prediction_date
    FOR UPDATE;

    IF NOT FOUND THEN
        RETURN NULL;
    END IF;

    -- Classify the outcome, most specific case first.
    IF p_actual_high >= v_pred.predicted_high * (1 - v_tolerance)
       AND p_actual_low <= v_pred.predicted_low * (1 + v_tolerance) THEN
        v_result := 'hit_both';
    ELSIF p_actual_high >= v_pred.predicted_high * (1 - v_tolerance) THEN
        v_result := 'hit_high';
    ELSIF p_actual_low <= v_pred.predicted_low * (1 + v_tolerance) THEN
        v_result := 'hit_low';
    ELSIF p_actual_high >= v_pred.predicted_high * 0.9 THEN
        v_result := 'partial_high';
    ELSIF p_actual_low <= v_pred.predicted_low * 1.1 THEN
        v_result := 'partial_low';
    ELSE
        v_result := 'missed';
    END IF;

    -- Persist the realized values and accuracy metrics.
    -- (predicted_high - predicted_low) is > 0 by table CHECK constraint,
    -- so the percent-error division is safe.
    UPDATE ml.range_predictions
    SET result = v_result,
        actual_high = p_actual_high,
        actual_low = p_actual_low,
        actual_open = p_actual_open,
        actual_close = p_actual_close,
        high_error_pips = ABS(p_actual_high - predicted_high) * 10000,
        low_error_pips = ABS(p_actual_low - predicted_low) * 10000,
        range_error_percent = ABS((p_actual_high - p_actual_low) - (predicted_high - predicted_low))
                              / (predicted_high - predicted_low) * 100,
        validated_at = NOW()
    WHERE id = p_prediction_id AND prediction_date = p_prediction_date;

    RETURN v_result;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Aggregate accuracy of range predictions per symbol / range kind / model.
-- Only validated rows (result != 'pending') are counted.
CREATE OR REPLACE VIEW ml.v_range_prediction_performance AS
SELECT
    symbol,
    range_type,
    model_version,
    COUNT(*) AS total_predictions,
    COUNT(*) FILTER (WHERE result IN ('hit_both', 'hit_high', 'hit_low')) AS successful,
    COUNT(*) FILTER (WHERE result = 'missed') AS missed,
    -- NULLIF guards the division when a group has no validated rows
    ROUND((COUNT(*) FILTER (WHERE result IN ('hit_both', 'hit_high', 'hit_low'))::DECIMAL
           / NULLIF(COUNT(*) FILTER (WHERE result != 'pending'), 0) * 100), 2) AS accuracy_percent,
    ROUND(AVG(confidence_score)::NUMERIC, 4) AS avg_confidence,
    ROUND(AVG(high_error_pips)::NUMERIC, 2) AS avg_high_error_pips,
    ROUND(AVG(low_error_pips)::NUMERIC, 2) AS avg_low_error_pips,
    MAX(prediction_date) AS last_prediction_date
FROM ml.range_predictions
WHERE result != 'pending'
GROUP BY symbol, range_type, model_version
ORDER BY accuracy_percent DESC NULLS LAST;
|
||||
|
||||
-- Pending range predictions still inside their validity window,
-- highest-confidence first within the most recent dates.
CREATE OR REPLACE VIEW ml.v_active_range_predictions AS
SELECT
    rp.id,
    rp.tenant_id,
    rp.symbol,
    rp.range_type,
    rp.prediction_date,
    rp.predicted_high,
    rp.predicted_low,
    rp.expected_range_pips,
    rp.pivot_point,
    rp.resistance_1,
    rp.support_1,
    rp.confidence_score,
    rp.model_version,
    rp.valid_until
FROM ml.range_predictions AS rp
WHERE rp.result = 'pending'
  AND rp.valid_until > NOW()
ORDER BY rp.prediction_date DESC, rp.confidence_score DESC;
|
||||
|
||||
-- Row Level Security: per-tenant isolation via the session GUC
-- app.current_tenant_id (NULL when unset => policy matches nothing).
ALTER TABLE ml.range_predictions ENABLE ROW LEVEL SECURITY;

CREATE POLICY range_pred_tenant ON ml.range_predictions
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants: app writes but never deletes (no DELETE grant, predictions are
-- an audit trail); readonly role only reads the base table.
GRANT SELECT, INSERT, UPDATE ON ml.range_predictions TO trading_app;
GRANT SELECT ON ml.range_predictions TO trading_readonly;
GRANT SELECT ON ml.v_range_prediction_performance TO trading_app;
GRANT SELECT ON ml.v_active_range_predictions TO trading_app;
GRANT EXECUTE ON FUNCTION ml.validate_range_prediction TO trading_app;
|
||||
408
ddl/schemas/ml/004_entry_signals.sql
Normal file
408
ddl/schemas/ml/004_entry_signals.sql
Normal file
@ -0,0 +1,408 @@
|
||||
-- ============================================================================
|
||||
-- SCHEMA: ml
|
||||
-- TABLE: entry_signals
|
||||
-- DESCRIPTION: Senales de entrada generadas por modelos ML
|
||||
-- VERSION: 1.0.0
|
||||
-- CREATED: 2026-01-16
|
||||
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
|
||||
-- ============================================================================
|
||||
|
||||
-- Enum: methodology that produced an entry signal.
-- Idempotent creation (duplicate_object swallowed on re-run).
DO $$ BEGIN
    CREATE TYPE ml.signal_methodology AS ENUM (
        'amd',                 -- Accumulation, Manipulation, Distribution
        'ict_smc',             -- ICT Smart Money Concepts
        'order_blocks',        -- Order Blocks
        'fair_value_gap',      -- Fair Value Gaps
        'liquidity_sweep',     -- Liquidity Sweeps
        'break_of_structure',  -- Break of Structure
        'supply_demand',       -- Supply & Demand zones
        'technical',           -- Classic technical indicators
        'pattern',             -- Price patterns
        'ensemble'             -- Combination of methodologies
    );
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- Enum: detected AMD market phase.
-- Idempotent creation (duplicate_object swallowed on re-run).
DO $$ BEGIN
    CREATE TYPE ml.amd_phase AS ENUM (
        'accumulation',   -- Accumulation phase
        'manipulation',   -- Manipulation phase
        'distribution',   -- Distribution phase
        'expansion',      -- Expansion phase
        'retracement',    -- Retracement phase
        'consolidation',  -- Consolidation
        'unknown'         -- Phase could not be determined
    );
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- Enum: lifecycle state of an entry signal.
-- Idempotent creation (duplicate_object swallowed on re-run).
DO $$ BEGIN
    CREATE TYPE ml.entry_signal_status AS ENUM (
        'generated',    -- Produced by the model
        'pending',      -- Waiting for activation
        'active',       -- Active (price reached the entry zone)
        'triggered',    -- Triggered (order executed)
        'expired',      -- Expired without activating
        'invalidated',  -- Invalidated by market conditions
        'cancelled'     -- Cancelled
    );
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- ML-generated entry signals (ICT/SMC/AMD methodologies).
-- One row = one signal: entry zone, SL/TP levels, model confidence,
-- technical context, and (once closed) the realized result.
CREATE TABLE IF NOT EXISTS ml.entry_signals (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    user_id UUID,                                -- NULL for public signals

    -- References
    range_prediction_id UUID,                    -- Associated range prediction (no FK: partitioned PK)
    ticker_id UUID REFERENCES market_data.tickers(id),

    -- Symbol and timeframe
    symbol VARCHAR(20) NOT NULL,
    asset_class ml.asset_class NOT NULL DEFAULT 'FOREX',
    timeframe VARCHAR(10) NOT NULL DEFAULT 'H1',
    session VARCHAR(20),                         -- 'asia', 'london', 'ny'

    -- Methodology
    methodology ml.signal_methodology NOT NULL,
    amd_phase ml.amd_phase,

    -- Direction
    direction VARCHAR(10) NOT NULL CHECK (direction IN ('LONG', 'SHORT')),

    -- Lifecycle state
    status ml.entry_signal_status NOT NULL DEFAULT 'generated',

    -- Entry zone
    entry_zone_high DECIMAL(15, 8) NOT NULL,     -- Upper entry bound
    entry_zone_low DECIMAL(15, 8) NOT NULL,      -- Lower entry bound
    optimal_entry DECIMAL(15, 8),                -- Optimal entry price

    -- Stop loss
    stop_loss DECIMAL(15, 8) NOT NULL,
    stop_loss_pips DECIMAL(10, 2),               -- Derived by trigger from optimal_entry/stop_loss
    invalidation_price DECIMAL(15, 8),           -- Price that invalidates the signal

    -- Take profits (pips/RR columns are derived by trigger)
    take_profit_1 DECIMAL(15, 8),
    take_profit_2 DECIMAL(15, 8),
    take_profit_3 DECIMAL(15, 8),
    tp1_pips DECIMAL(10, 2),
    tp2_pips DECIMAL(10, 2),
    tp3_pips DECIMAL(10, 2),

    -- Risk/Reward ratios to each TP
    risk_reward_1 DECIMAL(5, 2),                 -- RR to TP1
    risk_reward_2 DECIMAL(5, 2),                 -- RR to TP2
    risk_reward_3 DECIMAL(5, 2),                 -- RR to TP3

    -- Confidence
    confidence_score DECIMAL(5, 4) NOT NULL DEFAULT 0.5
        CHECK (confidence_score BETWEEN 0 AND 1),
    strength VARCHAR(20),                        -- 'weak', 'moderate', 'strong', 'very_strong' (set by trigger)
    quality_grade VARCHAR(2),                    -- 'A+', 'A', 'B', 'C', 'D'

    -- Model provenance
    model_id VARCHAR(100),
    model_version VARCHAR(50) NOT NULL,

    -- Technical context at generation time (free-form JSON)
    market_structure JSONB DEFAULT '{}'::JSONB,  -- BOS, CHoCH, etc.
    key_levels JSONB DEFAULT '[]'::JSONB,        -- Nearby key levels
    order_blocks JSONB DEFAULT '[]'::JSONB,      -- Detected order blocks
    fair_value_gaps JSONB DEFAULT '[]'::JSONB,   -- Detected FVGs
    liquidity_pools JSONB DEFAULT '[]'::JSONB,   -- Liquidity pools

    -- Indicator snapshot
    rsi_14 DECIMAL(5, 2),
    macd_histogram DECIMAL(15, 8),
    atr_14 DECIMAL(15, 8),
    volume_ratio DECIMAL(5, 2),

    -- Market context
    market_bias VARCHAR(20),                     -- 'bullish', 'bearish', 'neutral'
    volatility_regime VARCHAR(20),               -- 'low', 'medium', 'high', 'extreme'
    trend_direction VARCHAR(20),                 -- 'up', 'down', 'ranging'
    session_bias VARCHAR(20),                    -- Session bias

    -- Validity window
    valid_from TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    valid_until TIMESTAMPTZ NOT NULL,

    -- Execution
    activated_at TIMESTAMPTZ,                    -- When price entered the zone
    triggered_at TIMESTAMPTZ,                    -- When the order executed
    triggered_price DECIMAL(15, 8),
    slippage DECIMAL(10, 4),

    -- Outcome
    result VARCHAR(20),                          -- 'win', 'loss', 'breakeven', 'partial'
    exit_price DECIMAL(15, 8),
    exit_reason VARCHAR(50),
    pnl_pips DECIMAL(10, 2),
    pnl_rr DECIMAL(5, 2),                        -- Result in RR multiples

    -- Traceability
    position_id UUID,                            -- Resulting position

    -- Model features / rationale
    features JSONB DEFAULT '{}'::JSONB,
    explanation TEXT,                            -- Human-readable rationale

    -- Notification
    notified_at TIMESTAMPTZ,
    notification_channels VARCHAR(20)[],

    -- Metadata
    tags VARCHAR(50)[],
    notes TEXT,
    metadata JSONB DEFAULT '{}'::JSONB,

    -- Timestamps
    generated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Entry zone must be well-ordered (equal bounds = single-price entry)
    CONSTRAINT entry_signals_zone_valid CHECK (entry_zone_high >= entry_zone_low)
);

COMMENT ON TABLE ml.entry_signals IS
'Senales de entrada generadas por modelos ML con metodologias ICT/SMC/AMD';

COMMENT ON COLUMN ml.entry_signals.amd_phase IS
'Fase AMD detectada: Accumulation, Manipulation, Distribution';

COMMENT ON COLUMN ml.entry_signals.quality_grade IS
'Calificacion de calidad de la senal: A+, A, B, C, D';
|
||||
|
||||
-- B-tree indexes for the common access paths.
CREATE INDEX IF NOT EXISTS idx_entry_signals_tenant
    ON ml.entry_signals(tenant_id);

-- Partial: only user-owned (non-public) signals
CREATE INDEX IF NOT EXISTS idx_entry_signals_user
    ON ml.entry_signals(user_id)
    WHERE user_id IS NOT NULL;

CREATE INDEX IF NOT EXISTS idx_entry_signals_symbol
    ON ml.entry_signals(symbol, generated_at DESC);

CREATE INDEX IF NOT EXISTS idx_entry_signals_status
    ON ml.entry_signals(status);

-- Partial: signals still awaiting activation/trigger, for expiry scans
CREATE INDEX IF NOT EXISTS idx_entry_signals_active
    ON ml.entry_signals(status, valid_until)
    WHERE status IN ('pending', 'active');

CREATE INDEX IF NOT EXISTS idx_entry_signals_methodology
    ON ml.entry_signals(methodology, direction);

CREATE INDEX IF NOT EXISTS idx_entry_signals_amd
    ON ml.entry_signals(amd_phase, generated_at DESC)
    WHERE amd_phase IS NOT NULL;

-- Partial: ranking of live signals by confidence
CREATE INDEX IF NOT EXISTS idx_entry_signals_confidence
    ON ml.entry_signals(confidence_score DESC)
    WHERE status IN ('pending', 'active');

CREATE INDEX IF NOT EXISTS idx_entry_signals_generated
    ON ml.entry_signals(generated_at DESC);

-- Partial: closed signals only, for performance reporting
CREATE INDEX IF NOT EXISTS idx_entry_signals_result
    ON ml.entry_signals(result, triggered_at DESC)
    WHERE result IS NOT NULL;

-- GIN indexes for containment queries on the JSONB context columns
CREATE INDEX IF NOT EXISTS idx_entry_signals_keys_gin
    ON ml.entry_signals USING GIN (key_levels);

CREATE INDEX IF NOT EXISTS idx_entry_signals_ob_gin
    ON ml.entry_signals USING GIN (order_blocks);
|
||||
-- Keep updated_at current on every UPDATE (reuses the shared ml helper).
-- DROP first so re-running the migration is idempotent.
DROP TRIGGER IF EXISTS entry_signal_updated_at ON ml.entry_signals;
CREATE TRIGGER entry_signal_updated_at
    BEFORE UPDATE ON ml.entry_signals
    FOR EACH ROW
    EXECUTE FUNCTION ml.update_ml_timestamp();
|
||||
|
||||
-- Trigger function: derive strength label plus pip/RR columns from the
-- price inputs whenever they change.
--
-- Fix vs. original: derived columns (stop_loss_pips, tpN_pips,
-- risk_reward_N) are now explicitly recomputed to NULL when their inputs
-- are NULL; the original left stale values behind when an UPDATE cleared
-- optimal_entry or a take-profit level.
--
-- NOTE(review): pip conversion hard-codes a 4-decimal quote (* 10000);
-- wrong for JPY pairs and non-forex assets — confirm.
CREATE OR REPLACE FUNCTION ml.calculate_signal_strength()
RETURNS TRIGGER AS $$
BEGIN
    -- Bucket confidence into a human-readable strength label.
    NEW.strength := CASE
        WHEN NEW.confidence_score >= 0.85 THEN 'very_strong'
        WHEN NEW.confidence_score >= 0.70 THEN 'strong'
        WHEN NEW.confidence_score >= 0.55 THEN 'moderate'
        ELSE 'weak'
    END;

    IF NEW.optimal_entry IS NOT NULL AND NEW.stop_loss IS NOT NULL THEN
        NEW.stop_loss_pips := ABS(NEW.optimal_entry - NEW.stop_loss) * 10000;

        -- CASE yields NULL when the TP is absent, clearing stale values.
        NEW.tp1_pips := CASE WHEN NEW.take_profit_1 IS NOT NULL
                             THEN ABS(NEW.take_profit_1 - NEW.optimal_entry) * 10000 END;
        NEW.tp2_pips := CASE WHEN NEW.take_profit_2 IS NOT NULL
                             THEN ABS(NEW.take_profit_2 - NEW.optimal_entry) * 10000 END;
        NEW.tp3_pips := CASE WHEN NEW.take_profit_3 IS NOT NULL
                             THEN ABS(NEW.take_profit_3 - NEW.optimal_entry) * 10000 END;

        -- NULL tp pips or zero stop propagate to a NULL ratio.
        NEW.risk_reward_1 := NEW.tp1_pips / NULLIF(NEW.stop_loss_pips, 0);
        NEW.risk_reward_2 := NEW.tp2_pips / NULLIF(NEW.stop_loss_pips, 0);
        NEW.risk_reward_3 := NEW.tp3_pips / NULLIF(NEW.stop_loss_pips, 0);
    ELSE
        -- Inputs missing: clear every derived column instead of keeping
        -- values computed from previous prices.
        NEW.stop_loss_pips := NULL;
        NEW.tp1_pips := NULL;
        NEW.tp2_pips := NULL;
        NEW.tp3_pips := NULL;
        NEW.risk_reward_1 := NULL;
        NEW.risk_reward_2 := NULL;
        NEW.risk_reward_3 := NULL;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Recompute only when a relevant input column changes.
DROP TRIGGER IF EXISTS entry_signal_calc_strength ON ml.entry_signals;
CREATE TRIGGER entry_signal_calc_strength
    BEFORE INSERT OR UPDATE OF confidence_score, optimal_entry, stop_loss, take_profit_1, take_profit_2, take_profit_3
    ON ml.entry_signals
    FOR EACH ROW
    EXECUTE FUNCTION ml.calculate_signal_strength();
|
||||
|
||||
-- Activate a pending signal when price has reached its entry zone.
--
-- Parameters:
--   p_signal_id     : signal to activate.
--   p_current_price : current market price.
--
-- Returns TRUE when the signal was transitioned to 'active',
-- FALSE when it does not exist, is not 'pending', or price is outside
-- the zone.
--
-- Fix vs. original: the SELECT-then-UPDATE pair was race-prone (the
-- signal could be cancelled/expired between the check and the write);
-- a single conditional UPDATE performs check and transition atomically.
CREATE OR REPLACE FUNCTION ml.activate_entry_signal(
    p_signal_id UUID,
    p_current_price DECIMAL
)
RETURNS BOOLEAN AS $$
BEGIN
    UPDATE ml.entry_signals
    SET status = 'active',
        activated_at = NOW()
    WHERE id = p_signal_id
      AND status = 'pending'
      AND p_current_price BETWEEN entry_zone_low AND entry_zone_high;

    -- FOUND reflects whether the UPDATE matched (and thus activated) a row.
    RETURN FOUND;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Record the final outcome of a signal and derive PnL in pips and in
-- RR multiples from the row's own triggered_price / stop_loss_pips.
--
-- Parameters:
--   p_signal_id   : signal to close out.
--   p_result      : 'win', 'loss', 'breakeven', 'partial'.
--   p_exit_price  : realized exit price.
--   p_exit_reason : free-text exit reason (default 'manual').
--   p_position_id : optional resulting position for traceability.
--
-- No-op when the signal does not exist (matches original behavior).
--
-- Fix vs. original: the preliminary SELECT was redundant and race-prone;
-- a single UPDATE computes the PnL expressions directly from the row's
-- columns, so the values seen are the ones being updated.
-- NOTE(review): pip conversion hard-codes * 10000 — wrong for JPY pairs
-- and non-forex assets; pnl_pips is NULL if the signal never triggered
-- (triggered_price IS NULL), same as before.
CREATE OR REPLACE FUNCTION ml.record_signal_result(
    p_signal_id UUID,
    p_result VARCHAR,
    p_exit_price DECIMAL,
    p_exit_reason VARCHAR DEFAULT 'manual',
    p_position_id UUID DEFAULT NULL
)
RETURNS VOID AS $$
BEGIN
    UPDATE ml.entry_signals
    SET result = p_result,
        exit_price = p_exit_price,
        exit_reason = p_exit_reason,
        position_id = p_position_id,
        -- Signed pip PnL: direction decides which side of the spread wins.
        pnl_pips = CASE direction
            WHEN 'LONG' THEN (p_exit_price - triggered_price) * 10000
            ELSE (triggered_price - p_exit_price) * 10000
        END,
        -- PnL expressed in multiples of the initial risk (stop distance);
        -- NULL when stop_loss_pips is NULL or non-positive.
        pnl_rr = CASE
            WHEN stop_loss_pips > 0 THEN
                CASE direction
                    WHEN 'LONG' THEN (p_exit_price - triggered_price) * 10000 / stop_loss_pips
                    ELSE (triggered_price - p_exit_price) * 10000 / stop_loss_pips
                END
            ELSE NULL
        END
    WHERE id = p_signal_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Live signals (pending or active) still inside their validity window,
-- ranked by model confidence, newest first.
CREATE OR REPLACE VIEW ml.v_active_entry_signals AS
SELECT
    id,
    tenant_id,
    symbol,
    direction,
    methodology,
    amd_phase,
    status,
    entry_zone_high,
    entry_zone_low,
    optimal_entry,
    stop_loss,
    take_profit_1,
    risk_reward_1,
    confidence_score,
    strength,
    quality_grade,
    market_bias,
    volatility_regime,
    valid_until,
    generated_at
FROM ml.entry_signals
WHERE status IN ('pending', 'active')
  AND valid_until > NOW()
ORDER BY confidence_score DESC, generated_at DESC;
|
||||
|
||||
-- Win/loss statistics per methodology / AMD phase / direction over
-- closed signals only (result IS NOT NULL).
CREATE OR REPLACE VIEW ml.v_signal_methodology_performance AS
SELECT
    methodology,
    amd_phase,
    direction,
    COUNT(*) AS total_signals,
    COUNT(*) FILTER (WHERE result = 'win') AS wins,
    COUNT(*) FILTER (WHERE result = 'loss') AS losses,
    COUNT(*) FILTER (WHERE result = 'breakeven') AS breakeven,
    -- NULLIF guards against an empty denominator
    ROUND((COUNT(*) FILTER (WHERE result = 'win')::DECIMAL
           / NULLIF(COUNT(*) FILTER (WHERE result IS NOT NULL), 0) * 100), 2) AS win_rate,
    ROUND(AVG(confidence_score)::NUMERIC, 4) AS avg_confidence,
    ROUND(AVG(pnl_rr) FILTER (WHERE pnl_rr IS NOT NULL)::NUMERIC, 2) AS avg_rr,
    ROUND(SUM(pnl_pips) FILTER (WHERE pnl_pips IS NOT NULL)::NUMERIC, 2) AS total_pips
FROM ml.entry_signals
WHERE result IS NOT NULL
GROUP BY methodology, amd_phase, direction
ORDER BY win_rate DESC NULLS LAST;
|
||||
|
||||
-- Row Level Security: per-tenant isolation via the session GUC
-- app.current_tenant_id (NULL when unset => policy matches nothing).
ALTER TABLE ml.entry_signals ENABLE ROW LEVEL SECURITY;

CREATE POLICY entry_signals_tenant ON ml.entry_signals
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants: app writes but never deletes (signals are an audit trail);
-- readonly role only reads the base table.
GRANT SELECT, INSERT, UPDATE ON ml.entry_signals TO trading_app;
GRANT SELECT ON ml.entry_signals TO trading_readonly;
GRANT SELECT ON ml.v_active_entry_signals TO trading_app;
GRANT SELECT ON ml.v_signal_methodology_performance TO trading_app;
GRANT EXECUTE ON FUNCTION ml.activate_entry_signal TO trading_app;
GRANT EXECUTE ON FUNCTION ml.record_signal_result TO trading_app;
|
||||
406
ddl/schemas/ml/005_market_analysis.sql
Normal file
406
ddl/schemas/ml/005_market_analysis.sql
Normal file
@ -0,0 +1,406 @@
|
||||
-- ============================================================================
|
||||
-- SCHEMA: ml
|
||||
-- TABLE: market_analysis
|
||||
-- DESCRIPTION: Analisis de mercado generado por ML (sesgo, volatilidad, estructura)
|
||||
-- VERSION: 1.0.0
|
||||
-- CREATED: 2026-01-16
|
||||
-- SPRINT: Sprint 4 - DDL Implementation Roadmap Q1-2026
|
||||
-- ============================================================================
|
||||
|
||||
-- Enum: kind of analysis a model run produces.
DO $$ BEGIN
    CREATE TYPE ml.analysis_type AS ENUM (
        'daily_bias',           -- Daily directional bias
        'weekly_outlook',       -- Weekly outlook
        'session_analysis',     -- Per-session analysis
        'structure_analysis',   -- Market-structure analysis
        'volatility_forecast',  -- Volatility forecast
        'correlation_report',   -- Correlation report
        'sentiment_analysis',   -- Sentiment analysis
        'fundamental_impact',   -- Fundamental impact
        'comprehensive'         -- Full combined analysis
    );
EXCEPTION
    -- Idempotent re-runs: swallow "type already exists".
    WHEN duplicate_object THEN null;
END $$;

-- Enum: directional market bias, ordered from strongest bullish to strongest
-- bearish (enum declaration order defines sort order in PostgreSQL).
DO $$ BEGIN
    CREATE TYPE ml.market_bias AS ENUM (
        'strong_bullish',       -- Strongly bullish
        'bullish',              -- Bullish
        'slightly_bullish',     -- Slightly bullish
        'neutral',              -- Neutral
        'slightly_bearish',     -- Slightly bearish
        'bearish',              -- Bearish
        'strong_bearish'        -- Strongly bearish
    );
EXCEPTION
    -- Idempotent re-runs: swallow "type already exists".
    WHEN duplicate_object THEN null;
END $$;

-- Enum: volatility regime classification.
DO $$ BEGIN
    CREATE TYPE ml.volatility_regime AS ENUM (
        'extremely_low',        -- Extremely low
        'low',                  -- Low
        'normal',               -- Normal
        'high',                 -- High
        'extreme',              -- Extreme
        'transitioning'         -- In transition between regimes
    );
EXCEPTION
    -- Idempotent re-runs: swallow "type already exists".
    WHEN duplicate_object THEN null;
END $$;

-- Enum: dominant market structure (trend / range / transitional states).
DO $$ BEGIN
    CREATE TYPE ml.market_structure AS ENUM (
        'uptrend_strong',       -- Strong uptrend
        'uptrend_weak',         -- Weak uptrend
        'downtrend_strong',     -- Strong downtrend
        'downtrend_weak',       -- Weak downtrend
        'ranging_tight',        -- Tight range
        'ranging_wide',         -- Wide range
        'consolidating',        -- Consolidating
        'breaking_out',         -- Breaking out
        'reversing'             -- Reversing
    );
EXCEPTION
    -- Idempotent re-runs: swallow "type already exists".
    WHEN duplicate_object THEN null;
END $$;
|
||||
|
||||
-- Market analysis table: one ML-generated snapshot per
-- (tenant, symbol, analysis_type, analysis_date) — enforced by
-- market_analysis_unique. Holds directional bias, market structure,
-- volatility, key levels, ICT/SMC artifacts (order blocks, FVGs) and the
-- post-hoc validation result written back by ml.validate_market_analysis.
CREATE TABLE IF NOT EXISTS ml.market_analysis (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,                         -- multi-tenant key; RLS filters on it

    -- Symbol and scope
    symbol VARCHAR(20) NOT NULL,
    asset_class ml.asset_class NOT NULL DEFAULT 'FOREX',
    analysis_type ml.analysis_type NOT NULL,
    timeframe VARCHAR(10) NOT NULL DEFAULT 'D1',

    -- Validity window of the analysis
    analysis_date DATE NOT NULL,
    valid_from TIMESTAMPTZ NOT NULL,
    -- NOTE(review): no CHECK that valid_until > valid_from — confirm intended
    valid_until TIMESTAMPTZ NOT NULL,

    -- Bias and direction
    bias ml.market_bias NOT NULL,
    bias_confidence DECIMAL(5, 4) NOT NULL DEFAULT 0.5
        CHECK (bias_confidence BETWEEN 0 AND 1),     -- probability-like, [0, 1]
    bias_reasoning TEXT,                             -- free-text model rationale

    -- Market structure
    structure ml.market_structure NOT NULL,
    structure_timeframe VARCHAR(10),                 -- timeframe of the dominant structure
    trend_strength DECIMAL(5, 2),                    -- intended range -100..+100 (not CHECK-enforced)

    -- Volatility
    volatility_regime ml.volatility_regime NOT NULL,
    current_atr DECIMAL(15, 8),
    atr_percentile DECIMAL(5, 2),                    -- historical percentile of current ATR
    expected_range_pips DECIMAL(10, 2),
    volatility_forecast DECIMAL(10, 4),              -- expected volatility

    -- Key levels
    key_resistance_1 DECIMAL(15, 8),
    key_resistance_2 DECIMAL(15, 8),
    key_resistance_3 DECIMAL(15, 8),
    key_support_1 DECIMAL(15, 8),
    key_support_2 DECIMAL(15, 8),
    key_support_3 DECIMAL(15, 8),
    pivot_point DECIMAL(15, 8),

    -- Order blocks and fair value gaps (ICT/SMC); arrays of objects,
    -- element schema defined by the producing model — not enforced here
    bullish_order_blocks JSONB DEFAULT '[]'::JSONB,
    bearish_order_blocks JSONB DEFAULT '[]'::JSONB,
    fair_value_gaps JSONB DEFAULT '[]'::JSONB,
    liquidity_levels JSONB DEFAULT '[]'::JSONB,

    -- Indicator snapshots
    indicators JSONB DEFAULT '{}'::JSONB,            -- RSI, MACD, etc.
    moving_averages JSONB DEFAULT '{}'::JSONB,       -- SMAs, EMAs

    -- Intermarket correlations (expected -1..+1; not CHECK-enforced)
    correlation_dxy DECIMAL(5, 4),                   -- correlation with DXY
    correlation_gold DECIMAL(5, 4),                  -- correlation with gold
    correlation_sp500 DECIMAL(5, 4),                 -- correlation with S&P 500
    correlated_pairs JSONB DEFAULT '[]'::JSONB,      -- related/correlated symbols

    -- Sentiment
    sentiment_score DECIMAL(5, 2),                   -- intended range -100..+100
    sentiment_sources JSONB DEFAULT '{}'::JSONB,     -- per-source sentiment inputs
    cot_report JSONB DEFAULT '{}'::JSONB,            -- Commitment of Traders data

    -- Fundamental events
    upcoming_events JSONB DEFAULT '[]'::JSONB,       -- upcoming calendar events
    recent_events_impact JSONB DEFAULT '[]'::JSONB,  -- impact of recent events
    high_impact_event_today BOOLEAN NOT NULL DEFAULT FALSE,

    -- Scenarios
    primary_scenario JSONB DEFAULT '{}'::JSONB,      -- main scenario
    secondary_scenario JSONB DEFAULT '{}'::JSONB,    -- alternative scenario
    risk_scenario JSONB DEFAULT '{}'::JSONB,         -- risk scenario

    -- Zones of interest
    buy_zones JSONB DEFAULT '[]'::JSONB,
    sell_zones JSONB DEFAULT '[]'::JSONB,
    avoid_zones JSONB DEFAULT '[]'::JSONB,

    -- Producing model
    model_id VARCHAR(100),
    model_version VARCHAR(50) NOT NULL,
    analysis_confidence DECIMAL(5, 4) NOT NULL DEFAULT 0.5
        CHECK (analysis_confidence BETWEEN 0 AND 1),

    -- Summary
    summary TEXT,                                    -- executive summary
    trading_plan TEXT,                               -- suggested trading plan
    key_insights JSONB DEFAULT '[]'::JSONB,          -- key insights

    -- Outcome (written post-validation by ml.validate_market_analysis)
    actual_direction VARCHAR(20),                    -- realized direction
    actual_range_pips DECIMAL(10, 2),
    accuracy_score DECIMAL(5, 2),                    -- prediction accuracy score
    validated_at TIMESTAMPTZ,

    -- Metadata
    tags VARCHAR(50)[],
    notes TEXT,
    metadata JSONB DEFAULT '{}'::JSONB,

    -- Timestamps
    -- generated_at is presumably the model-run time, distinct from the row
    -- insert time (created_at) — TODO confirm with the producing pipeline
    generated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),   -- maintained by trigger

    -- Constraints: one analysis of each type per tenant/symbol/day
    CONSTRAINT market_analysis_unique UNIQUE (tenant_id, symbol, analysis_type, analysis_date)
);

COMMENT ON TABLE ml.market_analysis IS
'Analisis de mercado generado por modelos ML: sesgo, estructura, volatilidad, niveles clave';

COMMENT ON COLUMN ml.market_analysis.bias IS
'Sesgo direccional del mercado para el periodo analizado';

COMMENT ON COLUMN ml.market_analysis.structure IS
'Estructura de mercado dominante (tendencia, rango, etc.)';
|
||||
|
||||
-- Indexes
-- Tenant isolation: the RLS policy predicate filters on tenant_id.
CREATE INDEX IF NOT EXISTS idx_market_analysis_tenant
    ON ml.market_analysis(tenant_id);

-- Latest-first history per symbol.
CREATE INDEX IF NOT EXISTS idx_market_analysis_symbol
    ON ml.market_analysis(symbol, analysis_date DESC);

-- Date-range scans across all symbols.
CREATE INDEX IF NOT EXISTS idx_market_analysis_date
    ON ml.market_analysis(analysis_date DESC);

-- Lookups by analysis kind (e.g. all 'daily_bias' rows), newest first.
CREATE INDEX IF NOT EXISTS idx_market_analysis_type
    ON ml.market_analysis(analysis_type, analysis_date DESC);

-- Filtering by directional bias, newest first.
CREATE INDEX IF NOT EXISTS idx_market_analysis_bias
    ON ml.market_analysis(bias, analysis_date DESC);
|
||||
|
||||
-- Supports "currently valid" lookups (WHERE valid_until > NOW() at query time).
--
-- FIX: the original declared a partial index with WHERE valid_until > NOW().
-- PostgreSQL rejects partial-index predicates that call non-IMMUTABLE
-- functions (NOW() is only STABLE), so that CREATE INDEX fails at deploy time
-- with "functions in index predicate must be marked IMMUTABLE". A plain
-- b-tree on valid_until serves the same range queries correctly.
CREATE INDEX IF NOT EXISTS idx_market_analysis_active
    ON ml.market_analysis(valid_until);
|
||||
|
||||
-- Per-model-version history (ml.v_market_analysis_performance groups on this).
CREATE INDEX IF NOT EXISTS idx_market_analysis_model
    ON ml.market_analysis(model_version, analysis_date DESC);

-- Partial index restricted to days flagged with a high-impact calendar event
-- (constant predicate — allowed in a partial index).
CREATE INDEX IF NOT EXISTS idx_market_analysis_high_impact
    ON ml.market_analysis(analysis_date, high_impact_event_today)
    WHERE high_impact_event_today = TRUE;

-- GIN indexes for JSONB containment (@>) / existence queries.
CREATE INDEX IF NOT EXISTS idx_market_analysis_events_gin
    ON ml.market_analysis USING GIN (upcoming_events);

CREATE INDEX IF NOT EXISTS idx_market_analysis_insights_gin
    ON ml.market_analysis USING GIN (key_insights);
|
||||
|
||||
-- Keep updated_at current on every UPDATE. ml.update_ml_timestamp is defined
-- in an earlier ml-schema DDL file; DROP+CREATE keeps this script re-runnable.
DROP TRIGGER IF EXISTS market_analysis_updated_at ON ml.market_analysis;
CREATE TRIGGER market_analysis_updated_at
    BEFORE UPDATE ON ml.market_analysis
    FOR EACH ROW
    EXECUTE FUNCTION ml.update_ml_timestamp();
|
||||
|
||||
-- Returns the most recent still-valid analysis row for a tenant/symbol/type,
-- or a NULL-filled record when none exists (SELECT INTO leaves v_analysis
-- untouched on no match; callers should check e.g. result.id IS NULL).
--
-- FIX: declared STABLE — the function only reads and defaulted to VOLATILE,
-- which blocks planner optimizations and misdocuments it as having side
-- effects. STABLE is the correct category for a read-only function whose
-- result may change between statements (it depends on NOW() and table data).
CREATE OR REPLACE FUNCTION ml.get_current_analysis(
    p_tenant_id UUID,
    p_symbol VARCHAR,
    p_analysis_type ml.analysis_type DEFAULT 'daily_bias'
)
RETURNS ml.market_analysis AS $$
DECLARE
    v_analysis ml.market_analysis;
BEGIN
    -- Newest analysis_date wins among rows whose validity window is open.
    SELECT * INTO v_analysis
    FROM ml.market_analysis
    WHERE tenant_id = p_tenant_id
      AND symbol = p_symbol
      AND analysis_type = p_analysis_type
      AND valid_until > NOW()
    ORDER BY analysis_date DESC
    LIMIT 1;

    RETURN v_analysis;
END;
$$ LANGUAGE plpgsql STABLE;
|
||||
|
||||
-- Aggregated market overview: the latest still-valid 'daily_bias' snapshot
-- per symbol for a tenant.
--
-- p_symbols: optional whitelist; NULL (default) returns every symbol that has
-- a valid analysis.
--
-- FIX: declared STABLE — the function is read-only and defaulted to VOLATILE;
-- STABLE is the correct volatility category (result depends only on table
-- data and NOW() within the statement) and enables planner optimizations.
CREATE OR REPLACE FUNCTION ml.get_market_overview(
    p_tenant_id UUID,
    p_symbols VARCHAR[] DEFAULT NULL
)
RETURNS TABLE (
    symbol VARCHAR,
    bias ml.market_bias,
    bias_confidence DECIMAL,
    structure ml.market_structure,
    volatility_regime ml.volatility_regime,
    analysis_date DATE
) AS $$
BEGIN
    RETURN QUERY
    -- DISTINCT ON (symbol) with ORDER BY (symbol, analysis_date DESC)
    -- keeps exactly the newest row per symbol.
    SELECT DISTINCT ON (ma.symbol)
        ma.symbol,
        ma.bias,
        ma.bias_confidence,
        ma.structure,
        ma.volatility_regime,
        ma.analysis_date
    FROM ml.market_analysis ma
    WHERE ma.tenant_id = p_tenant_id
      AND ma.analysis_type = 'daily_bias'
      AND ma.valid_until > NOW()
      AND (p_symbols IS NULL OR ma.symbol = ANY(p_symbols))
    ORDER BY ma.symbol, ma.analysis_date DESC;
END;
$$ LANGUAGE plpgsql STABLE;
|
||||
|
||||
-- Validate a stored analysis against realized market data (high/low/close).
-- Writes actual_direction, actual_range_pips, accuracy_score and validated_at
-- back onto the analysis row. Silently no-ops when the id does not exist.
CREATE OR REPLACE FUNCTION ml.validate_market_analysis(
    p_analysis_id UUID,
    p_actual_high DECIMAL,
    p_actual_low DECIMAL,
    p_actual_close DECIMAL
)
RETURNS VOID AS $$
DECLARE
    v_analysis ml.market_analysis;
    v_direction VARCHAR;
    v_accuracy DECIMAL;
BEGIN
    SELECT * INTO v_analysis FROM ml.market_analysis WHERE id = p_analysis_id;

    IF NOT FOUND THEN
        -- Unknown id: nothing to validate.
        RETURN;
    END IF;

    -- Realized direction: close above the high/low midpoint counts as bullish.
    -- NOTE(review): classification is binary — there is no 'neutral' outcome,
    -- and a close exactly at the midpoint is scored 'bearish'. Confirm intended.
    IF p_actual_close > (p_actual_high + p_actual_low) / 2 THEN
        v_direction := 'bullish';
    ELSE
        v_direction := 'bearish';
    END IF;

    -- Accuracy: 100 when the predicted bias family matches the realized
    -- direction, 50 for a neutral prediction, 0 otherwise.
    v_accuracy := CASE
        WHEN v_analysis.bias IN ('strong_bullish', 'bullish', 'slightly_bullish') AND v_direction = 'bullish' THEN 100
        WHEN v_analysis.bias IN ('strong_bearish', 'bearish', 'slightly_bearish') AND v_direction = 'bearish' THEN 100
        WHEN v_analysis.bias = 'neutral' THEN 50
        ELSE 0
    END;

    UPDATE ml.market_analysis
    SET actual_direction = v_direction,
        -- NOTE(review): * 10000 assumes a 4-decimal pip size; this is wrong
        -- for JPY pairs (2-decimal pips), metals and indices — confirm
        -- per-symbol pip handling upstream.
        actual_range_pips = (p_actual_high - p_actual_low) * 10000,
        accuracy_score = v_accuracy,
        validated_at = NOW()
    WHERE id = p_analysis_id;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Currently valid analyses: one row per analysis whose validity window is
-- still open, newest first. A compact projection of ml.market_analysis for
-- dashboards (headline bias, structure, key levels and summary only).
CREATE OR REPLACE VIEW ml.v_current_market_analysis AS
SELECT
    ma.id,
    ma.tenant_id,
    ma.symbol,
    ma.asset_class,
    ma.analysis_type,
    ma.analysis_date,
    ma.bias,
    ma.bias_confidence,
    ma.structure,
    ma.volatility_regime,
    ma.expected_range_pips,
    ma.key_resistance_1,
    ma.key_support_1,
    ma.pivot_point,
    ma.sentiment_score,
    ma.high_impact_event_today,
    ma.summary,
    ma.analysis_confidence,
    ma.valid_until
FROM ml.market_analysis AS ma
WHERE ma.valid_until > NOW()
ORDER BY ma.analysis_date DESC, ma.symbol;
|
||||
|
||||
-- Model scorecard over validated analyses: per symbol/type/model-version,
-- average accuracy and confidence plus the share of analyses scoring >= 70.
CREATE OR REPLACE VIEW ml.v_market_analysis_performance AS
SELECT
    ma.symbol,
    ma.analysis_type,
    ma.model_version,
    COUNT(*) AS total_analyses,
    ROUND(AVG(ma.accuracy_score)::NUMERIC, 2) AS avg_accuracy,
    ROUND(AVG(ma.bias_confidence)::NUMERIC, 4) AS avg_confidence,
    COUNT(CASE WHEN ma.accuracy_score >= 70 THEN 1 END) AS accurate_count,
    COUNT(CASE WHEN ma.accuracy_score < 30 THEN 1 END) AS inaccurate_count,
    -- share of validated rows with accuracy >= 70; NULLIF guards div-by-zero
    ROUND((COUNT(CASE WHEN ma.accuracy_score >= 70 THEN 1 END)::DECIMAL
        / NULLIF(COUNT(CASE WHEN ma.validated_at IS NOT NULL THEN 1 END), 0) * 100), 2) AS accuracy_rate
FROM ml.market_analysis AS ma
WHERE ma.validated_at IS NOT NULL
GROUP BY ma.symbol, ma.analysis_type, ma.model_version
ORDER BY accuracy_rate DESC NULLS LAST;
|
||||
|
||||
-- Current intermarket correlations: still-valid daily-bias rows that carry at
-- least one of the DXY / gold correlation figures, newest first.
CREATE OR REPLACE VIEW ml.v_market_correlations AS
SELECT
    ma.symbol,
    ma.analysis_date,
    ma.correlation_dxy,
    ma.correlation_gold,
    ma.correlation_sp500,
    ma.bias,
    ma.structure
FROM ml.market_analysis AS ma
WHERE ma.analysis_type = 'daily_bias'
    AND ma.valid_until > NOW()
    AND (ma.correlation_dxy IS NOT NULL OR ma.correlation_gold IS NOT NULL)
ORDER BY ma.analysis_date DESC, ma.symbol;
|
||||
|
||||
-- RLS Policies: every statement on ml.market_analysis is filtered by tenant.
ALTER TABLE ml.market_analysis ENABLE ROW LEVEL SECURITY;

-- FOR ALL with only USING: PostgreSQL reuses the USING expression as the
-- WITH CHECK for writes. current_setting(..., true) returns NULL when
-- 'app.current_tenant_id' is unset, making the predicate UNKNOWN — i.e. an
-- unset tenant context denies all access.
CREATE POLICY market_analysis_tenant ON ml.market_analysis
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);

-- Grants: app role may read/insert/update (no DELETE); readonly role may read.
GRANT SELECT, INSERT, UPDATE ON ml.market_analysis TO trading_app;
GRANT SELECT ON ml.market_analysis TO trading_readonly;
GRANT SELECT ON ml.v_current_market_analysis TO trading_app;
GRANT SELECT ON ml.v_market_analysis_performance TO trading_app;
GRANT SELECT ON ml.v_market_correlations TO trading_app;
GRANT EXECUTE ON FUNCTION ml.get_current_analysis TO trading_app;
GRANT EXECUTE ON FUNCTION ml.get_market_overview TO trading_app;
GRANT EXECUTE ON FUNCTION ml.validate_market_analysis TO trading_app;
|
||||
Loading…
Reference in New Issue
Block a user