[DDL] feat: Sprint 1 - Add 12 tables for users, admin, notifications, market_data

## New Tables Created (Sprint 1 - DDL Roadmap Q1-2026)

### users schema (4 tables):
- profiles: Extended user profile information
- user_settings: User preferences and configurations
- kyc_verifications: KYC/AML verification records
- risk_profiles: Trading risk assessment profiles

### admin schema (3 tables):
- admin_roles: Platform administrative roles
- platform_analytics: Aggregated platform metrics
- api_keys: Programmatic API access keys

### notifications schema (1 table):
- notifications: Multi-channel notification system

### market_data schema (4 tables):
- tickers: Financial instruments catalog
- ohlcv_5m: 5-minute OHLCV price data
- technical_indicators: Pre-calculated TA indicators
- ohlcv_5m_staging: Staging table for data ingestion

## Features:
- Multi-tenancy with RLS policies
- Comprehensive indexes for query optimization
- Triggers for computed fields and timestamps
- Helper functions for common operations
- Views for dashboard and reporting
- Full GRANTS configuration

Roadmap: orchestration/planes/ROADMAP-IMPLEMENTACION-DDL-2026-Q1.md

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
rckrdmrd 2026-01-16 19:41:53 -06:00
parent e520268348
commit b86dfa2e06
12 changed files with 3227 additions and 0 deletions

View File

@@ -0,0 +1,243 @@
-- ============================================================================
-- SCHEMA: admin
-- TABLE: admin_roles
-- DESCRIPTION: Administrative roles for the platform
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Create the schema if it does not exist (idempotent migration)
CREATE SCHEMA IF NOT EXISTS admin;
-- Schema-level usage grants for the app and read-only roles
GRANT USAGE ON SCHEMA admin TO trading_app;
GRANT USAGE ON SCHEMA admin TO trading_readonly;
-- Enum for the administrative access level.
-- Wrapped in DO/EXCEPTION because CREATE TYPE has no IF NOT EXISTS;
-- re-running the script is a no-op. Enum value order is persistent — only
-- append new values, never reorder.
DO $$ BEGIN
CREATE TYPE admin.admin_level AS ENUM (
'super_admin', -- Full platform access
'platform_admin', -- Platform administration (no code access)
'tenant_admin', -- Tenant administrator
'support', -- Customer support
'analyst', -- Read-only, for analysis
'auditor' -- Read-only, for audits
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Administrative Roles table: one row per (user, scope) role assignment,
-- including per-role capability flags, IP/schedule restrictions, expiry,
-- and an assignment audit trail.
CREATE TABLE IF NOT EXISTS admin.admin_roles (
-- Identifiers
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL REFERENCES users.users(id) ON DELETE CASCADE,
-- Administrative level
level admin.admin_level NOT NULL,
-- Role scope
is_global BOOLEAN NOT NULL DEFAULT FALSE, -- Applies to all tenants
tenant_id UUID REFERENCES tenants.tenants(id), -- Specific tenant (NULL if global)
-- Specific permissions (override of the level)
permissions JSONB DEFAULT '{}'::JSONB,
denied_permissions JSONB DEFAULT '[]'::JSONB, -- Explicitly denied permissions
-- Capability flags
can_impersonate BOOLEAN NOT NULL DEFAULT FALSE, -- May act as another user
can_view_pii BOOLEAN NOT NULL DEFAULT FALSE, -- May view personal data
can_export_data BOOLEAN NOT NULL DEFAULT FALSE, -- May export bulk data
can_modify_config BOOLEAN NOT NULL DEFAULT FALSE, -- May modify configuration
can_manage_admins BOOLEAN NOT NULL DEFAULT FALSE, -- May manage other admins
-- IP restrictions
allowed_ips INET[], -- Allowed IPs (NULL = all)
-- Schedule restrictions (enforced by admin.check_admin_access)
allowed_hours_start TIME, -- Allowed start time
allowed_hours_end TIME, -- Allowed end time
allowed_days INTEGER[], -- Allowed days (0=Sun, 6=Sat)
-- Activation
is_active BOOLEAN NOT NULL DEFAULT TRUE,
activated_at TIMESTAMPTZ,
deactivated_at TIMESTAMPTZ,
deactivation_reason TEXT,
-- Expiration
expires_at TIMESTAMPTZ, -- Temporary role
-- Assignment audit trail
assigned_by UUID REFERENCES users.users(id),
assigned_at TIMESTAMPTZ DEFAULT NOW(),
assignment_reason TEXT,
-- Last administrative activity
last_admin_action_at TIMESTAMPTZ,
total_admin_actions INTEGER NOT NULL DEFAULT 0,
-- Metadata
metadata JSONB DEFAULT '{}'::JSONB,
-- Timestamps (updated_at maintained by the admin_role_updated_at trigger)
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
-- NOTE(review): Postgres UNIQUE treats NULLs as distinct, so this does NOT
-- prevent a user holding multiple global roles (tenant_id IS NULL) — confirm
-- whether that is intended.
CONSTRAINT admin_roles_unique_user_tenant UNIQUE (user_id, tenant_id),
-- A role is either global (no tenant) or tenant-scoped (tenant required)
CONSTRAINT admin_roles_tenant_check CHECK (
(is_global = TRUE AND tenant_id IS NULL) OR
(is_global = FALSE AND tenant_id IS NOT NULL)
)
);
COMMENT ON TABLE admin.admin_roles IS
'Roles administrativos asignados a usuarios de la plataforma';
COMMENT ON COLUMN admin.admin_roles.level IS
'Nivel de acceso: super_admin, platform_admin, tenant_admin, support, analyst, auditor';
COMMENT ON COLUMN admin.admin_roles.is_global IS
'TRUE si el rol aplica a todos los tenants (solo para super_admin/platform_admin)';
-- Indexes
CREATE INDEX IF NOT EXISTS idx_admin_roles_user_id
ON admin.admin_roles(user_id);
-- Partial: tenant-scoped roles only (global roles have tenant_id IS NULL)
CREATE INDEX IF NOT EXISTS idx_admin_roles_tenant_id
ON admin.admin_roles(tenant_id)
WHERE tenant_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_admin_roles_level
ON admin.admin_roles(level);
-- Partial indexes keep the hot lookups (active/global/expiring roles) small
CREATE INDEX IF NOT EXISTS idx_admin_roles_active
ON admin.admin_roles(is_active)
WHERE is_active = TRUE;
CREATE INDEX IF NOT EXISTS idx_admin_roles_global
ON admin.admin_roles(is_global)
WHERE is_global = TRUE;
CREATE INDEX IF NOT EXISTS idx_admin_roles_expires
ON admin.admin_roles(expires_at)
WHERE expires_at IS NOT NULL;
-- Row trigger: keep updated_at current on every modification of admin_roles.
-- Shared by other admin-schema tables (e.g. api_keys reuses this function).
CREATE OR REPLACE FUNCTION admin.update_admin_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS admin_role_updated_at ON admin.admin_roles;
CREATE TRIGGER admin_role_updated_at
    BEFORE UPDATE ON admin.admin_roles
    FOR EACH ROW
    EXECUTE FUNCTION admin.update_admin_timestamp();
-- Rank helper: maps an admin level to a comparable integer (higher = more
-- privileged). Extracted because the original duplicated this CASE twice
-- inside check_admin_access. IMMUTABLE so the planner can fold it.
CREATE OR REPLACE FUNCTION admin.admin_level_rank(p_level admin.admin_level)
RETURNS INTEGER AS $$
SELECT CASE p_level
    WHEN 'super_admin' THEN 6
    WHEN 'platform_admin' THEN 5
    WHEN 'tenant_admin' THEN 4
    WHEN 'support' THEN 3
    WHEN 'analyst' THEN 2
    WHEN 'auditor' THEN 1
END;
$$ LANGUAGE sql IMMUTABLE;
-- Check whether p_user_id currently holds admin access of at least
-- p_required_level for p_tenant_id (or a global role). Enforces activation,
-- expiry and the optional day-of-week / time-of-day restrictions on the role.
-- Returns FALSE when no applicable active role exists.
-- NOTE(review): allowed_ips is NOT enforced here — no client IP is passed in;
-- callers must enforce IP restrictions separately.
CREATE OR REPLACE FUNCTION admin.check_admin_access(
    p_user_id UUID,
    p_tenant_id UUID DEFAULT NULL,
    p_required_level admin.admin_level DEFAULT 'support'
)
RETURNS BOOLEAN AS $$
DECLARE
    v_role RECORD;
    v_now_time TIME;
    v_now_day INTEGER;
BEGIN
    -- Pick the applicable active role, preferring global over tenant-scoped.
    SELECT * INTO v_role
    FROM admin.admin_roles
    WHERE user_id = p_user_id
      AND is_active = TRUE
      AND (expires_at IS NULL OR expires_at > NOW())
      AND (is_global = TRUE OR tenant_id = p_tenant_id)
    ORDER BY is_global DESC
    LIMIT 1;
    IF NOT FOUND THEN
        RETURN FALSE;
    END IF;
    -- Level comparison via the shared rank helper.
    IF admin.admin_level_rank(v_role.level) < admin.admin_level_rank(p_required_level) THEN
        RETURN FALSE;
    END IF;
    -- Optional time-of-day window. Fix vs. original: a wrap-around window
    -- (start > end, e.g. 22:00-06:00) used to reject every time of day;
    -- it is now treated as an overnight window.
    IF v_role.allowed_hours_start IS NOT NULL AND v_role.allowed_hours_end IS NOT NULL THEN
        v_now_time := CURRENT_TIME;
        IF v_role.allowed_hours_start <= v_role.allowed_hours_end THEN
            -- Same-day window: allowed inside [start, end].
            IF v_now_time < v_role.allowed_hours_start OR v_now_time > v_role.allowed_hours_end THEN
                RETURN FALSE;
            END IF;
        ELSE
            -- Overnight window: allowed from start to midnight, or midnight to end.
            IF v_now_time < v_role.allowed_hours_start AND v_now_time > v_role.allowed_hours_end THEN
                RETURN FALSE;
            END IF;
        END IF;
    END IF;
    -- Optional day-of-week restriction (0=Sunday .. 6=Saturday).
    IF v_role.allowed_days IS NOT NULL AND array_length(v_role.allowed_days, 1) > 0 THEN
        v_now_day := EXTRACT(DOW FROM CURRENT_DATE)::INTEGER;
        IF NOT (v_now_day = ANY(v_role.allowed_days)) THEN
            RETURN FALSE;
        END IF;
    END IF;
    RETURN TRUE;
END;
$$ LANGUAGE plpgsql;
-- View of active administrators joined with user identity and tenant name.
-- Expired and deactivated roles are filtered out; global roles have a NULL
-- tenant (LEFT JOIN keeps them).
CREATE OR REPLACE VIEW admin.v_active_admins AS
SELECT
ar.id,
ar.user_id,
u.email,
u.first_name,
u.last_name,
ar.level,
ar.is_global,
ar.tenant_id,
t.name AS tenant_name,
ar.can_impersonate,
ar.can_view_pii,
ar.expires_at,
ar.last_admin_action_at,
ar.total_admin_actions
FROM admin.admin_roles ar
JOIN users.users u ON ar.user_id = u.id
LEFT JOIN tenants.tenants t ON ar.tenant_id = t.id
WHERE ar.is_active = TRUE
AND (ar.expires_at IS NULL OR ar.expires_at > NOW())
ORDER BY ar.level, ar.created_at;
-- Grants
GRANT SELECT, INSERT, UPDATE, DELETE ON admin.admin_roles TO trading_app;
GRANT SELECT ON admin.admin_roles TO trading_readonly;
GRANT SELECT ON admin.v_active_admins TO trading_app;
GRANT EXECUTE ON FUNCTION admin.check_admin_access TO trading_app;

View File

@@ -0,0 +1,276 @@
-- ============================================================================
-- SCHEMA: admin
-- TABLE: platform_analytics
-- DESCRIPTION: Aggregated platform metrics
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Enum for the metric type (duplicate_object guard keeps re-runs idempotent)
DO $$ BEGIN
CREATE TYPE admin.metric_type AS ENUM (
'counter', -- Simple counter
'gauge', -- Current value
'histogram', -- Distribution
'summary', -- Statistical summary
'rate' -- Rate per period
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Enum for time granularity (used for period bucketing in upsert_metric)
DO $$ BEGIN
CREATE TYPE admin.time_granularity AS ENUM (
'minute',
'hour',
'day',
'week',
'month',
'quarter',
'year'
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Platform Metrics table: one row per (scope, metric, granularity, period)
-- sample, with optional distribution statistics and breakdown dimensions.
CREATE TABLE IF NOT EXISTS admin.platform_analytics (
-- Identifiers
id BIGSERIAL PRIMARY KEY,
-- Scope
tenant_id UUID REFERENCES tenants.tenants(id), -- NULL = platform-wide (global)
-- Metric identity
metric_name VARCHAR(100) NOT NULL,
metric_type admin.metric_type NOT NULL DEFAULT 'gauge',
category VARCHAR(50) NOT NULL, -- 'users', 'trading', 'revenue', 'performance'
-- Period covered by this sample
period_start TIMESTAMPTZ NOT NULL,
period_end TIMESTAMPTZ NOT NULL,
granularity admin.time_granularity NOT NULL DEFAULT 'day',
-- Values (previous/delta columns are filled by the calculate_analytics_delta trigger)
value DECIMAL(20, 6) NOT NULL,
previous_value DECIMAL(20, 6), -- Value of the previous period (to compute delta)
delta_value DECIMAL(20, 6), -- Change vs previous period
delta_percent DECIMAL(10, 4), -- Percentage change
-- Statistics (for histograms/summaries)
min_value DECIMAL(20, 6),
max_value DECIMAL(20, 6),
avg_value DECIMAL(20, 6),
median_value DECIMAL(20, 6),
p95_value DECIMAL(20, 6),
p99_value DECIMAL(20, 6),
stddev_value DECIMAL(20, 6),
sample_count BIGINT,
-- Additional dimensions
dimensions JSONB DEFAULT '{}'::JSONB, -- Breakdown dimensions
-- Metadata
source VARCHAR(50) NOT NULL DEFAULT 'system', -- Origin of the data point
is_estimated BOOLEAN NOT NULL DEFAULT FALSE, -- Estimated vs real data
confidence_level DECIMAL(5, 4), -- Confidence level (0-1)
-- Timestamps
collected_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
CONSTRAINT analytics_period_check CHECK (period_end > period_start),
-- NOTE(review): Postgres UNIQUE treats NULLs as distinct, so this constraint
-- does NOT deduplicate global metrics (tenant_id IS NULL), and ON CONFLICT
-- never fires for them. Consider UNIQUE NULLS NOT DISTINCT (PG15+) or a
-- partial unique index on (metric_name, granularity, period_start)
-- WHERE tenant_id IS NULL.
CONSTRAINT analytics_unique_metric UNIQUE (tenant_id, metric_name, granularity, period_start)
);
COMMENT ON TABLE admin.platform_analytics IS
'Metricas agregadas de la plataforma para dashboards y reportes';
COMMENT ON COLUMN admin.platform_analytics.tenant_id IS
'NULL para metricas globales de plataforma, UUID para metricas por tenant';
COMMENT ON COLUMN admin.platform_analytics.dimensions IS
'Dimensiones adicionales como {"country": "MX", "plan": "premium"}';
-- Partitioning by date (recommended for production)
-- CREATE TABLE admin.platform_analytics_partitioned (
-- LIKE admin.platform_analytics INCLUDING ALL
-- ) PARTITION BY RANGE (period_start);
-- Indexes: all time-series lookups lead with metric identity and scan
-- period_start descending (most recent first)
CREATE INDEX IF NOT EXISTS idx_analytics_metric_period
ON admin.platform_analytics(metric_name, period_start DESC);
-- Partial: per-tenant metric series
CREATE INDEX IF NOT EXISTS idx_analytics_tenant_metric
ON admin.platform_analytics(tenant_id, metric_name, period_start DESC)
WHERE tenant_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_analytics_category
ON admin.platform_analytics(category, period_start DESC);
CREATE INDEX IF NOT EXISTS idx_analytics_granularity
ON admin.platform_analytics(granularity, period_start DESC);
-- Partial: global (platform-wide) metric series
CREATE INDEX IF NOT EXISTS idx_analytics_global
ON admin.platform_analytics(metric_name, granularity, period_start DESC)
WHERE tenant_id IS NULL;
-- GIN index for containment queries on the dimensions JSONB
CREATE INDEX IF NOT EXISTS idx_analytics_dimensions_gin
ON admin.platform_analytics USING GIN (dimensions);
-- BEFORE INSERT trigger: look up the metric value for the immediately
-- preceding period (same metric/granularity/tenant scope) and pre-compute
-- previous_value, delta_value and delta_percent on the incoming row.
-- When no previous period exists, the delta columns stay NULL.
CREATE OR REPLACE FUNCTION admin.calculate_analytics_delta()
RETURNS TRIGGER AS $$
DECLARE
    v_prev_value DECIMAL(20, 6);
    v_step INTERVAL;
BEGIN
    -- Period length implied by the granularity (all enum values covered).
    v_step := CASE NEW.granularity
        WHEN 'minute' THEN INTERVAL '1 minute'
        WHEN 'hour' THEN INTERVAL '1 hour'
        WHEN 'day' THEN INTERVAL '1 day'
        WHEN 'week' THEN INTERVAL '1 week'
        WHEN 'month' THEN INTERVAL '1 month'
        WHEN 'quarter' THEN INTERVAL '3 months'
        WHEN 'year' THEN INTERVAL '1 year'
    END;
    -- Value of the previous period, matching tenant scope (NULL-safe).
    SELECT pa.value INTO v_prev_value
    FROM admin.platform_analytics pa
    WHERE pa.metric_name = NEW.metric_name
      AND pa.granularity = NEW.granularity
      AND (pa.tenant_id = NEW.tenant_id OR (pa.tenant_id IS NULL AND NEW.tenant_id IS NULL))
      AND pa.period_start = NEW.period_start - v_step
    LIMIT 1;
    IF FOUND THEN
        NEW.previous_value := v_prev_value;
        NEW.delta_value := NEW.value - v_prev_value;
        -- Percent change left NULL when the previous value is zero.
        IF v_prev_value != 0 THEN
            NEW.delta_percent := ((NEW.value - v_prev_value) / v_prev_value) * 100;
        END IF;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS analytics_calc_delta ON admin.platform_analytics;
CREATE TRIGGER analytics_calc_delta
    BEFORE INSERT ON admin.platform_analytics
    FOR EACH ROW
    EXECUTE FUNCTION admin.calculate_analytics_delta();
-- Insert or refresh a metric sample for the current period; returns the row id.
-- Fixes two defects in the original:
--   1. Postgres UNIQUE treats NULLs as distinct, so ON CONFLICT never fired
--      for global metrics (tenant_id IS NULL) and duplicate rows accumulated.
--      Global metrics are now updated in place first.
--   2. ON CONFLICT DO UPDATE now also refreshes the delta columns computed by
--      the BEFORE INSERT trigger (visible via EXCLUDED) instead of leaving
--      stale previous_value/delta_value/delta_percent on the row.
CREATE OR REPLACE FUNCTION admin.upsert_metric(
    p_metric_name VARCHAR(100),
    p_category VARCHAR(50),
    p_value DECIMAL(20, 6),
    p_granularity admin.time_granularity DEFAULT 'day',
    p_tenant_id UUID DEFAULT NULL,
    p_metric_type admin.metric_type DEFAULT 'gauge',
    p_dimensions JSONB DEFAULT '{}'::JSONB
)
RETURNS BIGINT AS $$
DECLARE
    v_period_start TIMESTAMPTZ;
    v_period_end TIMESTAMPTZ;
    v_id BIGINT;
BEGIN
    -- Current period boundaries for the requested granularity.
    CASE p_granularity
        WHEN 'minute' THEN
            v_period_start := DATE_TRUNC('minute', NOW());
            v_period_end := v_period_start + INTERVAL '1 minute';
        WHEN 'hour' THEN
            v_period_start := DATE_TRUNC('hour', NOW());
            v_period_end := v_period_start + INTERVAL '1 hour';
        WHEN 'day' THEN
            v_period_start := DATE_TRUNC('day', NOW());
            v_period_end := v_period_start + INTERVAL '1 day';
        WHEN 'week' THEN
            v_period_start := DATE_TRUNC('week', NOW());
            v_period_end := v_period_start + INTERVAL '1 week';
        WHEN 'month' THEN
            v_period_start := DATE_TRUNC('month', NOW());
            v_period_end := v_period_start + INTERVAL '1 month';
        WHEN 'quarter' THEN
            v_period_start := DATE_TRUNC('quarter', NOW());
            v_period_end := v_period_start + INTERVAL '3 months';
        WHEN 'year' THEN
            v_period_start := DATE_TRUNC('year', NOW());
            v_period_end := v_period_start + INTERVAL '1 year';
    END CASE;
    IF p_tenant_id IS NULL THEN
        -- Global metric: the unique constraint cannot catch this conflict
        -- (NULL is distinct from NULL), so update in place first.
        -- NOTE(review): deltas are not recomputed on this path, matching the
        -- previous DO UPDATE behavior; a concurrent first insert can still
        -- race, which is acceptable for analytics data.
        UPDATE admin.platform_analytics
        SET value = p_value,
            dimensions = p_dimensions,
            collected_at = NOW()
        WHERE tenant_id IS NULL
          AND metric_name = p_metric_name
          AND granularity = p_granularity
          AND period_start = v_period_start
        RETURNING id INTO v_id;
        IF FOUND THEN
            RETURN v_id;
        END IF;
    END IF;
    INSERT INTO admin.platform_analytics (
        tenant_id, metric_name, metric_type, category,
        period_start, period_end, granularity,
        value, dimensions
    ) VALUES (
        p_tenant_id, p_metric_name, p_metric_type, p_category,
        v_period_start, v_period_end, p_granularity,
        p_value, p_dimensions
    )
    ON CONFLICT (tenant_id, metric_name, granularity, period_start)
    DO UPDATE SET
        value = EXCLUDED.value,
        dimensions = EXCLUDED.dimensions,
        -- EXCLUDED reflects the candidate row AFTER BEFORE INSERT triggers,
        -- so these carry the freshly computed deltas.
        previous_value = EXCLUDED.previous_value,
        delta_value = EXCLUDED.delta_value,
        delta_percent = EXCLUDED.delta_percent,
        collected_at = NOW()
    RETURNING id INTO v_id;
    RETURN v_id;
END;
$$ LANGUAGE plpgsql;
-- Metrics collected within the last 24 hours, newest period first per metric
CREATE OR REPLACE VIEW admin.v_recent_metrics AS
SELECT
metric_name,
category,
granularity,
tenant_id,
value,
delta_percent,
period_start,
collected_at
FROM admin.platform_analytics
WHERE collected_at > NOW() - INTERVAL '24 hours'
ORDER BY metric_name, period_start DESC;
-- Headline platform KPIs: today's daily-granularity GLOBAL metrics only,
-- restricted to a fixed allow-list of metric names
CREATE OR REPLACE VIEW admin.v_platform_kpis AS
SELECT
metric_name,
value AS current_value,
previous_value,
delta_percent AS change_percent,
period_start,
granularity
FROM admin.platform_analytics
WHERE tenant_id IS NULL -- Global metrics only
AND granularity = 'day'
AND metric_name IN (
'total_users',
'active_users_daily',
'total_trades',
'total_volume_usd',
'revenue_usd',
'new_signups',
'churn_rate'
)
AND period_start = DATE_TRUNC('day', NOW())
ORDER BY metric_name;
-- Grants (no DELETE on the metrics table for the app role)
GRANT SELECT, INSERT, UPDATE ON admin.platform_analytics TO trading_app;
GRANT SELECT ON admin.platform_analytics TO trading_readonly;
GRANT USAGE, SELECT ON SEQUENCE admin.platform_analytics_id_seq TO trading_app;
GRANT SELECT ON admin.v_recent_metrics TO trading_app;
GRANT SELECT ON admin.v_platform_kpis TO trading_app;
GRANT EXECUTE ON FUNCTION admin.upsert_metric TO trading_app;

View File

@@ -0,0 +1,358 @@
-- ============================================================================
-- SCHEMA: admin
-- TABLE: api_keys
-- DESCRIPTION: API keys for programmatic access
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Enum for the API key type (duplicate_object guard keeps re-runs idempotent)
DO $$ BEGIN
CREATE TYPE admin.api_key_type AS ENUM (
'public', -- Public read-only
'private', -- Full access to the owner's resources
'admin', -- Administrative access
'service', -- Service-to-service
'webhook', -- Only for receiving webhooks
'readonly' -- Read-only
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Enum for the API key lifecycle status
DO $$ BEGIN
CREATE TYPE admin.api_key_status AS ENUM (
'active', -- Active and functional
'suspended', -- Temporarily suspended
'revoked', -- Permanently revoked
'expired' -- Expired
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- API Keys table: stores only a hash of the secret; lookup is by key_prefix
-- and the caller verifies the presented secret against key_hash.
CREATE TABLE IF NOT EXISTS admin.api_keys (
-- Identifiers
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users.users(id) ON DELETE CASCADE,
-- Key identification
name VARCHAR(100) NOT NULL,
description TEXT,
-- Key values (only a hash of the secret is stored)
key_prefix VARCHAR(8) NOT NULL, -- First 8 characters, for identification
key_hash VARCHAR(255) NOT NULL, -- bcrypt hash of the full secret
key_hint VARCHAR(4), -- Last 4 characters, for reference
-- Type and status
type admin.api_key_type NOT NULL DEFAULT 'private',
status admin.api_key_status NOT NULL DEFAULT 'active',
-- Permissions
scopes JSONB NOT NULL DEFAULT '["read"]'::JSONB, -- Array of allowed scopes
permissions JSONB DEFAULT '{}'::JSONB, -- Granular permissions
-- Restrictions
allowed_ips INET[], -- Allowed IPs (NULL = all)
allowed_origins TEXT[], -- Allowed CORS origins
allowed_user_agents TEXT[], -- Allowed user agents
-- Rate limiting
-- NOTE(review): admin.validate_api_key enforces only the per-day limit;
-- per-minute/per-hour limits must be enforced at the application edge.
rate_limit_per_minute INTEGER DEFAULT 60,
rate_limit_per_hour INTEGER DEFAULT 1000,
rate_limit_per_day INTEGER DEFAULT 10000,
-- Usage
last_used_at TIMESTAMPTZ,
last_used_ip INET,
last_used_user_agent TEXT,
total_requests BIGINT NOT NULL DEFAULT 0,
total_errors BIGINT NOT NULL DEFAULT 0,
-- Daily usage tracking (reset by the api_key_reset_daily trigger on UPDATE)
requests_today INTEGER NOT NULL DEFAULT 0,
requests_today_date DATE DEFAULT CURRENT_DATE,
-- Expiration
expires_at TIMESTAMPTZ,
-- Rotation (old hash kept valid during a grace window)
previous_key_hash VARCHAR(255), -- Previous hash during rotation
previous_key_valid_until TIMESTAMPTZ, -- Validity of the previous key
rotated_at TIMESTAMPTZ,
rotation_count INTEGER NOT NULL DEFAULT 0,
-- Audit
created_by UUID REFERENCES users.users(id),
revoked_by UUID REFERENCES users.users(id),
revoked_at TIMESTAMPTZ,
revocation_reason TEXT,
-- Metadata
environment VARCHAR(20) DEFAULT 'production', -- 'development', 'staging', 'production'
metadata JSONB DEFAULT '{}'::JSONB,
-- Timestamps (updated_at maintained by trigger)
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
CONSTRAINT api_keys_unique_name_per_user UNIQUE (user_id, name),
CONSTRAINT api_keys_unique_prefix UNIQUE (key_prefix)
);
COMMENT ON TABLE admin.api_keys IS
'API Keys para acceso programatico a la plataforma';
COMMENT ON COLUMN admin.api_keys.key_prefix IS
'Primeros 8 caracteres de la key para identificacion sin exponer el secret';
COMMENT ON COLUMN admin.api_keys.key_hash IS
'Hash bcrypt del secret completo (el secret solo se muestra una vez al crear)';
COMMENT ON COLUMN admin.api_keys.scopes IS
'Array de scopes: ["read", "write", "trade", "admin"]';
-- Indexes
-- key_prefix needs no standalone index: the api_keys_unique_prefix UNIQUE
-- constraint already creates a backing index that serves prefix lookups.
-- The old idx_api_keys_prefix duplicated it; drop it where it exists.
DROP INDEX IF EXISTS admin.idx_api_keys_prefix;
CREATE INDEX IF NOT EXISTS idx_api_keys_tenant
ON admin.api_keys(tenant_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_user
ON admin.api_keys(user_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_status_active
ON admin.api_keys(status)
WHERE status = 'active';
CREATE INDEX IF NOT EXISTS idx_api_keys_type
ON admin.api_keys(type);
-- Partial indexes for expiry sweeps and recent-usage listings on active keys
CREATE INDEX IF NOT EXISTS idx_api_keys_expires
ON admin.api_keys(expires_at)
WHERE expires_at IS NOT NULL AND status = 'active';
CREATE INDEX IF NOT EXISTS idx_api_keys_last_used
ON admin.api_keys(last_used_at DESC)
WHERE status = 'active';
-- Roll the per-day request counter over to zero the first time a key row is
-- touched on a new calendar day. NULL requests_today_date (legacy rows)
-- compares as UNKNOWN and is deliberately left untouched.
CREATE OR REPLACE FUNCTION admin.reset_daily_requests()
RETURNS TRIGGER AS $$
BEGIN
    IF NEW.requests_today_date < CURRENT_DATE THEN
        NEW.requests_today := 0;
        NEW.requests_today_date := CURRENT_DATE;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- updated_at maintenance reuses the shared admin-schema trigger function.
DROP TRIGGER IF EXISTS api_key_updated_at ON admin.api_keys;
CREATE TRIGGER api_key_updated_at
    BEFORE UPDATE ON admin.api_keys
    FOR EACH ROW
    EXECUTE FUNCTION admin.update_admin_timestamp();
DROP TRIGGER IF EXISTS api_key_reset_daily ON admin.api_keys;
CREATE TRIGGER api_key_reset_daily
    BEFORE UPDATE ON admin.api_keys
    FOR EACH ROW
    EXECUTE FUNCTION admin.reset_daily_requests();
-- Validate an API key lookup by prefix, enforce expiry/IP/daily-limit rules,
-- and account for one request on success.
-- SECURITY NOTE: this function only locates the key row by its public prefix;
-- the caller MUST still verify the presented secret against key_hash (bcrypt)
-- before trusting the result.
-- Fix vs. original: the daily quota check now ignores a counter left over
-- from a previous day. Previously, a key that exhausted its quota stayed
-- rejected forever: the rejection path never ran the UPDATE, so the
-- BEFORE UPDATE reset trigger never fired to clear requests_today.
CREATE OR REPLACE FUNCTION admin.validate_api_key(
    p_key_prefix VARCHAR(8),
    p_client_ip INET DEFAULT NULL
)
RETURNS TABLE (
    is_valid BOOLEAN,
    key_id UUID,
    user_id UUID,
    tenant_id UUID,
    key_type admin.api_key_type,
    scopes JSONB,
    rate_limit_remaining INTEGER,
    error_message TEXT
) AS $$
DECLARE
    v_key RECORD;
    v_used_today INTEGER;
    v_rate_remaining INTEGER;
BEGIN
    -- Locate the key by its public prefix (active keys only).
    SELECT * INTO v_key
    FROM admin.api_keys ak
    WHERE ak.key_prefix = p_key_prefix
    AND ak.status = 'active'
    LIMIT 1;
    IF NOT FOUND THEN
        RETURN QUERY SELECT
            FALSE, NULL::UUID, NULL::UUID, NULL::UUID,
            NULL::admin.api_key_type, NULL::JSONB, NULL::INTEGER,
            'API key not found or inactive'::TEXT;
        RETURN;
    END IF;
    -- Expiry check: lazily flip the status so later lookups short-circuit.
    IF v_key.expires_at IS NOT NULL AND v_key.expires_at < NOW() THEN
        UPDATE admin.api_keys SET status = 'expired' WHERE id = v_key.id;
        RETURN QUERY SELECT
            FALSE, NULL::UUID, NULL::UUID, NULL::UUID,
            NULL::admin.api_key_type, NULL::JSONB, NULL::INTEGER,
            'API key has expired'::TEXT;
        RETURN;
    END IF;
    -- Optional IP allow-list; an unknown client IP fails a restricted key.
    IF v_key.allowed_ips IS NOT NULL AND array_length(v_key.allowed_ips, 1) > 0 THEN
        IF p_client_ip IS NULL OR NOT (p_client_ip = ANY(v_key.allowed_ips)) THEN
            RETURN QUERY SELECT
                FALSE, NULL::UUID, NULL::UUID, NULL::UUID,
                NULL::admin.api_key_type, NULL::JSONB, NULL::INTEGER,
                'IP address not allowed'::TEXT;
            RETURN;
        END IF;
    END IF;
    -- Daily rate limit: treat the stored counter as zero when it belongs to a
    -- previous day (the reset trigger only fires on UPDATE, which a rejected
    -- request never reaches).
    v_used_today := CASE
        WHEN v_key.requests_today_date IS NULL
            OR v_key.requests_today_date < CURRENT_DATE THEN 0
        ELSE v_key.requests_today
    END;
    v_rate_remaining := v_key.rate_limit_per_day - v_used_today;
    IF v_rate_remaining <= 0 THEN
        RETURN QUERY SELECT
            FALSE, v_key.id, v_key.user_id, v_key.tenant_id,
            v_key.type, v_key.scopes, 0,
            'Daily rate limit exceeded'::TEXT;
        RETURN;
    END IF;
    -- Valid key: record usage. The BEFORE UPDATE trigger resets the daily
    -- counter first when the calendar day has rolled over.
    UPDATE admin.api_keys
    SET
        last_used_at = NOW(),
        last_used_ip = p_client_ip,
        total_requests = total_requests + 1,
        requests_today = requests_today + 1
    WHERE id = v_key.id;
    RETURN QUERY SELECT
        TRUE, v_key.id, v_key.user_id, v_key.tenant_id,
        v_key.type, v_key.scopes, v_rate_remaining - 1,
        NULL::TEXT;
END;
$$ LANGUAGE plpgsql;
-- Permanently revoke an ACTIVE API key, recording who revoked it, when, and why.
-- Returns TRUE only when a row actually transitioned to 'revoked'.
CREATE OR REPLACE FUNCTION admin.revoke_api_key(
    p_key_id UUID,
    p_revoked_by UUID,
    p_reason TEXT DEFAULT NULL
)
RETURNS BOOLEAN AS $$
DECLARE
    v_changed BOOLEAN;
BEGIN
    UPDATE admin.api_keys
    SET status = 'revoked',
        revoked_by = p_revoked_by,
        revoked_at = NOW(),
        revocation_reason = p_reason
    WHERE id = p_key_id
      AND status = 'active';
    v_changed := FOUND;
    RETURN v_changed;
END;
$$ LANGUAGE plpgsql;
-- Rotate an active API key's secret: install the new hash and keep the old
-- one valid for a grace window so in-flight clients can migrate.
-- Returns TRUE when the key existed and was active.
CREATE OR REPLACE FUNCTION admin.rotate_api_key(
    p_key_id UUID,
    p_new_key_hash VARCHAR(255),
    p_grace_period_hours INTEGER DEFAULT 24
)
RETURNS BOOLEAN AS $$
BEGIN
    UPDATE admin.api_keys
    SET previous_key_hash = key_hash,
        previous_key_valid_until = NOW() + make_interval(hours => p_grace_period_hours),
        key_hash = p_new_key_hash,
        rotated_at = NOW(),
        rotation_count = rotation_count + 1
    WHERE id = p_key_id
      AND status = 'active';
    RETURN FOUND;
END;
$$ LANGUAGE plpgsql;
-- Active API keys with owner identity; deliberately never exposes key_hash
CREATE OR REPLACE VIEW admin.v_active_api_keys AS
SELECT
ak.id,
ak.tenant_id,
ak.user_id,
u.email AS user_email,
ak.name,
ak.key_prefix,
ak.key_hint,
ak.type,
ak.scopes,
ak.rate_limit_per_day,
ak.requests_today,
ak.total_requests,
ak.last_used_at,
ak.expires_at,
ak.created_at
FROM admin.api_keys ak
JOIN users.users u ON ak.user_id = u.id
WHERE ak.status = 'active'
ORDER BY ak.last_used_at DESC NULLS LAST;
-- Per-key usage, with today's consumption as a percentage of the daily limit
CREATE OR REPLACE VIEW admin.v_api_key_usage AS
SELECT
ak.id,
ak.name,
ak.key_prefix,
ak.type,
ak.total_requests,
ak.total_errors,
ak.requests_today,
CASE WHEN ak.rate_limit_per_day > 0
THEN (ak.requests_today::DECIMAL / ak.rate_limit_per_day * 100)::DECIMAL(5,2)
ELSE 0
END AS usage_percent,
ak.last_used_at,
ak.last_used_ip
FROM admin.api_keys ak
WHERE ak.status = 'active'
ORDER BY ak.total_requests DESC;
-- Row Level Security for multi-tenancy
ALTER TABLE admin.api_keys ENABLE ROW LEVEL SECURITY;
-- Tenant isolation: rows are visible/writable only for the tenant set in
-- app.current_tenant_id (current_setting(..., true) yields NULL when unset,
-- which denies access rather than erroring).
CREATE POLICY api_keys_tenant_isolation ON admin.api_keys
FOR ALL
USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- Intended: users can only see their own API keys.
-- NOTE(review): both policies are PERMISSIVE and are therefore OR-ed for
-- SELECT — any session passing the tenant policy can still read every key in
-- the tenant, so this policy does not actually restrict anything. To enforce
-- per-user reads it would need to be created AS RESTRICTIVE; confirm intent
-- before changing, since that would tighten existing access.
CREATE POLICY api_keys_user_isolation ON admin.api_keys
FOR SELECT
USING (user_id = current_setting('app.current_user_id', true)::UUID);
-- Grants (no DELETE for the app role; keys are revoked, not deleted)
GRANT SELECT, INSERT, UPDATE ON admin.api_keys TO trading_app;
GRANT SELECT ON admin.api_keys TO trading_readonly;
GRANT SELECT ON admin.v_active_api_keys TO trading_app;
GRANT SELECT ON admin.v_api_key_usage TO trading_app;
GRANT EXECUTE ON FUNCTION admin.validate_api_key TO trading_app;
GRANT EXECUTE ON FUNCTION admin.revoke_api_key TO trading_app;
GRANT EXECUTE ON FUNCTION admin.rotate_api_key TO trading_app;

View File

@@ -0,0 +1,274 @@
-- ============================================================================
-- SCHEMA: market_data
-- TABLE: tickers
-- DESCRIPTION: Financial instruments available for trading
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Create the schema if it does not exist (idempotent migration)
CREATE SCHEMA IF NOT EXISTS market_data;
-- Schema-level usage grants
GRANT USAGE ON SCHEMA market_data TO trading_app;
GRANT USAGE ON SCHEMA market_data TO trading_readonly;
-- Enum for the instrument type (duplicate_object guard keeps re-runs idempotent)
DO $$ BEGIN
CREATE TYPE market_data.instrument_type AS ENUM (
'forex_major', -- Major pairs: EURUSD, GBPUSD, etc.
'forex_minor', -- Minor pairs: EURGBP, AUDNZD, etc.
'forex_exotic', -- Exotic pairs: USDMXN, USDZAR, etc.
'commodity', -- Commodities: XAUUSD, XAGUSD, USOIL
'index', -- Indices: US30, US500, NAS100
'stock', -- Individual stocks
'crypto', -- Cryptocurrencies
'bond', -- Bonds
'etf' -- ETFs
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Enum for the ticker status
DO $$ BEGIN
CREATE TYPE market_data.ticker_status AS ENUM (
'active', -- Active and tradeable
'inactive', -- Temporarily inactive
'halted', -- Trading suspended
'delisted', -- Removed from the market
'pending' -- Pending activation
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Tickers (Instruments) table: catalog of tradeable instruments with trading
-- specs, cached live quote, daily statistics, and feature-flag integrations.
CREATE TABLE IF NOT EXISTS market_data.tickers (
-- Identifiers
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
symbol VARCHAR(20) NOT NULL UNIQUE, -- EURUSD, XAUUSD, US30
name VARCHAR(200) NOT NULL, -- "Euro / US Dollar"
-- Classification
type market_data.instrument_type NOT NULL,
category VARCHAR(50), -- Free-form sub-category
sector VARCHAR(100), -- For stocks
industry VARCHAR(100), -- For stocks
-- Currencies (for forex)
base_currency VARCHAR(3), -- EUR in EURUSD
quote_currency VARCHAR(3), -- USD in EURUSD
-- Exchange / Broker
exchange VARCHAR(50), -- NYSE, NASDAQ, FOREX
broker_symbol VARCHAR(50), -- Symbol at the broker
polygon_ticker VARCHAR(50), -- Ticker on Polygon.io
-- Trading specifications
contract_size DECIMAL(15, 6) DEFAULT 100000, -- Contract size
tick_size DECIMAL(15, 8) NOT NULL DEFAULT 0.00001,
tick_value DECIMAL(15, 6), -- Value per tick in USD
pip_size DECIMAL(15, 8) DEFAULT 0.0001, -- Pip size
min_lot_size DECIMAL(10, 4) DEFAULT 0.01,
max_lot_size DECIMAL(10, 4) DEFAULT 100,
lot_step DECIMAL(10, 4) DEFAULT 0.01,
-- Margin and leverage
margin_required DECIMAL(10, 4) DEFAULT 1.00, -- % margin required
max_leverage INTEGER DEFAULT 100,
margin_mode VARCHAR(20) DEFAULT 'percentage', -- 'percentage', 'fixed'
-- Typical spread
typical_spread_pips DECIMAL(10, 4),
min_spread_pips DECIMAL(10, 4),
max_spread_pips DECIMAL(10, 4),
-- Trading hours (default presumably models the 24x5 FX week in NY time --
-- TODO confirm against the session-evaluation code that consumes this JSON)
trading_hours JSONB DEFAULT '{
"timezone": "America/New_York",
"sessions": [
{"day": 0, "open": "17:00", "close": "17:00", "next_day": true},
{"day": 1, "open": "00:00", "close": "17:00"},
{"day": 2, "open": "17:00", "close": "17:00", "next_day": true},
{"day": 3, "open": "00:00", "close": "17:00"},
{"day": 4, "open": "17:00", "close": "17:00", "next_day": true},
{"day": 5, "open": "00:00", "close": "17:00"}
]
}'::JSONB,
is_tradeable_now BOOLEAN DEFAULT FALSE, -- Cached current state
-- Current price (cache, refreshed by market_data.update_ticker_price)
current_bid DECIMAL(15, 8),
current_ask DECIMAL(15, 8),
current_spread DECIMAL(15, 8),
price_updated_at TIMESTAMPTZ,
-- Daily statistics
daily_open DECIMAL(15, 8),
daily_high DECIMAL(15, 8),
daily_low DECIMAL(15, 8),
daily_close DECIMAL(15, 8),
daily_change DECIMAL(15, 8),
daily_change_percent DECIMAL(10, 4),
daily_volume BIGINT,
-- Volatility
atr_14d DECIMAL(15, 8), -- 14-day ATR
volatility_percentile INTEGER, -- Volatility percentile (0-100)
-- Status
status market_data.ticker_status NOT NULL DEFAULT 'active',
is_featured BOOLEAN NOT NULL DEFAULT FALSE, -- Highlighted in the UI
display_order INTEGER DEFAULT 999,
-- Integrations (feature flags per downstream consumer)
enabled_for_predictions BOOLEAN NOT NULL DEFAULT FALSE,
enabled_for_signals BOOLEAN NOT NULL DEFAULT FALSE,
enabled_for_bots BOOLEAN NOT NULL DEFAULT FALSE,
-- Metadata
metadata JSONB DEFAULT '{}'::JSONB,
tags VARCHAR(50)[],
-- Timestamps (updated_at maintained by the ticker_updated_at trigger)
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE market_data.tickers IS
'Catalogo de instrumentos financieros disponibles para trading';
COMMENT ON COLUMN market_data.tickers.symbol IS
'Simbolo estandar del instrumento (ej: EURUSD, XAUUSD, US30)';
COMMENT ON COLUMN market_data.tickers.pip_size IS
'Tamaño de un pip: 0.0001 para forex, 0.01 para JPY pairs';
-- Indexes
-- The UNIQUE constraint on tickers.symbol already creates a backing index,
-- so the old standalone idx_tickers_symbol was redundant; drop it if present.
DROP INDEX IF EXISTS market_data.idx_tickers_symbol;
CREATE INDEX IF NOT EXISTS idx_tickers_type
ON market_data.tickers(type);
-- Active instruments filtered by type (hot path for listings)
CREATE INDEX IF NOT EXISTS idx_tickers_status_active
ON market_data.tickers(status, type)
WHERE status = 'active';
CREATE INDEX IF NOT EXISTS idx_tickers_featured
ON market_data.tickers(is_featured, display_order)
WHERE is_featured = TRUE;
CREATE INDEX IF NOT EXISTS idx_tickers_predictions
ON market_data.tickers(symbol)
WHERE enabled_for_predictions = TRUE;
CREATE INDEX IF NOT EXISTS idx_tickers_polygon
ON market_data.tickers(polygon_ticker)
WHERE polygon_ticker IS NOT NULL;
-- GIN index for tag-membership queries
CREATE INDEX IF NOT EXISTS idx_tickers_tags_gin
ON market_data.tickers USING GIN (tags);
-- Row trigger: keep updated_at current on every modification of tickers.
CREATE OR REPLACE FUNCTION market_data.update_ticker_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS ticker_updated_at ON market_data.tickers;
CREATE TRIGGER ticker_updated_at
    BEFORE UPDATE ON market_data.tickers
    FOR EACH ROW
    EXECUTE FUNCTION market_data.update_ticker_timestamp();
-- Refresh the cached bid/ask quote (and the derived spread) for one symbol.
-- Returns TRUE when the symbol exists and the row was updated.
CREATE OR REPLACE FUNCTION market_data.update_ticker_price(
    p_symbol VARCHAR(20),
    p_bid DECIMAL(15, 8),
    p_ask DECIMAL(15, 8)
)
RETURNS BOOLEAN AS $$
DECLARE
    v_row_hit BOOLEAN;
BEGIN
    UPDATE market_data.tickers
    SET current_bid = p_bid,
        current_ask = p_ask,
        current_spread = p_ask - p_bid,
        price_updated_at = NOW()
    WHERE symbol = p_symbol;
    v_row_hit := FOUND;
    RETURN v_row_hit;
END;
$$ LANGUAGE plpgsql;
-- Tradeable instruments for the UI: featured instruments first, then by
-- configured display order, then alphabetically
CREATE OR REPLACE VIEW market_data.v_active_tickers AS
SELECT
id,
symbol,
name,
type,
base_currency,
quote_currency,
pip_size,
typical_spread_pips,
current_bid,
current_ask,
daily_change_percent,
is_tradeable_now,
enabled_for_predictions,
enabled_for_signals
FROM market_data.tickers
WHERE status = 'active'
ORDER BY
is_featured DESC,
display_order ASC,
symbol ASC;
-- Active instruments enabled for ML predictions
CREATE OR REPLACE VIEW market_data.v_prediction_tickers AS
SELECT
symbol,
name,
type,
pip_size,
atr_14d,
volatility_percentile
FROM market_data.tickers
WHERE status = 'active'
AND enabled_for_predictions = TRUE
ORDER BY symbol;
-- Grants (no DELETE for the app role; tickers are delisted, not deleted)
GRANT SELECT, INSERT, UPDATE ON market_data.tickers TO trading_app;
GRANT SELECT ON market_data.tickers TO trading_readonly;
GRANT SELECT ON market_data.v_active_tickers TO trading_app;
GRANT SELECT ON market_data.v_prediction_tickers TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.update_ticker_price TO trading_app;
-- ============================================================================
-- SEED DATA - Main tickers
-- Idempotent: ON CONFLICT (symbol) DO NOTHING preserves rows already present
-- (including any manual edits). Note the pip_size variations: 0.01 for the
-- JPY pair and gold, finer for silver, index points for the indices.
-- ============================================================================
INSERT INTO market_data.tickers (symbol, name, type, base_currency, quote_currency, pip_size, is_featured, enabled_for_predictions, enabled_for_signals, display_order)
VALUES
('EURUSD', 'Euro / US Dollar', 'forex_major', 'EUR', 'USD', 0.0001, TRUE, TRUE, TRUE, 1),
('GBPUSD', 'British Pound / US Dollar', 'forex_major', 'GBP', 'USD', 0.0001, TRUE, TRUE, TRUE, 2),
('USDJPY', 'US Dollar / Japanese Yen', 'forex_major', 'USD', 'JPY', 0.01, TRUE, TRUE, TRUE, 3),
('USDCHF', 'US Dollar / Swiss Franc', 'forex_major', 'USD', 'CHF', 0.0001, TRUE, TRUE, TRUE, 4),
('AUDUSD', 'Australian Dollar / US Dollar', 'forex_major', 'AUD', 'USD', 0.0001, TRUE, TRUE, TRUE, 5),
('USDCAD', 'US Dollar / Canadian Dollar', 'forex_major', 'USD', 'CAD', 0.0001, TRUE, TRUE, TRUE, 6),
('NZDUSD', 'New Zealand Dollar / US Dollar', 'forex_major', 'NZD', 'USD', 0.0001, FALSE, TRUE, TRUE, 7),
('XAUUSD', 'Gold / US Dollar', 'commodity', 'XAU', 'USD', 0.01, TRUE, TRUE, TRUE, 10),
('XAGUSD', 'Silver / US Dollar', 'commodity', 'XAG', 'USD', 0.001, FALSE, TRUE, TRUE, 11),
('US30', 'Dow Jones Industrial Average', 'index', NULL, 'USD', 1.0, TRUE, TRUE, TRUE, 20),
('US500', 'S&P 500 Index', 'index', NULL, 'USD', 0.1, TRUE, TRUE, TRUE, 21),
('NAS100', 'NASDAQ 100 Index', 'index', NULL, 'USD', 0.1, TRUE, TRUE, TRUE, 22)
ON CONFLICT (symbol) DO NOTHING;

-- ============================================================================
-- [file boundary] next file: market_data.ohlcv_5m DDL
-- (git web-viewer residue "View File / @ -0,0 +1,303 @@" removed)
-- ============================================================================
-- ============================================================================
-- SCHEMA: market_data
-- TABLE: ohlcv_5m
-- DESCRIPTION: Datos OHLCV de 5 minutos para analisis tecnico y ML
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- OHLCV (5-minute) candle data table.
-- NOTE: this table can grow significantly. Consider partitioning by date
-- (a commented-out monthly-partition sketch follows below in this file).
-- The derived columns (range_*, body_*, is_bullish, gap_*) and timestamp_unix
-- are populated by the ohlcv_5m_derived BEFORE INSERT/UPDATE trigger defined
-- later in this file; writers only need ticker_id, symbol, timestamp and the
-- raw OHLCV values.
CREATE TABLE IF NOT EXISTS market_data.ohlcv_5m (
-- Identifiers
id BIGSERIAL PRIMARY KEY,
ticker_id UUID NOT NULL REFERENCES market_data.tickers(id) ON DELETE CASCADE,
symbol VARCHAR(20) NOT NULL, -- Denormalized to avoid JOINs in hot queries
-- Candle timestamp (column name shadows the SQL keyword; always qualify/quote as needed)
timestamp TIMESTAMPTZ NOT NULL,
timestamp_unix BIGINT NOT NULL, -- Unix timestamp for fast queries (filled by trigger if NULL)
-- OHLC prices
open DECIMAL(15, 8) NOT NULL,
high DECIMAL(15, 8) NOT NULL,
low DECIMAL(15, 8) NOT NULL,
close DECIMAL(15, 8) NOT NULL,
-- Volume
volume BIGINT DEFAULT 0,
volume_weighted_price DECIMAL(15, 8), -- VWAP of the period
-- Derived statistics (computed by trigger)
range_high_low DECIMAL(15, 8), -- high - low
range_percent DECIMAL(10, 6), -- (high - low) / open * 100
body_size DECIMAL(15, 8), -- |close - open|
body_percent DECIMAL(10, 6), -- body / range * 100
is_bullish BOOLEAN, -- close > open
-- Gaps vs. previous candle (computed by trigger)
gap_from_previous DECIMAL(15, 8), -- open - previous_close
gap_percent DECIMAL(10, 6),
-- Data source
source VARCHAR(20) NOT NULL DEFAULT 'polygon', -- 'polygon', 'broker', 'manual'
is_complete BOOLEAN NOT NULL DEFAULT TRUE, -- completed candle vs. in progress
-- Data quality
data_quality VARCHAR(20) DEFAULT 'good', -- 'good', 'interpolated', 'missing'
-- Timestamps
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Constraints
-- The unique pair doubles as the ON CONFLICT target of upsert_candle().
CONSTRAINT ohlcv_5m_unique UNIQUE (ticker_id, timestamp),
CONSTRAINT ohlcv_5m_price_check CHECK (high >= low AND high >= open AND high >= close AND low <= open AND low <= close)
);
COMMENT ON TABLE market_data.ohlcv_5m IS
'Datos OHLCV de 5 minutos para todos los tickers activos';
COMMENT ON COLUMN market_data.ohlcv_5m.symbol IS
'Simbolo denormalizado para evitar JOINs en queries frecuentes';
COMMENT ON COLUMN market_data.ohlcv_5m.volume_weighted_price IS
'Precio promedio ponderado por volumen (VWAP) del periodo';
-- ============================================================================
-- PARTICIONAMIENTO (para produccion)
-- Descomentar para habilitar particionamiento mensual
-- ============================================================================
-- CREATE TABLE market_data.ohlcv_5m_partitioned (
-- LIKE market_data.ohlcv_5m INCLUDING ALL
-- ) PARTITION BY RANGE (timestamp);
-- CREATE TABLE market_data.ohlcv_5m_2026_01 PARTITION OF market_data.ohlcv_5m_partitioned
-- FOR VALUES FROM ('2026-01-01') TO ('2026-02-01');
-- ============================================================================
-- INDICES
-- ============================================================================
-- Primary index for per-symbol, newest-first reads (hot path of the API)
CREATE INDEX IF NOT EXISTS idx_ohlcv_5m_symbol_time
ON market_data.ohlcv_5m(symbol, timestamp DESC);
-- Index for ticker_id (FK lookups and per-ticker scans)
CREATE INDEX IF NOT EXISTS idx_ohlcv_5m_ticker_time
ON market_data.ohlcv_5m(ticker_id, timestamp DESC);
-- Timestamp-only index (pure time-range queries)
CREATE INDEX IF NOT EXISTS idx_ohlcv_5m_timestamp
ON market_data.ohlcv_5m(timestamp DESC);
-- BRIN index on timestamp (cheap on large, append-ordered tables)
-- NOTE(review): largely overlaps idx_ohlcv_5m_timestamp; once real plans are
-- available, consider keeping only one of the two.
CREATE INDEX IF NOT EXISTS idx_ohlcv_5m_timestamp_brin
ON market_data.ohlcv_5m USING BRIN (timestamp);
-- Partial index for in-progress candles (small, targets live-candle updates)
CREATE INDEX IF NOT EXISTS idx_ohlcv_5m_incomplete
ON market_data.ohlcv_5m(symbol, timestamp)
WHERE is_complete = FALSE;
-- ============================================================================
-- TRIGGERS
-- ============================================================================
-- Trigger function: recompute all derived candle statistics on INSERT/UPDATE.
-- FIX vs. v1: every conditional assignment now has an ELSE that resets the
-- field to NULL. Previously, on UPDATE, a field whose condition no longer
-- held (e.g. range becomes 0, previous candle gone) silently kept the stale
-- value from the old row image.
CREATE OR REPLACE FUNCTION market_data.calculate_ohlcv_derived()
RETURNS TRIGGER AS $$
DECLARE
    v_prev_close DECIMAL(15, 8);
BEGIN
    -- Candle range (high - low) and its size relative to the open
    NEW.range_high_low := NEW.high - NEW.low;
    IF NEW.open > 0 THEN
        NEW.range_percent := (NEW.range_high_low / NEW.open) * 100;
    ELSE
        NEW.range_percent := NULL; -- undefined for non-positive open
    END IF;
    -- Candle body (|close - open|) and its share of the range
    NEW.body_size := ABS(NEW.close - NEW.open);
    IF NEW.range_high_low > 0 THEN
        NEW.body_percent := (NEW.body_size / NEW.range_high_low) * 100;
    ELSE
        NEW.body_percent := NULL; -- zero-range candle: undefined
    END IF;
    -- Candle direction (doji with close = open counts as not bullish)
    NEW.is_bullish := NEW.close > NEW.open;
    -- Gap vs. the chronologically previous candle of the same symbol
    SELECT close INTO v_prev_close
    FROM market_data.ohlcv_5m
    WHERE symbol = NEW.symbol
      AND timestamp < NEW.timestamp
    ORDER BY timestamp DESC
    LIMIT 1;
    IF v_prev_close IS NOT NULL THEN
        NEW.gap_from_previous := NEW.open - v_prev_close;
        IF v_prev_close > 0 THEN
            NEW.gap_percent := (NEW.gap_from_previous / v_prev_close) * 100;
        ELSE
            NEW.gap_percent := NULL;
        END IF;
    ELSE
        -- First candle of the symbol: no gap is defined
        NEW.gap_from_previous := NULL;
        NEW.gap_percent := NULL;
    END IF;
    -- Derive the unix timestamp when the caller did not provide one
    -- (runs before the NOT NULL check, so inserts may omit it)
    IF NEW.timestamp_unix IS NULL THEN
        NEW.timestamp_unix := EXTRACT(EPOCH FROM NEW.timestamp)::BIGINT;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS ohlcv_5m_derived ON market_data.ohlcv_5m;
CREATE TRIGGER ohlcv_5m_derived
    BEFORE INSERT OR UPDATE ON market_data.ohlcv_5m
    FOR EACH ROW
    EXECUTE FUNCTION market_data.calculate_ohlcv_derived();
-- ============================================================================
-- FUNCIONES HELPER
-- ============================================================================
-- Return the newest p_count COMPLETED candles for p_symbol, most recent first.
-- Converted from plpgsql to LANGUAGE sql and marked STABLE (read-only):
-- the procedural wrapper added call overhead and hid the query from the
-- planner; a STABLE sql function can be inlined/optimized.
CREATE OR REPLACE FUNCTION market_data.get_latest_candles(
    p_symbol VARCHAR(20),
    p_count INTEGER DEFAULT 100
)
RETURNS TABLE (
    timestamp TIMESTAMPTZ,
    open DECIMAL(15, 8),
    high DECIMAL(15, 8),
    low DECIMAL(15, 8),
    close DECIMAL(15, 8),
    volume BIGINT,
    is_bullish BOOLEAN
) AS $$
    SELECT
        o.timestamp,
        o.open,
        o.high,
        o.low,
        o.close,
        o.volume,
        o.is_bullish
    FROM market_data.ohlcv_5m o
    WHERE o.symbol = p_symbol
      AND o.is_complete = TRUE
    ORDER BY o.timestamp DESC
    LIMIT p_count;
$$ LANGUAGE sql STABLE;
-- Insert a candle for (p_symbol, p_timestamp), or merge it into an existing
-- one: high/low are widened with GREATEST/LEAST, close/volume/is_complete are
-- replaced, and the original open is kept. Raises if the symbol is unknown.
-- Returns the id of the inserted/updated row in ohlcv_5m.
CREATE OR REPLACE FUNCTION market_data.upsert_candle(
    p_symbol VARCHAR(20),
    p_timestamp TIMESTAMPTZ,
    p_open DECIMAL(15, 8),
    p_high DECIMAL(15, 8),
    p_low DECIMAL(15, 8),
    p_close DECIMAL(15, 8),
    p_volume BIGINT DEFAULT 0,
    p_source VARCHAR(20) DEFAULT 'polygon',
    p_is_complete BOOLEAN DEFAULT TRUE
)
RETURNS BIGINT AS $$
DECLARE
    v_resolved_ticker UUID;
    v_result_id BIGINT;
BEGIN
    -- Resolve the symbol to its catalog entry; unknown symbols are an error.
    SELECT t.id
      INTO v_resolved_ticker
      FROM market_data.tickers t
     WHERE t.symbol = p_symbol;
    IF v_resolved_ticker IS NULL THEN
        RAISE EXCEPTION 'Ticker % not found', p_symbol;
    END IF;
    -- Single round-trip upsert keyed on the (ticker_id, timestamp) unique pair.
    INSERT INTO market_data.ohlcv_5m AS c (
        ticker_id, symbol, timestamp,
        open, high, low, close, volume,
        source, is_complete
    )
    VALUES (
        v_resolved_ticker, p_symbol, p_timestamp,
        p_open, p_high, p_low, p_close, p_volume,
        p_source, p_is_complete
    )
    ON CONFLICT (ticker_id, timestamp) DO UPDATE
        SET high        = GREATEST(c.high, EXCLUDED.high),
            low         = LEAST(c.low, EXCLUDED.low),
            close       = EXCLUDED.close,
            volume      = EXCLUDED.volume,
            is_complete = EXCLUDED.is_complete
    RETURNING c.id INTO v_result_id;
    RETURN v_result_id;
END;
$$ LANGUAGE plpgsql;
-- Delete OHLCV rows older than the retention window.
-- p_days_to_keep: retention in days (must be >= 0).
-- Returns the number of rows deleted.
-- FIX: build the interval with make_interval() instead of string
-- concatenation + cast, and reject negative retention values (a negative
-- value would delete recent and future-dated rows).
CREATE OR REPLACE FUNCTION market_data.cleanup_old_ohlcv(
    p_days_to_keep INTEGER DEFAULT 365
)
RETURNS INTEGER AS $$
DECLARE
    v_count INTEGER;
BEGIN
    IF p_days_to_keep < 0 THEN
        RAISE EXCEPTION 'p_days_to_keep must be >= 0, got %', p_days_to_keep;
    END IF;
    DELETE FROM market_data.ohlcv_5m
    WHERE timestamp < NOW() - make_interval(days => p_days_to_keep);
    GET DIAGNOSTICS v_count = ROW_COUNT;
    RETURN v_count;
END;
$$ LANGUAGE plpgsql;
-- ============================================================================
-- VISTAS
-- ============================================================================
-- Latest candle per symbol (includes in-progress candles; check is_complete).
-- DISTINCT ON (symbol) with ORDER BY symbol, timestamp DESC keeps exactly the
-- newest row of each symbol.
CREATE OR REPLACE VIEW market_data.v_latest_candles AS
SELECT DISTINCT ON (symbol)
symbol,
timestamp,
open,
high,
low,
close,
volume,
is_bullish,
range_percent,
is_complete
FROM market_data.ohlcv_5m
ORDER BY symbol, timestamp DESC;
-- Per-symbol statistics over the last 24 hours of COMPLETED candles.
-- bullish_percent: share (0-100) of bullish candles; COUNT(*) is never 0
-- within a group, so the division is safe.
CREATE OR REPLACE VIEW market_data.v_symbol_stats_24h AS
SELECT
symbol,
COUNT(*) AS candle_count,
MIN(low) AS period_low,
MAX(high) AS period_high,
SUM(volume) AS total_volume,
AVG(range_percent) AS avg_range_percent,
SUM(CASE WHEN is_bullish THEN 1 ELSE 0 END)::DECIMAL / COUNT(*) * 100 AS bullish_percent
FROM market_data.ohlcv_5m
WHERE timestamp > NOW() - INTERVAL '24 hours'
AND is_complete = TRUE
GROUP BY symbol;
-- ============================================================================
-- GRANTS
-- trading_app: full DML on the table plus its sequence, views and helper
-- functions. trading_readonly: read-only table access.
-- ============================================================================
GRANT SELECT, INSERT, UPDATE, DELETE
    ON market_data.ohlcv_5m
    TO trading_app;
GRANT USAGE, SELECT ON SEQUENCE market_data.ohlcv_5m_id_seq TO trading_app;
GRANT SELECT
    ON market_data.v_latest_candles,
       market_data.v_symbol_stats_24h
    TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.get_latest_candles TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.upsert_candle TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.cleanup_old_ohlcv TO trading_app;
GRANT SELECT ON market_data.ohlcv_5m TO trading_readonly;

-- ============================================================================
-- [file boundary] next file: market_data.technical_indicators DDL
-- (git web-viewer residue "View File / @ -0,0 +1,287 @@" removed)
-- ============================================================================
-- ============================================================================
-- SCHEMA: market_data
-- TABLE: technical_indicators
-- DESCRIPTION: Indicadores tecnicos pre-calculados para ML y analisis
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Pre-calculated technical indicators: one row per (ticker, timestamp,
-- timeframe). All indicator columns are nullable so rows can be filled
-- incrementally; presumably an external calculation job writes them (the
-- writer is not visible in this file -- confirm).
-- FIX: ohlcv_id now declares ON DELETE SET NULL. The previous default
-- (NO ACTION) made market_data.cleanup_old_ohlcv() fail as soon as any old
-- candle was referenced by an indicator row. NOTE: CREATE TABLE IF NOT EXISTS
-- will not retrofit the FK on an already-created table; existing databases
-- need an ALTER TABLE migration.
CREATE TABLE IF NOT EXISTS market_data.technical_indicators (
    -- Identifiers
    id BIGSERIAL PRIMARY KEY,
    ticker_id UUID NOT NULL REFERENCES market_data.tickers(id) ON DELETE CASCADE,
    symbol VARCHAR(20) NOT NULL, -- Denormalized for performance
    ohlcv_id BIGINT REFERENCES market_data.ohlcv_5m(id) ON DELETE SET NULL,
    -- Reference timestamp
    timestamp TIMESTAMPTZ NOT NULL,
    timeframe VARCHAR(10) NOT NULL DEFAULT '5m', -- '5m', '15m', '1h', '4h', '1d'
    -- ============================================
    -- MOVING AVERAGES
    -- ============================================
    sma_5 DECIMAL(15, 8),
    sma_10 DECIMAL(15, 8),
    sma_20 DECIMAL(15, 8),
    sma_50 DECIMAL(15, 8),
    sma_100 DECIMAL(15, 8),
    sma_200 DECIMAL(15, 8),
    ema_5 DECIMAL(15, 8),
    ema_10 DECIMAL(15, 8),
    ema_20 DECIMAL(15, 8),
    ema_50 DECIMAL(15, 8),
    ema_100 DECIMAL(15, 8),
    ema_200 DECIMAL(15, 8),
    -- ============================================
    -- MOMENTUM INDICATORS
    -- ============================================
    rsi_14 DECIMAL(10, 6), -- Relative Strength Index
    rsi_7 DECIMAL(10, 6),
    rsi_21 DECIMAL(10, 6),
    stoch_k DECIMAL(10, 6), -- Stochastic %K
    stoch_d DECIMAL(10, 6), -- Stochastic %D
    cci_14 DECIMAL(15, 6), -- Commodity Channel Index
    cci_20 DECIMAL(15, 6),
    williams_r DECIMAL(10, 6), -- Williams %R
    roc_10 DECIMAL(15, 8), -- Rate of Change
    momentum_10 DECIMAL(15, 8),
    -- ============================================
    -- VOLATILITY INDICATORS
    -- ============================================
    atr_14 DECIMAL(15, 8), -- Average True Range
    atr_7 DECIMAL(15, 8),
    atr_21 DECIMAL(15, 8),
    -- Bollinger Bands
    bb_upper DECIMAL(15, 8),
    bb_middle DECIMAL(15, 8),
    bb_lower DECIMAL(15, 8),
    bb_width DECIMAL(15, 8),
    bb_percent DECIMAL(10, 6), -- Position within bands
    -- Keltner Channels
    kc_upper DECIMAL(15, 8),
    kc_middle DECIMAL(15, 8),
    kc_lower DECIMAL(15, 8),
    -- ============================================
    -- TREND INDICATORS
    -- ============================================
    macd_line DECIMAL(15, 8), -- MACD Line
    macd_signal DECIMAL(15, 8), -- Signal Line
    macd_histogram DECIMAL(15, 8), -- Histogram
    adx_14 DECIMAL(10, 6), -- Average Directional Index
    plus_di DECIMAL(10, 6), -- +DI
    minus_di DECIMAL(10, 6), -- -DI
    aroon_up DECIMAL(10, 6),
    aroon_down DECIMAL(10, 6),
    aroon_oscillator DECIMAL(10, 6),
    -- ============================================
    -- VOLUME INDICATORS
    -- ============================================
    obv BIGINT, -- On-Balance Volume
    vwap DECIMAL(15, 8), -- Volume Weighted Average Price
    mfi_14 DECIMAL(10, 6), -- Money Flow Index
    ad_line DECIMAL(20, 8), -- Accumulation/Distribution
    -- ============================================
    -- SUPPORT/RESISTANCE
    -- ============================================
    pivot_point DECIMAL(15, 8),
    pivot_r1 DECIMAL(15, 8),
    pivot_r2 DECIMAL(15, 8),
    pivot_r3 DECIMAL(15, 8),
    pivot_s1 DECIMAL(15, 8),
    pivot_s2 DECIMAL(15, 8),
    pivot_s3 DECIMAL(15, 8),
    -- ============================================
    -- ICT/SMC CONCEPTS
    -- ============================================
    fair_value_gap_high DECIMAL(15, 8), -- FVG upper
    fair_value_gap_low DECIMAL(15, 8), -- FVG lower
    order_block_high DECIMAL(15, 8), -- OB upper
    order_block_low DECIMAL(15, 8), -- OB lower
    liquidity_level DECIMAL(15, 8), -- Identified liquidity
    -- ============================================
    -- SIGNALS AND PATTERNS
    -- ============================================
    trend_direction VARCHAR(10), -- 'bullish', 'bearish', 'neutral'
    trend_strength DECIMAL(10, 6), -- 0-100
    -- Crossover signals
    ma_cross_signal VARCHAR(20), -- 'golden_cross', 'death_cross', 'none'
    macd_cross_signal VARCHAR(20), -- 'bullish', 'bearish', 'none'
    rsi_signal VARCHAR(20), -- 'overbought', 'oversold', 'neutral'
    bb_signal VARCHAR(20), -- 'upper_touch', 'lower_touch', 'squeeze', 'none'
    -- Pattern detection (JSON for flexibility)
    detected_patterns JSONB DEFAULT '[]'::JSONB,
    -- ============================================
    -- ML FEATURES
    -- ============================================
    ml_features JSONB DEFAULT '{}'::JSONB, -- Pre-computed ML features
    -- ============================================
    -- METADATA
    -- ============================================
    calculation_version VARCHAR(20) DEFAULT '1.0',
    calculated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Constraints
    CONSTRAINT ti_unique UNIQUE (ticker_id, timestamp, timeframe)
);
COMMENT ON TABLE market_data.technical_indicators IS
'Indicadores tecnicos pre-calculados por vela para analisis y ML';
COMMENT ON COLUMN market_data.technical_indicators.ml_features IS
'Features adicionales pre-calculadas para modelos ML';
-- ============================================================================
-- INDICES
-- ============================================================================
-- Per-symbol, newest-first reads (hot path of the API helpers)
CREATE INDEX IF NOT EXISTS idx_ti_symbol_time
ON market_data.technical_indicators(symbol, timestamp DESC);
-- Per-ticker, per-timeframe scans
CREATE INDEX IF NOT EXISTS idx_ti_ticker_timeframe
ON market_data.technical_indicators(ticker_id, timeframe, timestamp DESC);
-- Pure time-range queries
CREATE INDEX IF NOT EXISTS idx_ti_timestamp
ON market_data.technical_indicators(timestamp DESC);
-- Partial index for rows with a classified trend
CREATE INDEX IF NOT EXISTS idx_ti_trend
ON market_data.technical_indicators(symbol, trend_direction)
WHERE trend_direction IS NOT NULL;
-- FIX: covering index for the ohlcv_id foreign key. Without it, every DELETE
-- on market_data.ohlcv_5m (e.g. cleanup_old_ohlcv) triggers a sequential scan
-- of this table per deleted row to check the FK.
CREATE INDEX IF NOT EXISTS idx_ti_ohlcv_id
ON market_data.technical_indicators(ohlcv_id)
WHERE ohlcv_id IS NOT NULL;
-- BRIN index for timestamp (cheap on large, append-ordered tables)
CREATE INDEX IF NOT EXISTS idx_ti_timestamp_brin
ON market_data.technical_indicators USING BRIN (timestamp);
-- GIN index for containment queries on detected patterns
CREATE INDEX IF NOT EXISTS idx_ti_patterns_gin
ON market_data.technical_indicators USING GIN (detected_patterns);
-- ============================================================================
-- FUNCIONES
-- ============================================================================
-- Return the single most recent indicator row for (p_symbol, p_timeframe).
-- Converted from plpgsql to LANGUAGE sql and marked STABLE (read-only):
-- the procedural wrapper added overhead and hid the query from the planner.
CREATE OR REPLACE FUNCTION market_data.get_latest_indicators(
    p_symbol VARCHAR(20),
    p_timeframe VARCHAR(10) DEFAULT '5m'
)
RETURNS SETOF market_data.technical_indicators AS $$
    SELECT ti.*
    FROM market_data.technical_indicators ti
    WHERE ti.symbol = p_symbol
      AND ti.timeframe = p_timeframe
    ORDER BY ti.timestamp DESC
    LIMIT 1;
$$ LANGUAGE sql STABLE;
-- Compact one-row indicator snapshot for (p_symbol, p_timeframe):
-- trend, RSI, MACD histogram, Bollinger position, ADX and ATR.
-- Converted from plpgsql to LANGUAGE sql and marked STABLE, consistent with
-- get_latest_indicators(); a read-only single SELECT needs no procedural body.
CREATE OR REPLACE FUNCTION market_data.get_indicator_summary(
    p_symbol VARCHAR(20),
    p_timeframe VARCHAR(10) DEFAULT '5m'
)
RETURNS TABLE (
    symbol VARCHAR(20),
    timestamp TIMESTAMPTZ,
    trend_direction VARCHAR(10),
    rsi_14 DECIMAL(10, 6),
    macd_histogram DECIMAL(15, 8),
    bb_percent DECIMAL(10, 6),
    adx_14 DECIMAL(10, 6),
    atr_14 DECIMAL(15, 8)
) AS $$
    SELECT
        ti.symbol,
        ti.timestamp,
        ti.trend_direction,
        ti.rsi_14,
        ti.macd_histogram,
        ti.bb_percent,
        ti.adx_14,
        ti.atr_14
    FROM market_data.technical_indicators ti
    WHERE ti.symbol = p_symbol
      AND ti.timeframe = p_timeframe
    ORDER BY ti.timestamp DESC
    LIMIT 1;
$$ LANGUAGE sql STABLE;
-- ============================================================================
-- VISTAS
-- ============================================================================
-- Most recent indicator row per (symbol, timeframe).
-- DISTINCT ON with ORDER BY symbol, timeframe, timestamp DESC keeps exactly
-- the newest row of each pair.
CREATE OR REPLACE VIEW market_data.v_latest_indicators AS
SELECT DISTINCT ON (symbol, timeframe)
symbol,
timeframe,
timestamp,
trend_direction,
trend_strength,
rsi_14,
macd_histogram,
adx_14,
bb_percent,
rsi_signal,
macd_cross_signal
FROM market_data.technical_indicators
ORDER BY symbol, timeframe, timestamp DESC;
-- Signals fired in the last hour: overbought/oversold RSI, MACD crosses,
-- and golden/death MA crosses.
-- NOTE: bb_signal is exposed in the output but does NOT by itself qualify a
-- row (it is absent from the OR filter below) -- confirm whether intentional.
CREATE OR REPLACE VIEW market_data.v_active_signals AS
SELECT
symbol,
timeframe,
timestamp,
trend_direction,
rsi_signal,
macd_cross_signal,
ma_cross_signal,
bb_signal
FROM market_data.technical_indicators
WHERE timestamp > NOW() - INTERVAL '1 hour'
AND (
rsi_signal IN ('overbought', 'oversold')
OR macd_cross_signal IN ('bullish', 'bearish')
OR ma_cross_signal IN ('golden_cross', 'death_cross')
)
ORDER BY timestamp DESC;
-- ============================================================================
-- GRANTS
-- trading_app: full DML on the table plus its sequence, views and helper
-- functions. trading_readonly: read-only table access.
-- ============================================================================
GRANT SELECT, INSERT, UPDATE, DELETE
    ON market_data.technical_indicators
    TO trading_app;
GRANT USAGE, SELECT ON SEQUENCE market_data.technical_indicators_id_seq TO trading_app;
GRANT SELECT
    ON market_data.v_latest_indicators,
       market_data.v_active_signals
    TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.get_latest_indicators TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.get_indicator_summary TO trading_app;
GRANT SELECT ON market_data.technical_indicators TO trading_readonly;

-- ============================================================================
-- [file boundary] next file: market_data.ohlcv_5m_staging DDL
-- (git web-viewer residue "View File / @ -0,0 +1,391 @@" removed)
-- ============================================================================
-- ============================================================================
-- SCHEMA: market_data
-- TABLE: ohlcv_5m_staging
-- DESCRIPTION: Tabla staging para ingesta de datos OHLCV desde APIs externas
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Enum for the processing state of a staged row.
-- Transitions (driven by the staging_normalize trigger and
-- process_staging_batch): pending -> processing -> processed | error;
-- the trigger may set duplicate/invalid directly on insert.
DO $$ BEGIN
CREATE TYPE market_data.staging_status AS ENUM (
'pending', -- Awaiting processing
'processing', -- Currently being processed
'processed', -- Successfully promoted to ohlcv_5m
'error', -- Processing failed (see error_message)
'duplicate', -- Duplicate detected
'invalid' -- Invalid data (see validation_errors)
);
EXCEPTION
WHEN duplicate_object THEN null; -- type already exists: idempotent re-run
END $$;
-- OHLCV staging table.
-- Receives raw rows from external APIs (Polygon, brokers, etc.); the
-- staging_normalize BEFORE INSERT trigger parses/validates them, and
-- process_staging_batch() promotes valid rows into ohlcv_5m.
CREATE TABLE IF NOT EXISTS market_data.ohlcv_5m_staging (
-- Identifiers
id BIGSERIAL PRIMARY KEY,
batch_id UUID NOT NULL DEFAULT gen_random_uuid(), -- Import batch id
-- Raw data
raw_symbol VARCHAR(50) NOT NULL, -- Symbol exactly as sent by the provider
normalized_symbol VARCHAR(20), -- Normalized symbol (e.g. EURUSD); set by trigger
ticker_id UUID, -- FK-like reference to tickers (NULL until normalized; no constraint)
-- Raw timestamp
raw_timestamp VARCHAR(50), -- Timestamp exactly as sent by the provider
parsed_timestamp TIMESTAMPTZ, -- Parsed timestamp; set by trigger
-- Raw OHLCV prices (may arrive as strings)
raw_open VARCHAR(50),
raw_high VARCHAR(50),
raw_low VARCHAR(50),
raw_close VARCHAR(50),
raw_volume VARCHAR(50),
-- Parsed prices; set by trigger
open DECIMAL(15, 8),
high DECIMAL(15, 8),
low DECIMAL(15, 8),
close DECIMAL(15, 8),
volume BIGINT,
-- Provider metadata
source VARCHAR(20) NOT NULL, -- 'polygon', 'broker', 'csv', 'manual'
source_id VARCHAR(100), -- Id in the source system
api_response_id VARCHAR(100), -- API response id
-- Processing state
status market_data.staging_status NOT NULL DEFAULT 'pending',
error_message TEXT,
validation_errors JSONB DEFAULT '[]'::JSONB, -- array of {field, error, value} objects
-- Processing
processed_at TIMESTAMPTZ,
target_ohlcv_id BIGINT, -- Id of the row created in ohlcv_5m (soft reference, no FK)
-- Deduplication
data_hash VARCHAR(64), -- SHA256 hex over symbol+timestamp+OHLC; set by trigger
is_duplicate BOOLEAN NOT NULL DEFAULT FALSE,
duplicate_of_id BIGINT, -- earlier staging row with the same hash (soft reference)
-- Metadata
metadata JSONB DEFAULT '{}'::JSONB,
raw_data JSONB, -- Complete original payload
-- Timestamps
received_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE market_data.ohlcv_5m_staging IS
'Tabla staging para ingesta y validacion de datos OHLCV antes de insertar en ohlcv_5m';
COMMENT ON COLUMN market_data.ohlcv_5m_staging.batch_id IS
'ID del lote de importacion para agrupar registros del mismo request';
COMMENT ON COLUMN market_data.ohlcv_5m_staging.data_hash IS
'Hash SHA256 de symbol+timestamp+ohlcv para detectar duplicados';
-- ============================================================================
-- INDICES
-- ============================================================================
-- Lookup by import batch
CREATE INDEX IF NOT EXISTS idx_staging_batch
ON market_data.ohlcv_5m_staging(batch_id);
-- Partial index: only the states the worker actually polls for
CREATE INDEX IF NOT EXISTS idx_staging_status
ON market_data.ohlcv_5m_staging(status)
WHERE status IN ('pending', 'processing', 'error');
-- Per-symbol time lookups after normalization
CREATE INDEX IF NOT EXISTS idx_staging_symbol
ON market_data.ohlcv_5m_staging(normalized_symbol, parsed_timestamp);
-- Newest-first monitoring queries (v_staging_errors ordering)
CREATE INDEX IF NOT EXISTS idx_staging_received
ON market_data.ohlcv_5m_staging(received_at DESC);
-- Dedup hash lookup used by the staging_normalize trigger
CREATE INDEX IF NOT EXISTS idx_staging_hash
ON market_data.ohlcv_5m_staging(data_hash)
WHERE data_hash IS NOT NULL;
-- Per-provider health queries (v_staging_status grouping)
CREATE INDEX IF NOT EXISTS idx_staging_source
ON market_data.ohlcv_5m_staging(source, status);
-- ============================================================================
-- TRIGGERS
-- ============================================================================
-- Trigger function: normalize and validate a raw staging row on insert.
-- Resolves the ticker, parses timestamp and prices, validates OHLC
-- consistency, computes the dedup hash and sets the resulting status
-- ('pending' | 'duplicate' | 'invalid').
-- FIXES vs. v1:
--   * raw_volume is parsed via NUMERIC and rounded, so decimal strings
--     ("1234.0") coming from JSON APIs no longer fail the BIGINT cast.
--   * duplicate_of_id lookup now has ORDER BY id, deterministically pointing
--     at the earliest row with the same hash.
CREATE OR REPLACE FUNCTION market_data.normalize_staging_data()
RETURNS TRIGGER AS $$
DECLARE
    v_ticker_id UUID;
    v_hash TEXT;
BEGIN
    -- Normalize the symbol (strip known broker/provider prefixes)
    NEW.normalized_symbol := UPPER(TRIM(
        REGEXP_REPLACE(NEW.raw_symbol, '^(C:|oanda:)', '', 'i')
    ));
    -- Resolve the ticker by normalized symbol or provider-specific aliases
    SELECT id INTO v_ticker_id
    FROM market_data.tickers
    WHERE symbol = NEW.normalized_symbol
       OR broker_symbol = NEW.raw_symbol
       OR polygon_ticker = NEW.raw_symbol
    LIMIT 1;
    NEW.ticker_id := v_ticker_id;
    -- Parse the timestamp: unix seconds/milliseconds or ISO-8601 text
    BEGIN
        IF NEW.raw_timestamp ~ '^\d+$' THEN
            -- Unix timestamp; > 10 digits implies milliseconds
            IF LENGTH(NEW.raw_timestamp) > 10 THEN
                NEW.parsed_timestamp := TO_TIMESTAMP(NEW.raw_timestamp::BIGINT / 1000.0);
            ELSE
                NEW.parsed_timestamp := TO_TIMESTAMP(NEW.raw_timestamp::BIGINT);
            END IF;
        ELSE
            NEW.parsed_timestamp := NEW.raw_timestamp::TIMESTAMPTZ;
        END IF;
    EXCEPTION WHEN OTHERS THEN
        NEW.validation_errors := NEW.validation_errors || jsonb_build_object(
            'field', 'timestamp',
            'error', 'Invalid timestamp format',
            'value', NEW.raw_timestamp
        );
    END;
    -- Parse the prices; volume goes through NUMERIC to accept decimal strings
    BEGIN
        NEW.open := NEW.raw_open::DECIMAL(15, 8);
        NEW.high := NEW.raw_high::DECIMAL(15, 8);
        NEW.low := NEW.raw_low::DECIMAL(15, 8);
        NEW.close := NEW.raw_close::DECIMAL(15, 8);
        NEW.volume := COALESCE(ROUND(NEW.raw_volume::NUMERIC)::BIGINT, 0);
    EXCEPTION WHEN OTHERS THEN
        NEW.validation_errors := NEW.validation_errors || jsonb_build_object(
            'field', 'prices',
            'error', 'Invalid price format'
        );
    END;
    -- Validate OHLC consistency
    IF NEW.open IS NOT NULL AND NEW.high IS NOT NULL AND NEW.low IS NOT NULL AND NEW.close IS NOT NULL THEN
        IF NEW.high < NEW.low OR NEW.high < NEW.open OR NEW.high < NEW.close
           OR NEW.low > NEW.open OR NEW.low > NEW.close THEN
            NEW.validation_errors := NEW.validation_errors || jsonb_build_object(
                'field', 'ohlc_consistency',
                'error', 'OHLC values are inconsistent'
            );
        END IF;
    END IF;
    -- Compute the deduplication hash (sha256 over symbol+timestamp+OHLC)
    v_hash := encode(
        sha256(
            (COALESCE(NEW.normalized_symbol, '') ||
             COALESCE(NEW.parsed_timestamp::TEXT, '') ||
             COALESCE(NEW.open::TEXT, '') ||
             COALESCE(NEW.high::TEXT, '') ||
             COALESCE(NEW.low::TEXT, '') ||
             COALESCE(NEW.close::TEXT, ''))::bytea
        ),
        'hex'
    );
    NEW.data_hash := v_hash;
    -- Mark duplicates of already-staged rows
    IF EXISTS (
        SELECT 1 FROM market_data.ohlcv_5m_staging
        WHERE data_hash = v_hash AND id != NEW.id
    ) THEN
        NEW.is_duplicate := TRUE;
        NEW.status := 'duplicate';
        SELECT id INTO NEW.duplicate_of_id
        FROM market_data.ohlcv_5m_staging
        WHERE data_hash = v_hash
        ORDER BY id
        LIMIT 1;
    END IF;
    -- Any accumulated validation error invalidates the row
    IF jsonb_array_length(NEW.validation_errors) > 0 THEN
        NEW.status := 'invalid';
    END IF;
    -- An unresolved symbol also invalidates the row
    IF NEW.ticker_id IS NULL AND NEW.status = 'pending' THEN
        NEW.validation_errors := NEW.validation_errors || jsonb_build_object(
            'field', 'symbol',
            'error', 'Unknown symbol',
            'value', NEW.raw_symbol
        );
        NEW.status := 'invalid';
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS staging_normalize ON market_data.ohlcv_5m_staging;
CREATE TRIGGER staging_normalize
    BEFORE INSERT ON market_data.ohlcv_5m_staging
    FOR EACH ROW
    EXECUTE FUNCTION market_data.normalize_staging_data();
-- ============================================================================
-- FUNCIONES DE PROCESAMIENTO
-- ============================================================================
-- Promote pending staging rows into market_data.ohlcv_5m.
-- p_batch_id: restrict to one import batch, or NULL for any batch.
-- p_limit: maximum number of rows claimed in this call.
-- Rows are claimed with FOR UPDATE SKIP LOCKED, so multiple workers can run
-- this concurrently over disjoint row sets. Each row is handled in its own
-- BEGIN/EXCEPTION sub-block: a failing row is marked 'error' with SQLERRM
-- and does not abort the rest of the batch.
-- Returns a single row of counters: (processed, errors, duplicates).
CREATE OR REPLACE FUNCTION market_data.process_staging_batch(
p_batch_id UUID DEFAULT NULL,
p_limit INTEGER DEFAULT 1000
)
RETURNS TABLE (
processed INTEGER,
errors INTEGER,
duplicates INTEGER
) AS $$
DECLARE
v_staging RECORD;
v_processed INTEGER := 0;
v_errors INTEGER := 0;
v_duplicates INTEGER := 0;
v_ohlcv_id BIGINT;
BEGIN
FOR v_staging IN
SELECT * FROM market_data.ohlcv_5m_staging
WHERE status = 'pending'
AND (p_batch_id IS NULL OR batch_id = p_batch_id)
ORDER BY received_at
LIMIT p_limit
FOR UPDATE SKIP LOCKED
LOOP
BEGIN
-- Mark as processing (note: only visible outside after commit)
UPDATE market_data.ohlcv_5m_staging
SET status = 'processing'
WHERE id = v_staging.id;
-- Skip rows whose candle already exists in the main table
IF EXISTS (
SELECT 1 FROM market_data.ohlcv_5m
WHERE ticker_id = v_staging.ticker_id
AND timestamp = v_staging.parsed_timestamp
) THEN
UPDATE market_data.ohlcv_5m_staging
SET status = 'duplicate',
processed_at = NOW()
WHERE id = v_staging.id;
v_duplicates := v_duplicates + 1;
CONTINUE;
END IF;
-- Insert into the main table (derived columns filled by its trigger)
INSERT INTO market_data.ohlcv_5m (
ticker_id, symbol, timestamp,
open, high, low, close, volume,
source, is_complete
) VALUES (
v_staging.ticker_id,
v_staging.normalized_symbol,
v_staging.parsed_timestamp,
v_staging.open,
v_staging.high,
v_staging.low,
v_staging.close,
v_staging.volume,
v_staging.source,
TRUE
)
RETURNING id INTO v_ohlcv_id;
-- Mark as processed and link back to the created candle
UPDATE market_data.ohlcv_5m_staging
SET status = 'processed',
processed_at = NOW(),
target_ohlcv_id = v_ohlcv_id
WHERE id = v_staging.id;
v_processed := v_processed + 1;
EXCEPTION WHEN OTHERS THEN
-- Per-row failure: record the error and keep going
UPDATE market_data.ohlcv_5m_staging
SET status = 'error',
error_message = SQLERRM,
processed_at = NOW()
WHERE id = v_staging.id;
v_errors := v_errors + 1;
END;
END LOOP;
RETURN QUERY SELECT v_processed, v_errors, v_duplicates;
END;
$$ LANGUAGE plpgsql;
-- Purge staging rows already settled ('processed' or 'duplicate') that are
-- older than p_older_than_hours. 'error'/'invalid' rows are intentionally
-- kept for inspection. Returns the number of rows deleted.
-- FIX: build the interval with make_interval() instead of string
-- concatenation + cast, and reject negative windows.
CREATE OR REPLACE FUNCTION market_data.cleanup_staging(
    p_older_than_hours INTEGER DEFAULT 24
)
RETURNS INTEGER AS $$
DECLARE
    v_count INTEGER;
BEGIN
    IF p_older_than_hours < 0 THEN
        RAISE EXCEPTION 'p_older_than_hours must be >= 0, got %', p_older_than_hours;
    END IF;
    DELETE FROM market_data.ohlcv_5m_staging
    WHERE status IN ('processed', 'duplicate')
      AND processed_at < NOW() - make_interval(hours => p_older_than_hours);
    GET DIAGNOSTICS v_count = ROW_COUNT;
    RETURN v_count;
END;
$$ LANGUAGE plpgsql;
-- ============================================================================
-- VISTAS
-- ============================================================================
-- Staging pipeline health: row counts per (status, source) plus the age
-- range (oldest/newest received_at) of the queued data.
CREATE OR REPLACE VIEW market_data.v_staging_status AS
SELECT
status,
source,
COUNT(*) AS count,
MIN(received_at) AS oldest,
MAX(received_at) AS newest
FROM market_data.ohlcv_5m_staging
GROUP BY status, source
ORDER BY status, source;
-- Most recent ingestion failures ('error' or 'invalid' rows).
-- NOTE: LIMIT 100 is baked into the view, so it caps rows BEFORE any extra
-- filter a caller adds on top of the view.
CREATE OR REPLACE VIEW market_data.v_staging_errors AS
SELECT
id,
batch_id,
raw_symbol,
normalized_symbol,
source,
status,
error_message,
validation_errors,
received_at
FROM market_data.ohlcv_5m_staging
WHERE status IN ('error', 'invalid')
ORDER BY received_at DESC
LIMIT 100;
-- ============================================================================
-- GRANTS
-- trading_app: full DML on the staging table plus its sequence, monitoring
-- views and processing functions. trading_readonly: read-only table access.
-- ============================================================================
GRANT SELECT, INSERT, UPDATE, DELETE
    ON market_data.ohlcv_5m_staging
    TO trading_app;
GRANT USAGE, SELECT ON SEQUENCE market_data.ohlcv_5m_staging_id_seq TO trading_app;
GRANT SELECT
    ON market_data.v_staging_status,
       market_data.v_staging_errors
    TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.process_staging_batch TO trading_app;
GRANT EXECUTE ON FUNCTION market_data.cleanup_staging TO trading_app;
GRANT SELECT ON market_data.ohlcv_5m_staging TO trading_readonly;

-- ============================================================================
-- [file boundary] next file: notifications.notifications DDL
-- (git web-viewer residue "View File / @ -0,0 +1,355 @@" removed)
-- ============================================================================
-- ============================================================================
-- SCHEMA: notifications
-- TABLE: notifications
-- DESCRIPTION: Sistema de notificaciones del sistema
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Create the schema if missing (idempotent migration)
CREATE SCHEMA IF NOT EXISTS notifications;
-- Both roles may resolve objects inside the schema; object-level privileges
-- are granted separately further down.
GRANT USAGE ON SCHEMA notifications TO trading_app;
GRANT USAGE ON SCHEMA notifications TO trading_readonly;
-- Notification type enum (broad classification used for filtering/routing)
DO $$ BEGIN
CREATE TYPE notifications.notification_type AS ENUM (
'info', -- General informational
'success', -- Successful action
'warning', -- Warning
'error', -- Error
'alert', -- Important alert
'trade', -- Trading-related
'signal', -- Trading signal
'payment', -- Payment/transaction
'account', -- User account
'security', -- Security
'system', -- System/maintenance
'promotion' -- Promotional
);
EXCEPTION
WHEN duplicate_object THEN null; -- type already exists: idempotent re-run
END $$;
-- Priority enum
DO $$ BEGIN
CREATE TYPE notifications.notification_priority AS ENUM (
'low',
'normal',
'high',
'critical'
);
EXCEPTION
WHEN duplicate_object THEN null; -- idempotent re-run
END $$;
-- Delivery channel enum
DO $$ BEGIN
CREATE TYPE notifications.delivery_channel AS ENUM (
'in_app', -- In-app notification
'email', -- Email
'push', -- Push notification
'sms', -- SMS
'webhook' -- External webhook
);
EXCEPTION
WHEN duplicate_object THEN null; -- idempotent re-run
END $$;
-- Main notifications table: one row per notification per recipient user.
CREATE TABLE IF NOT EXISTS notifications.notifications (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES users.users(id) ON DELETE CASCADE,
    -- Classification
    type notifications.notification_type NOT NULL DEFAULT 'info',
    priority notifications.notification_priority NOT NULL DEFAULT 'normal',
    category VARCHAR(50), -- Free-form subcategory
    -- Content
    title VARCHAR(255) NOT NULL,
    message TEXT NOT NULL,
    message_html TEXT, -- HTML version of the message
    -- Structured data
    data JSONB DEFAULT '{}'::JSONB, -- Extra payload for the UI
    action_url TEXT, -- Primary action URL
    action_label VARCHAR(100), -- Label of the action button
    actions JSONB, -- Array of multiple actions
    -- State flags (the *_at columns are stamped by the status trigger below)
    is_read BOOLEAN NOT NULL DEFAULT FALSE,
    read_at TIMESTAMPTZ,
    is_archived BOOLEAN NOT NULL DEFAULT FALSE,
    archived_at TIMESTAMPTZ,
    is_deleted BOOLEAN NOT NULL DEFAULT FALSE, -- soft delete flag
    deleted_at TIMESTAMPTZ,
    -- Multi-channel delivery
    channels notifications.delivery_channel[] NOT NULL DEFAULT ARRAY['in_app']::notifications.delivery_channel[],
    delivery_status JSONB DEFAULT '{}'::JSONB, -- Per-channel delivery state
    -- Email tracking
    email_sent BOOLEAN NOT NULL DEFAULT FALSE,
    email_sent_at TIMESTAMPTZ,
    email_opened BOOLEAN NOT NULL DEFAULT FALSE,
    email_opened_at TIMESTAMPTZ,
    email_clicked BOOLEAN NOT NULL DEFAULT FALSE,
    email_clicked_at TIMESTAMPTZ,
    -- Push notifications
    push_sent BOOLEAN NOT NULL DEFAULT FALSE,
    push_sent_at TIMESTAMPTZ,
    push_delivered BOOLEAN NOT NULL DEFAULT FALSE,
    push_delivered_at TIMESTAMPTZ,
    -- Grouping of similar notifications
    group_key VARCHAR(100), -- Key used to collapse similar notifications
    group_count INTEGER DEFAULT 1, -- How many notifications were collapsed
    -- Scheduling
    scheduled_for TIMESTAMPTZ, -- Deferred delivery time
    expires_at TIMESTAMPTZ, -- Automatic expiration
    -- Origin
    source VARCHAR(50) NOT NULL DEFAULT 'system', -- 'system', 'user', 'bot', 'webhook'
    source_id VARCHAR(255), -- Id of the originating resource
    source_type VARCHAR(50), -- Type of the originating resource
    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE notifications.notifications IS
    'Notificaciones del sistema para usuarios, soporta multicanal';
COMMENT ON COLUMN notifications.notifications.channels IS
    'Canales de entrega: in_app, email, push, sms, webhook';
COMMENT ON COLUMN notifications.notifications.group_key IS
    'Clave para agrupar notificaciones similares y evitar spam';
-- Indexes for common query patterns.
-- Partial index: fast "unread inbox" lookups per user.
CREATE INDEX IF NOT EXISTS idx_notifications_user_unread
    ON notifications.notifications(user_id, created_at DESC)
    WHERE is_read = FALSE AND is_deleted = FALSE;
-- Full (non-deleted) per-user timeline.
CREATE INDEX IF NOT EXISTS idx_notifications_user_all
    ON notifications.notifications(user_id, created_at DESC)
    WHERE is_deleted = FALSE;
CREATE INDEX IF NOT EXISTS idx_notifications_tenant
    ON notifications.notifications(tenant_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_notifications_type
    ON notifications.notifications(type);
-- Only high/critical rows are indexed; keeps the index small.
CREATE INDEX IF NOT EXISTS idx_notifications_priority
    ON notifications.notifications(priority)
    WHERE priority IN ('high', 'critical');
-- FIX: the original predicate also had "scheduled_for > NOW()". NOW() is not
-- IMMUTABLE, so PostgreSQL rejects it inside an index predicate (SQLSTATE
-- 42P17: "functions in index predicate must be marked IMMUTABLE") and the
-- whole migration would fail. Keep only the IS NOT NULL filter here and
-- apply the time comparison in the queries that scan for due notifications.
CREATE INDEX IF NOT EXISTS idx_notifications_scheduled
    ON notifications.notifications(scheduled_for)
    WHERE scheduled_for IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_notifications_expires
    ON notifications.notifications(expires_at)
    WHERE expires_at IS NOT NULL AND is_deleted = FALSE;
CREATE INDEX IF NOT EXISTS idx_notifications_group
    ON notifications.notifications(user_id, group_key, created_at DESC)
    WHERE group_key IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_notifications_source
    ON notifications.notifications(source, source_type, source_id);
-- GIN index for containment searches inside the JSONB payload
CREATE INDEX IF NOT EXISTS idx_notifications_data_gin
    ON notifications.notifications USING GIN (data);
-- Trigger function: refresh updated_at on every row modification.
CREATE OR REPLACE FUNCTION notifications.update_notification_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Recreate the trigger idempotently.
DROP TRIGGER IF EXISTS notification_updated_at ON notifications.notifications;
CREATE TRIGGER notification_updated_at
    BEFORE UPDATE ON notifications.notifications
    FOR EACH ROW
    EXECUTE FUNCTION notifications.update_notification_timestamp();
-- Trigger function: keep status timestamps in sync with their flags.
-- Stamps read_at / archived_at / deleted_at when the corresponding flag
-- flips to TRUE.
-- FIX: the original only set the timestamps and never cleared them, so a
-- notification marked unread (or un-archived / un-deleted) kept a stale
-- timestamp contradicting its flag. Each branch now also clears the
-- timestamp when the flag is reset to FALSE.
CREATE OR REPLACE FUNCTION notifications.set_read_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    -- is_read / read_at (flag columns are NOT NULL, plain boolean logic is safe)
    IF NEW.is_read AND NOT OLD.is_read THEN
        NEW.read_at := NOW();
    ELSIF OLD.is_read AND NOT NEW.is_read THEN
        NEW.read_at := NULL;
    END IF;
    -- is_archived / archived_at
    IF NEW.is_archived AND NOT OLD.is_archived THEN
        NEW.archived_at := NOW();
    ELSIF OLD.is_archived AND NOT NEW.is_archived THEN
        NEW.archived_at := NULL;
    END IF;
    -- is_deleted / deleted_at (soft delete)
    IF NEW.is_deleted AND NOT OLD.is_deleted THEN
        NEW.deleted_at := NOW();
    ELSIF OLD.is_deleted AND NOT NEW.is_deleted THEN
        NEW.deleted_at := NULL;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS notification_status_timestamps ON notifications.notifications;
CREATE TRIGGER notification_status_timestamps
    BEFORE UPDATE ON notifications.notifications
    FOR EACH ROW
    EXECUTE FUNCTION notifications.set_read_timestamp();
-- Create a notification and return its id.
-- Thin INSERT wrapper giving application code a single entry point for
-- notification creation; written as a plain SQL function since no
-- procedural logic is needed.
CREATE OR REPLACE FUNCTION notifications.create_notification(
    p_tenant_id UUID,
    p_user_id UUID,
    p_type notifications.notification_type,
    p_title VARCHAR(255),
    p_message TEXT,
    p_priority notifications.notification_priority DEFAULT 'normal',
    p_channels notifications.delivery_channel[] DEFAULT ARRAY['in_app']::notifications.delivery_channel[],
    p_data JSONB DEFAULT '{}'::JSONB,
    p_action_url TEXT DEFAULT NULL,
    p_source VARCHAR(50) DEFAULT 'system',
    p_source_type VARCHAR(50) DEFAULT NULL,
    p_source_id VARCHAR(255) DEFAULT NULL,
    p_group_key VARCHAR(100) DEFAULT NULL,
    p_scheduled_for TIMESTAMPTZ DEFAULT NULL,
    p_expires_at TIMESTAMPTZ DEFAULT NULL
)
RETURNS UUID AS $$
    INSERT INTO notifications.notifications (
        tenant_id, user_id, type, title, message, priority,
        channels, data, action_url, source, source_type, source_id,
        group_key, scheduled_for, expires_at
    ) VALUES (
        p_tenant_id, p_user_id, p_type, p_title, p_message, p_priority,
        p_channels, p_data, p_action_url, p_source, p_source_type, p_source_id,
        p_group_key, p_scheduled_for, p_expires_at
    )
    RETURNING id;
$$ LANGUAGE sql;
-- Bulk-mark a user's notifications as read; returns the number updated.
-- p_before (optional) limits the update to notifications created at or
-- before that instant ("mark everything read up to here").
CREATE OR REPLACE FUNCTION notifications.mark_all_read(
    p_user_id UUID,
    p_before TIMESTAMPTZ DEFAULT NULL
)
RETURNS INTEGER AS $$
DECLARE
    v_updated INTEGER;
BEGIN
    -- Data-modifying CTE: flag the rows and count them in one statement.
    WITH flagged AS (
        UPDATE notifications.notifications
           SET is_read = TRUE,
               read_at = NOW()
         WHERE user_id = p_user_id
           AND is_read = FALSE
           AND is_deleted = FALSE
           AND (p_before IS NULL OR created_at <= p_before)
        RETURNING 1
    )
    SELECT COUNT(*) INTO v_updated FROM flagged;
    RETURN v_updated;
END;
$$ LANGUAGE plpgsql;
-- Soft-delete notifications whose expires_at has passed; returns the count.
-- Written as a SQL function: a data-modifying CTE performs the update and
-- the outer SELECT yields the number of affected rows.
CREATE OR REPLACE FUNCTION notifications.cleanup_expired()
RETURNS INTEGER AS $$
    WITH expired AS (
        UPDATE notifications.notifications
           SET is_deleted = TRUE,
               deleted_at = NOW()
         WHERE expires_at IS NOT NULL
           AND expires_at < NOW()
           AND is_deleted = FALSE
        RETURNING 1
    )
    SELECT COUNT(*)::INTEGER FROM expired;
$$ LANGUAGE sql;
-- Unread, live notifications: not deleted, not expired, and already due.
-- Sorting relies on the declaration order of notification_priority
-- ('low' < 'normal' < 'high' < 'critical'), so DESC puts critical first —
-- exactly the order the explicit CASE mapping produced.
CREATE OR REPLACE VIEW notifications.v_unread_notifications AS
SELECT
    n.id,
    n.tenant_id,
    n.user_id,
    n.type,
    n.priority,
    n.title,
    n.message,
    n.data,
    n.action_url,
    n.source,
    n.created_at
FROM notifications.notifications AS n
WHERE n.is_read = FALSE
  AND n.is_deleted = FALSE
  AND (n.expires_at IS NULL OR n.expires_at > NOW())
  AND (n.scheduled_for IS NULL OR n.scheduled_for <= NOW())
ORDER BY
    n.priority DESC,
    n.created_at DESC;
-- Per-user dashboard counters over live rows (not deleted, not expired).
-- SUM(CASE ...) is the portable equivalent of COUNT(*) FILTER (...); both
-- produce BIGINT columns with identical values.
CREATE OR REPLACE VIEW notifications.v_notification_counts AS
SELECT
    user_id,
    SUM(CASE WHEN is_read = FALSE THEN 1 ELSE 0 END) AS unread_count,
    SUM(CASE WHEN priority = 'critical' AND is_read = FALSE THEN 1 ELSE 0 END) AS critical_count,
    SUM(CASE WHEN priority = 'high' AND is_read = FALSE THEN 1 ELSE 0 END) AS high_priority_count,
    SUM(CASE WHEN type = 'trade' AND is_read = FALSE THEN 1 ELSE 0 END) AS trade_count,
    SUM(CASE WHEN type = 'signal' AND is_read = FALSE THEN 1 ELSE 0 END) AS signal_count
FROM notifications.notifications
WHERE is_deleted = FALSE
  AND (expires_at IS NULL OR expires_at > NOW())
GROUP BY user_id;
-- Row Level Security: tenant isolation + per-user read restriction.
ALTER TABLE notifications.notifications ENABLE ROW LEVEL SECURITY;
-- All access is scoped to the tenant set in app.current_tenant_id.
CREATE POLICY notifications_tenant_isolation ON notifications.notifications
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- FIX: this policy was created PERMISSIVE (the default). Permissive policies
-- are OR-ed together, so instead of limiting reads to the owning user it
-- WIDENED access (any row passing either policy was visible). AS RESTRICTIVE
-- makes it AND-ed with the tenant policy, enforcing "users only see their
-- own notifications". Sessions must set app.current_user_id before SELECTing
-- from this table, or no rows will be visible.
CREATE POLICY notifications_user_isolation ON notifications.notifications
    AS RESTRICTIVE
    FOR SELECT
    USING (user_id = current_setting('app.current_user_id', true)::UUID);
-- Grants: the app may create/update notifications but never hard-delete
-- (rows only leave via soft delete or ON DELETE CASCADE).
GRANT SELECT, INSERT, UPDATE ON notifications.notifications TO trading_app;
GRANT SELECT ON notifications.notifications TO trading_readonly;
GRANT SELECT ON notifications.v_unread_notifications TO trading_app;
GRANT SELECT ON notifications.v_notification_counts TO trading_app;
GRANT EXECUTE ON FUNCTION notifications.create_notification TO trading_app;
GRANT EXECUTE ON FUNCTION notifications.mark_all_read TO trading_app;
GRANT EXECUTE ON FUNCTION notifications.cleanup_expired TO trading_app;

View File

@ -0,0 +1,128 @@
-- ============================================================================
-- SCHEMA: users
-- TABLE: profiles
-- DESCRIPTION: Informacion extendida del perfil de usuario
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Extended profile table: one row per user (user_id is UNIQUE).
CREATE TABLE IF NOT EXISTS users.profiles (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID NOT NULL UNIQUE REFERENCES users.users(id) ON DELETE CASCADE,
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
    -- Personal information
    bio TEXT,
    date_of_birth DATE,
    gender VARCHAR(20),
    nationality VARCHAR(100),
    country_of_residence VARCHAR(100),
    city VARCHAR(100),
    address TEXT,
    postal_code VARCHAR(20),
    -- Professional information
    occupation VARCHAR(100),
    company_name VARCHAR(200),
    annual_income_range VARCHAR(50), -- '0-25k', '25k-50k', '50k-100k', '100k-250k', '250k+'
    source_of_funds VARCHAR(100), -- 'salary', 'business', 'investments', 'inheritance', 'other'
    -- Identity documentation
    id_document_type VARCHAR(50), -- 'passport', 'national_id', 'drivers_license'
    id_document_number VARCHAR(100),
    id_document_expiry DATE,
    id_document_country VARCHAR(100),
    -- Social links
    social_links JSONB DEFAULT '{}'::JSONB, -- { "twitter": "@user", "linkedin": "..." }
    -- Communication preferences
    preferred_contact_method VARCHAR(20) DEFAULT 'email', -- 'email', 'phone', 'sms'
    timezone VARCHAR(50) DEFAULT 'America/New_York', -- non-null default: always counts toward completion below
    locale VARCHAR(10) DEFAULT 'es-MX',
    -- Profile completeness (maintained by the completion trigger)
    completion_percentage INTEGER NOT NULL DEFAULT 0 CHECK (completion_percentage BETWEEN 0 AND 100),
    last_profile_update TIMESTAMPTZ,
    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE users.profiles IS
    'Informacion extendida del perfil de usuario, separada de credenciales';
COMMENT ON COLUMN users.profiles.completion_percentage IS
    'Porcentaje de completitud del perfil (0-100)';
COMMENT ON COLUMN users.profiles.annual_income_range IS
    'Rango de ingresos anuales para compliance KYC';
-- Indexes
-- NOTE(review): user_id already has a unique index from its UNIQUE
-- constraint; idx_profiles_user_id is redundant.
CREATE INDEX IF NOT EXISTS idx_profiles_user_id
    ON users.profiles(user_id);
CREATE INDEX IF NOT EXISTS idx_profiles_tenant_id
    ON users.profiles(tenant_id);
CREATE INDEX IF NOT EXISTS idx_profiles_completion
    ON users.profiles(completion_percentage);
CREATE INDEX IF NOT EXISTS idx_profiles_country
    ON users.profiles(country_of_residence);
-- Keep updated_at current via the shared users.update_user_timestamp helper
DROP TRIGGER IF EXISTS profile_updated_at ON users.profiles;
CREATE TRIGGER profile_updated_at
    BEFORE UPDATE ON users.profiles
    FOR EACH ROW
    EXECUTE FUNCTION users.update_user_timestamp();
-- Trigger function: recompute completion_percentage on every write.
-- Each tracked field contributes an equal share; integer division over the
-- 10 tracked fields yields whole-percent steps of 10, exactly as the
-- original IF-per-field counter did. (timezone has a non-null default, so
-- a fresh row normally starts at 10%.)
CREATE OR REPLACE FUNCTION users.calculate_profile_completion()
RETURNS TRIGGER AS $$
DECLARE
    -- TRUE entries mark profile fields that count toward completion.
    v_checks BOOLEAN[] := ARRAY[
        NEW.bio IS NOT NULL,
        NEW.date_of_birth IS NOT NULL,
        NEW.nationality IS NOT NULL,
        NEW.country_of_residence IS NOT NULL,
        NEW.city IS NOT NULL,
        NEW.occupation IS NOT NULL,
        NEW.annual_income_range IS NOT NULL,
        NEW.source_of_funds IS NOT NULL,
        NEW.id_document_type IS NOT NULL,
        NEW.timezone IS NOT NULL
    ];
    v_filled INTEGER;
BEGIN
    SELECT COUNT(*) INTO v_filled
    FROM unnest(v_checks) AS filled
    WHERE filled;
    NEW.completion_percentage := (v_filled * 100) / array_length(v_checks, 1);
    NEW.last_profile_update := NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS profile_completion_calc ON users.profiles;
CREATE TRIGGER profile_completion_calc
    BEFORE INSERT OR UPDATE ON users.profiles
    FOR EACH ROW
    EXECUTE FUNCTION users.calculate_profile_completion();
-- Row Level Security: scope all access to the session tenant
ALTER TABLE users.profiles ENABLE ROW LEVEL SECURITY;
CREATE POLICY profiles_tenant_isolation ON users.profiles
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- Grants (no DELETE: rows are removed only via ON DELETE CASCADE from users)
GRANT SELECT, INSERT, UPDATE ON users.profiles TO trading_app;
GRANT SELECT ON users.profiles TO trading_readonly;

View File

@ -0,0 +1,141 @@
-- ============================================================================
-- SCHEMA: users
-- TABLE: user_settings
-- DESCRIPTION: Configuraciones y preferencias de usuario
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- User settings table: one row per user (user_id is UNIQUE); preferences
-- are grouped into per-domain JSONB documents with sensible defaults.
CREATE TABLE IF NOT EXISTS users.user_settings (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID NOT NULL UNIQUE REFERENCES users.users(id) ON DELETE CASCADE,
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
    -- UI settings: theme, layouts, visual preferences
    ui_settings JSONB NOT NULL DEFAULT '{
        "theme": "dark",
        "sidebar_collapsed": false,
        "dashboard_layout": "default",
        "charts_theme": "dark",
        "compact_mode": false,
        "animations_enabled": true
    }'::JSONB,
    -- Trading settings: leverage, risk, confirmations
    trading_settings JSONB NOT NULL DEFAULT '{
        "default_leverage": 1,
        "risk_per_trade_percent": 2,
        "default_stop_loss_pips": 50,
        "default_take_profit_pips": 100,
        "confirmation_required": true,
        "one_click_trading": false,
        "sound_enabled": true
    }'::JSONB,
    -- Notification preferences, nested per channel
    notification_settings JSONB NOT NULL DEFAULT '{
        "email": {
            "enabled": true,
            "signals": true,
            "trades": true,
            "account": true,
            "marketing": false,
            "digest": "daily"
        },
        "push": {
            "enabled": true,
            "signals": true,
            "trades": true,
            "price_alerts": true
        },
        "sms": {
            "enabled": false,
            "critical_only": true
        }
    }'::JSONB,
    -- Privacy settings
    privacy_settings JSONB NOT NULL DEFAULT '{
        "profile_visibility": "private",
        "show_in_leaderboard": false,
        "share_performance": false,
        "allow_follow": false
    }'::JSONB,
    -- Security settings
    security_settings JSONB NOT NULL DEFAULT '{
        "session_timeout_minutes": 30,
        "remember_devices": true,
        "login_notifications": true,
        "suspicious_activity_alerts": true
    }'::JSONB,
    -- Market data settings
    market_data_settings JSONB NOT NULL DEFAULT '{
        "default_symbols": ["EURUSD", "GBPUSD", "XAUUSD"],
        "default_timeframe": "H1",
        "price_decimals": 5,
        "volume_display": "lots"
    }'::JSONB,
    -- Report preferences
    report_settings JSONB NOT NULL DEFAULT '{
        "default_period": "monthly",
        "include_closed_trades": true,
        "currency_display": "USD"
    }'::JSONB,
    -- Metadata
    metadata JSONB DEFAULT '{}'::JSONB,
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE users.user_settings IS
    'Configuraciones y preferencias personalizables del usuario';
COMMENT ON COLUMN users.user_settings.ui_settings IS
    'Configuraciones de interfaz: tema, layouts, preferencias visuales';
COMMENT ON COLUMN users.user_settings.trading_settings IS
    'Configuraciones de trading: leverage, risk, confirmaciones';
COMMENT ON COLUMN users.user_settings.notification_settings IS
    'Preferencias de notificaciones por canal';
-- Indexes
-- NOTE(review): user_id is declared UNIQUE, so this index duplicates the
-- constraint's implicit unique index.
CREATE INDEX IF NOT EXISTS idx_user_settings_user_id
    ON users.user_settings(user_id);
CREATE INDEX IF NOT EXISTS idx_user_settings_tenant_id
    ON users.user_settings(tenant_id);
-- GIN indexes for containment queries on JSONB settings
CREATE INDEX IF NOT EXISTS idx_user_settings_ui_gin
    ON users.user_settings USING GIN (ui_settings);
CREATE INDEX IF NOT EXISTS idx_user_settings_notif_gin
    ON users.user_settings USING GIN (notification_settings);
-- Keep updated_at current via the shared users.update_user_timestamp helper
DROP TRIGGER IF EXISTS user_settings_updated_at ON users.user_settings;
CREATE TRIGGER user_settings_updated_at
    BEFORE UPDATE ON users.user_settings
    FOR EACH ROW
    EXECUTE FUNCTION users.update_user_timestamp();
-- Row Level Security: scope all access to the session tenant
ALTER TABLE users.user_settings ENABLE ROW LEVEL SECURITY;
CREATE POLICY user_settings_tenant_isolation ON users.user_settings
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- Grants
GRANT SELECT, INSERT, UPDATE ON users.user_settings TO trading_app;
GRANT SELECT ON users.user_settings TO trading_readonly;

View File

@ -0,0 +1,230 @@
-- ============================================================================
-- SCHEMA: users
-- TABLE: kyc_verifications
-- DESCRIPTION: Verificacion de identidad KYC (Know Your Customer)
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Enum: KYC verification status
DO $$ BEGIN
    CREATE TYPE users.kyc_status AS ENUM (
        'not_started',  -- Process not started
        'pending',      -- Documents submitted, awaiting review
        'under_review', -- Manual review in progress
        'approved',     -- Fully approved
        'rejected',     -- Rejected (may retry)
        'expired',      -- Verification expired, re-verification required
        'suspended'     -- Suspended due to suspicious activity
    );
EXCEPTION
    -- Type already exists: ignore so the script stays re-runnable
    WHEN duplicate_object THEN null;
END $$;
-- Enum: verification level reached (ascending thoroughness)
DO $$ BEGIN
    CREATE TYPE users.kyc_level AS ENUM (
        'none',     -- Not verified
        'basic',    -- Email + phone verified
        'standard', -- ID document verified
        'enhanced', -- ID + proof of address
        'full'      -- Complete verification with video
    );
EXCEPTION
    -- Type already exists: ignore so the script stays re-runnable
    WHEN duplicate_object THEN null;
END $$;
-- KYC verification table: one row per user (enforced by kyc_unique_user).
CREATE TABLE IF NOT EXISTS users.kyc_verifications (
    -- Identifiers
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id UUID NOT NULL REFERENCES users.users(id) ON DELETE CASCADE,
    tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
    -- Current state (level is recomputed by the kyc_level_update trigger)
    status users.kyc_status NOT NULL DEFAULT 'not_started',
    level users.kyc_level NOT NULL DEFAULT 'none',
    -- Email verification
    email_verified BOOLEAN NOT NULL DEFAULT FALSE,
    email_verified_at TIMESTAMPTZ,
    email_verification_method VARCHAR(50), -- 'code', 'link'
    -- Phone verification
    phone_verified BOOLEAN NOT NULL DEFAULT FALSE,
    phone_verified_at TIMESTAMPTZ,
    phone_verification_method VARCHAR(50), -- 'sms', 'call'
    -- ID document verification
    id_document_verified BOOLEAN NOT NULL DEFAULT FALSE,
    id_document_verified_at TIMESTAMPTZ,
    id_document_type VARCHAR(50), -- 'passport', 'national_id', 'drivers_license'
    id_document_number_hash VARCHAR(255), -- Hash of the document number, for matching without storing it
    id_document_country VARCHAR(100),
    id_document_expiry DATE,
    id_document_front_url TEXT, -- Secure URL to the (encrypted) document image
    id_document_back_url TEXT,
    id_selfie_url TEXT, -- Selfie holding the document
    -- Address verification
    address_verified BOOLEAN NOT NULL DEFAULT FALSE,
    address_verified_at TIMESTAMPTZ,
    address_document_type VARCHAR(50), -- 'utility_bill', 'bank_statement', 'tax_document'
    address_document_url TEXT,
    address_document_date DATE, -- Document issue date (max 3 months old)
    -- Video verification (enhanced)
    video_verified BOOLEAN NOT NULL DEFAULT FALSE,
    video_verified_at TIMESTAMPTZ,
    video_url TEXT,
    video_liveness_score DECIMAL(5, 4), -- 0.0000 - 1.0000
    -- External verification provider
    external_provider VARCHAR(50), -- 'sumsub', 'onfido', 'jumio'
    external_verification_id VARCHAR(255),
    external_status VARCHAR(50),
    external_risk_score DECIMAL(5, 4),
    -- Review information
    reviewer_id UUID,
    reviewed_at TIMESTAMPTZ,
    review_notes TEXT,
    rejection_reason TEXT,
    rejection_codes JSONB, -- Array of rejection codes
    -- AML / PEP / sanctions screening
    aml_checked BOOLEAN NOT NULL DEFAULT FALSE,
    aml_checked_at TIMESTAMPTZ,
    aml_result VARCHAR(50), -- 'clear', 'match', 'potential_match'
    aml_details JSONB,
    pep_checked BOOLEAN NOT NULL DEFAULT FALSE,
    pep_checked_at TIMESTAMPTZ,
    pep_result VARCHAR(50),
    pep_details JSONB,
    sanctions_checked BOOLEAN NOT NULL DEFAULT FALSE,
    sanctions_checked_at TIMESTAMPTZ,
    sanctions_result VARCHAR(50),
    sanctions_details JSONB,
    -- Expiration of an approved verification
    expires_at TIMESTAMPTZ,
    reminder_sent_at TIMESTAMPTZ,
    -- Attempt limiting
    verification_attempts INTEGER NOT NULL DEFAULT 0,
    max_attempts INTEGER NOT NULL DEFAULT 3,
    locked_until TIMESTAMPTZ,
    -- Request metadata
    ip_address INET,
    user_agent TEXT,
    device_fingerprint VARCHAR(255),
    metadata JSONB DEFAULT '{}'::JSONB,
    -- Timestamps
    started_at TIMESTAMPTZ,
    completed_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Constraints
    CONSTRAINT kyc_unique_user UNIQUE (user_id)
);
COMMENT ON TABLE users.kyc_verifications IS
    'Registro de verificacion KYC de usuarios para compliance regulatorio';
COMMENT ON COLUMN users.kyc_verifications.level IS
    'Nivel de verificacion alcanzado: none, basic, standard, enhanced, full';
COMMENT ON COLUMN users.kyc_verifications.external_risk_score IS
    'Score de riesgo del proveedor externo (0-1, menor es mejor)';
-- Indexes
-- NOTE(review): user_id already has a unique index via kyc_unique_user;
-- this extra index is redundant.
CREATE INDEX IF NOT EXISTS idx_kyc_user_id
    ON users.kyc_verifications(user_id);
CREATE INDEX IF NOT EXISTS idx_kyc_tenant_id
    ON users.kyc_verifications(tenant_id);
CREATE INDEX IF NOT EXISTS idx_kyc_status
    ON users.kyc_verifications(status);
CREATE INDEX IF NOT EXISTS idx_kyc_level
    ON users.kyc_verifications(level);
-- Approved verifications that will eventually need renewal
CREATE INDEX IF NOT EXISTS idx_kyc_expires_at
    ON users.kyc_verifications(expires_at)
    WHERE expires_at IS NOT NULL AND status = 'approved';
-- Review queue, oldest submissions first
CREATE INDEX IF NOT EXISTS idx_kyc_pending_review
    ON users.kyc_verifications(created_at)
    WHERE status IN ('pending', 'under_review');
-- AML hits needing attention
CREATE INDEX IF NOT EXISTS idx_kyc_aml_matches
    ON users.kyc_verifications(tenant_id, aml_result)
    WHERE aml_result IN ('match', 'potential_match');
-- Keep updated_at current via the shared users.update_user_timestamp helper
DROP TRIGGER IF EXISTS kyc_updated_at ON users.kyc_verifications;
CREATE TRIGGER kyc_updated_at
    BEFORE UPDATE ON users.kyc_verifications
    FOR EACH ROW
    EXECUTE FUNCTION users.update_user_timestamp();
-- Trigger function: derive the KYC level from completed checks and
-- auto-approve once the highest level is reached while under review.
-- All *_verified flags are NOT NULL booleans, so the CASE ladder below is
-- an exact rewrite of the original IF/ELSIF chain (first match wins).
CREATE OR REPLACE FUNCTION users.update_kyc_level()
RETURNS TRIGGER AS $$
BEGIN
    NEW.level :=
        CASE
            WHEN NEW.video_verified AND NEW.address_verified
                 AND NEW.id_document_verified AND NEW.phone_verified
                 AND NEW.email_verified
                THEN 'full'::users.kyc_level
            WHEN NEW.address_verified AND NEW.id_document_verified
                 AND NEW.phone_verified AND NEW.email_verified
                THEN 'enhanced'
            WHEN NEW.id_document_verified
                 AND (NEW.phone_verified OR NEW.email_verified)
                THEN 'standard'
            WHEN NEW.email_verified AND NEW.phone_verified
                THEN 'basic'
            ELSE 'none'
        END;
    -- Full verification while pending/under review => approve and start
    -- the re-verification clock.
    IF NEW.level = 'full' AND OLD.status IN ('pending', 'under_review') THEN
        NEW.status := 'approved';
        NEW.completed_at := NOW();
        NEW.expires_at := NOW() + INTERVAL '2 years';
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS kyc_level_update ON users.kyc_verifications;
CREATE TRIGGER kyc_level_update
    BEFORE UPDATE ON users.kyc_verifications
    FOR EACH ROW
    EXECUTE FUNCTION users.update_kyc_level();
-- Row Level Security: scope all access to the session tenant
ALTER TABLE users.kyc_verifications ENABLE ROW LEVEL SECURITY;
CREATE POLICY kyc_tenant_isolation ON users.kyc_verifications
    FOR ALL
    USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- Reviewer policy: permissive policies are OR-ed together, so this
-- deliberately WIDENS read access — sessions flagged as KYC reviewers can
-- see every pending/under-review row, including rows outside their own
-- tenant. NOTE(review): confirm the cross-tenant reach is intended.
CREATE POLICY kyc_reviewer_access ON users.kyc_verifications
    FOR SELECT
    USING (
        current_setting('app.is_kyc_reviewer', true)::boolean = true
        AND status IN ('pending', 'under_review')
    );
-- Grants
GRANT SELECT, INSERT, UPDATE ON users.kyc_verifications TO trading_app;
GRANT SELECT ON users.kyc_verifications TO trading_readonly;

View File

@ -0,0 +1,241 @@
-- ============================================================================
-- SCHEMA: users
-- TABLE: risk_profiles
-- DESCRIPTION: Perfil de riesgo del usuario para trading
-- VERSION: 1.0.0
-- CREATED: 2026-01-16
-- SPRINT: Sprint 1 - DDL Implementation Roadmap Q1-2026
-- ============================================================================
-- Risk tolerance enum. Wrapped in a DO block because CREATE TYPE has no
-- IF NOT EXISTS; swallowing duplicate_object keeps the script idempotent.
DO $$ BEGIN
CREATE TYPE users.risk_tolerance AS ENUM (
'conservative', -- Low risk, capital preservation
'moderate', -- Moderate risk, growth/safety balance
'aggressive', -- High risk, maximize returns
'speculative' -- Very high risk, active trading
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Trading experience enum, idempotent via the same DO/duplicate_object
-- pattern used for users.risk_tolerance above.
DO $$ BEGIN
CREATE TYPE users.trading_experience AS ENUM (
'none', -- No experience
'beginner', -- Less than 1 year
'intermediate', -- 1-3 years
'advanced', -- 3-5 years
'expert' -- 5+ years
);
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Risk profile table: exactly one row per user (user_id is UNIQUE). Holds the
-- suitability questionnaire results, declared financials, and the trading
-- limits derived from the computed risk score (see calculate_risk_score()).
CREATE TABLE IF NOT EXISTS users.risk_profiles (
-- Identifiers
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
user_id UUID NOT NULL UNIQUE REFERENCES users.users(id) ON DELETE CASCADE,
tenant_id UUID NOT NULL REFERENCES tenants.tenants(id) ON DELETE CASCADE,
-- Declared risk profile (drives the calculated score below)
risk_tolerance users.risk_tolerance NOT NULL DEFAULT 'moderate',
trading_experience users.trading_experience NOT NULL DEFAULT 'none',
-- Suitability questionnaire (version + raw responses kept for audit)
questionnaire_completed BOOLEAN NOT NULL DEFAULT FALSE,
questionnaire_completed_at TIMESTAMPTZ,
questionnaire_version VARCHAR(20),
questionnaire_responses JSONB,
questionnaire_score INTEGER CHECK (questionnaire_score BETWEEN 0 AND 100),
-- Investment objectives (defaults are placeholder values)
investment_objectives JSONB DEFAULT '{
"primary_goal": "growth",
"time_horizon": "medium_term",
"liquidity_needs": "moderate"
}'::JSONB,
-- Declared financial situation (ranges, not exact amounts)
financial_situation JSONB DEFAULT '{
"net_worth_range": "not_specified",
"annual_income_range": "not_specified",
"liquid_assets_range": "not_specified",
"investable_amount_range": "not_specified"
}'::JSONB,
-- Self-assessed product knowledge per instrument class
product_knowledge JSONB DEFAULT '{
"forex": "none",
"stocks": "none",
"options": "none",
"futures": "none",
"crypto": "none",
"leverage_products": "none"
}'::JSONB,
-- Self-declared trading history
trading_history JSONB DEFAULT '{
"years_trading": 0,
"average_trades_per_month": 0,
"largest_single_trade": 0,
"has_professional_experience": false
}'::JSONB,
-- Profile-derived limits (overwritten by calculate_risk_score() whenever
-- risk_tolerance / trading_experience change)
max_position_size_percent DECIMAL(5, 2) NOT NULL DEFAULT 5.00, -- % of capital per position
max_daily_loss_percent DECIMAL(5, 2) NOT NULL DEFAULT 3.00, -- max daily loss, % of capital
max_total_exposure_percent DECIMAL(5, 2) NOT NULL DEFAULT 50.00, -- total exposure, % of capital
max_leverage INTEGER NOT NULL DEFAULT 10,
allowed_instruments JSONB DEFAULT '["forex_majors", "indices"]'::JSONB,
-- Computed risk score (1 = lowest risk, 10 = highest; see trigger)
calculated_risk_score INTEGER CHECK (calculated_risk_score BETWEEN 1 AND 10),
risk_score_factors JSONB,
last_risk_assessment TIMESTAMPTZ,
-- Acknowledged warnings and active restrictions (arrays of entries)
risk_warnings_acknowledged JSONB DEFAULT '[]'::JSONB,
trading_restrictions JSONB DEFAULT '[]'::JSONB,
requires_additional_disclosure BOOLEAN NOT NULL DEFAULT FALSE,
-- Regulatory (MiFID II-style) client classification
regulatory_classification VARCHAR(50) DEFAULT 'retail', -- 'retail', 'professional', 'eligible_counterparty'
classification_request_status VARCHAR(50),
classification_approved_at TIMESTAMPTZ,
-- Free-form metadata
metadata JSONB DEFAULT '{}'::JSONB,
-- Timestamps (updated_at maintained by trigger risk_profile_updated_at)
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Catalog comments (stored in pg_description; shown by \d+ etc.).
COMMENT ON TABLE users.risk_profiles IS
'Perfil de riesgo del usuario basado en cuestionario de adecuacion';
COMMENT ON COLUMN users.risk_profiles.risk_tolerance IS
'Tolerancia al riesgo declarada: conservative, moderate, aggressive, speculative';
COMMENT ON COLUMN users.risk_profiles.calculated_risk_score IS
'Score de riesgo calculado (1-10), donde 10 es maximo riesgo permitido';
COMMENT ON COLUMN users.risk_profiles.regulatory_classification IS
'Clasificacion segun MiFID II: retail, professional, eligible_counterparty';
-- Indexes
-- NOTE: no dedicated index on user_id is created here — the UNIQUE constraint
-- on user_id in the table definition already provides a unique b-tree index,
-- so idx_risk_profiles_user_id would be a duplicate adding only write and
-- maintenance overhead. (Environments that already created it can DROP it.)
CREATE INDEX IF NOT EXISTS idx_risk_profiles_tenant_id
ON users.risk_profiles(tenant_id);  -- tenant-scoped lookups / RLS support
CREATE INDEX IF NOT EXISTS idx_risk_profiles_tolerance
ON users.risk_profiles(risk_tolerance);  -- segmentation by declared tolerance
CREATE INDEX IF NOT EXISTS idx_risk_profiles_experience
ON users.risk_profiles(trading_experience);  -- segmentation by experience
CREATE INDEX IF NOT EXISTS idx_risk_profiles_classification
ON users.risk_profiles(regulatory_classification);  -- regulatory reporting
-- Partial index: per-tenant queue of profiles still awaiting the questionnaire.
CREATE INDEX IF NOT EXISTS idx_risk_profiles_pending_questionnaire
ON users.risk_profiles(tenant_id, created_at)
WHERE questionnaire_completed = FALSE;
-- GIN index for containment searches (@>, ?) inside product_knowledge JSONB.
CREATE INDEX IF NOT EXISTS idx_risk_profiles_knowledge_gin
ON users.risk_profiles USING GIN (product_knowledge);
-- Housekeeping trigger: runs the shared users.update_user_timestamp() helper
-- before every UPDATE (same helper used by the other users.* tables;
-- presumably refreshes updated_at — confirm against its definition).
DROP TRIGGER IF EXISTS risk_profile_updated_at ON users.risk_profiles;
CREATE TRIGGER risk_profile_updated_at
BEFORE UPDATE ON users.risk_profiles
FOR EACH ROW
EXECUTE FUNCTION users.update_user_timestamp();
-- Trigger function: derives calculated_risk_score (1-10) from the declared
-- trading experience and risk tolerance, records the calculation inputs in
-- risk_score_factors for auditability, and sets the per-profile trading
-- limits for the resulting score band.
CREATE OR REPLACE FUNCTION users.calculate_risk_score()
RETURNS TRIGGER AS $$
DECLARE
    v_base CONSTANT INTEGER := 5;  -- neutral starting point
    v_exp_adj INTEGER;             -- adjustment from trading experience
    v_tol_adj INTEGER;             -- adjustment from risk tolerance
    v_final INTEGER;               -- clamped result written to the row
BEGIN
    -- Experience adjustment: novices subtract, veterans add.
    v_exp_adj := CASE NEW.trading_experience
        WHEN 'none'         THEN -2
        WHEN 'beginner'     THEN -1
        WHEN 'intermediate' THEN  0
        WHEN 'advanced'     THEN  1
        WHEN 'expert'       THEN  2
        ELSE 0
    END;

    -- Tolerance adjustment: conservative subtracts, speculative adds the most.
    v_tol_adj := CASE NEW.risk_tolerance
        WHEN 'conservative' THEN -2
        WHEN 'moderate'     THEN  0
        WHEN 'aggressive'   THEN  2
        WHEN 'speculative'  THEN  3
        ELSE 0
    END;

    -- Combine the weights and clamp to the valid 1-10 range.
    v_final := GREATEST(1, LEAST(10, v_base + v_exp_adj + v_tol_adj));

    NEW.calculated_risk_score := v_final;
    NEW.last_risk_assessment := NOW();

    -- Persist the inputs of the calculation so the score can be audited.
    NEW.risk_score_factors := jsonb_build_object(
        'base_score', v_base,
        'experience_weight', v_exp_adj,
        'tolerance_weight', v_tol_adj,
        'final_score', v_final
    );

    -- Map the score band to the trading limits enforced for this profile.
    IF v_final <= 3 THEN
        NEW.max_leverage := 5;
        NEW.max_position_size_percent := 2.00;
        NEW.max_daily_loss_percent := 1.00;
    ELSIF v_final <= 5 THEN
        NEW.max_leverage := 10;
        NEW.max_position_size_percent := 5.00;
        NEW.max_daily_loss_percent := 3.00;
    ELSIF v_final <= 7 THEN
        NEW.max_leverage := 20;
        NEW.max_position_size_percent := 10.00;
        NEW.max_daily_loss_percent := 5.00;
    ELSE
        NEW.max_leverage := 50;
        NEW.max_position_size_percent := 15.00;
        NEW.max_daily_loss_percent := 10.00;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Recompute the score on INSERT and whenever one of the two driving columns
-- changes. The "UPDATE OF" column list means manual edits to the limit
-- columns are NOT clobbered unless tolerance/experience also change.
DROP TRIGGER IF EXISTS risk_score_calc ON users.risk_profiles;
CREATE TRIGGER risk_score_calc
BEFORE INSERT OR UPDATE OF risk_tolerance, trading_experience ON users.risk_profiles
FOR EACH ROW
EXECUTE FUNCTION users.calculate_risk_score();
-- RLS: multi-tenant isolation. With only USING given on a FOR ALL policy,
-- PostgreSQL applies the same expression as the WITH CHECK for writes.
-- current_setting(..., true) returns NULL when the GUC is unset, hiding all
-- rows by default.
ALTER TABLE users.risk_profiles ENABLE ROW LEVEL SECURITY;
CREATE POLICY risk_profiles_tenant_isolation ON users.risk_profiles
FOR ALL
USING (tenant_id = current_setting('app.current_tenant_id', true)::UUID);
-- Grants: app role may read/insert/update but not DELETE (profiles persist
-- with the user; removal happens via the ON DELETE CASCADE on user_id);
-- readonly role gets SELECT only.
GRANT SELECT, INSERT, UPDATE ON users.risk_profiles TO trading_app;
GRANT SELECT ON users.risk_profiles TO trading_readonly;