local-llm-agent/apps/gateway/src/main.ts
Adrian Flores Cortes 3def230d58 Initial commit: local-llm-agent infrastructure project
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-02 16:42:45 -06:00

53 lines
2.0 KiB
TypeScript

import { NestFactory } from '@nestjs/core';
import { ValidationPipe } from '@nestjs/common';
import { SwaggerModule, DocumentBuilder } from '@nestjs/swagger';
import { AppModule } from './app.module';
/**
 * Boot the gateway: create the Nest app, configure CORS, validation and
 * Swagger, then start listening on GATEWAY_PORT (default 3160).
 */
async function bootstrap() {
  const app = await NestFactory.create(AppModule);

  // Enable CORS for agent access.
  // FIX: the original used `origin: '*'` together with `credentials: true`.
  // The Fetch/CORS spec forbids `Access-Control-Allow-Origin: *` on
  // credentialed requests, so browsers reject every credentialed call.
  // `origin: true` makes the cors middleware reflect the request's Origin
  // header instead — wildcard-like in effect, but credential-compatible.
  app.enableCors({
    origin: true,
    methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',
    credentials: true,
  });

  // Global validation pipe: strip unknown properties, coerce payloads to DTO
  // instances, and reject requests carrying non-whitelisted fields.
  app.useGlobalPipes(
    new ValidationPipe({
      whitelist: true,
      transform: true,
      forbidNonWhitelisted: true,
    }),
  );

  // Swagger documentation, served at /api.
  const config = new DocumentBuilder()
    .setTitle('Local LLM Agent Gateway')
    .setDescription('OpenAI-compatible API Gateway for local LLM inference')
    .setVersion('0.1.0')
    .addTag('openai', 'OpenAI-compatible endpoints')
    .addTag('mcp', 'MCP Tools endpoints')
    .addTag('health', 'Health check endpoints')
    .build();
  const document = SwaggerModule.createDocument(app, config);
  SwaggerModule.setup('api', app, document);

  // GATEWAY_PORT may be unset or empty; fall back to 3160 in both cases.
  const port = process.env.GATEWAY_PORT || 3160;
  await app.listen(port);

  console.log(`
╔════════════════════════════════════════════════════════════╗
║ Local LLM Agent Gateway ║
╠════════════════════════════════════════════════════════════╣
║ Status: Running ║
║ Port: ${port}
║ Swagger: http://localhost:${port}/api ║
╚════════════════════════════════════════════════════════════╝
`);
}
bootstrap();