local-llm-agent/apps/gateway/src/health/health.service.ts
Adrian Flores Cortes 3def230d58 Initial commit: local-llm-agent infrastructure project
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-02 16:42:45 -06:00

50 lines
1.3 KiB
TypeScript

import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import axios from 'axios';
@Injectable()
export class HealthService {
  /** Timeout (ms) for each inference-engine health probe. */
  private static readonly PROBE_TIMEOUT_MS = 5000;

  /** Reported gateway version; keep in sync with package.json. */
  private static readonly VERSION = '0.1.0';

  /** Base URL of the inference engine, assembled from env configuration. */
  private readonly inferenceEngineUrl: string;

  constructor(private readonly configService: ConfigService) {
    // Defaults target a locally running inference engine.
    const host = this.configService.get<string>('INFERENCE_HOST', 'localhost');
    const port = this.configService.get<string>('INFERENCE_PORT', '3161');
    this.inferenceEngineUrl = `http://${host}:${port}`;
  }

  /**
   * Liveness-style health report.
   *
   * Never rejects: a failed probe is reported as `status: 'degraded'`.
   * NOTE(review): `model_loaded` only mirrors endpoint reachability here,
   * not an actual model-load check — confirm that is the intended contract.
   *
   * @returns overall status, inference-engine connectivity, and version info.
   */
  async getHealth(): Promise<{
    status: 'healthy' | 'degraded';
    model_loaded: boolean;
    inference_engine: 'connected' | 'disconnected';
    timestamp: string;
    version: string;
  }> {
    const inferenceStatus = await this.checkInferenceEngine();
    return {
      status: inferenceStatus ? 'healthy' : 'degraded',
      model_loaded: inferenceStatus,
      inference_engine: inferenceStatus ? 'connected' : 'disconnected',
      timestamp: new Date().toISOString(),
      version: HealthService.VERSION,
    };
  }

  /**
   * Readiness probe (e.g. for Kubernetes): ready only when the inference
   * engine answers its health endpoint. Never rejects.
   *
   * @returns aggregate readiness flag plus per-dependency check results.
   */
  async getReadiness(): Promise<{
    ready: boolean;
    checks: { inference_engine: 'ready' | 'not_ready' };
    timestamp: string;
  }> {
    const inferenceStatus = await this.checkInferenceEngine();
    return {
      ready: inferenceStatus,
      checks: {
        inference_engine: inferenceStatus ? 'ready' : 'not_ready',
      },
      timestamp: new Date().toISOString(),
    };
  }

  /**
   * Probe the inference engine's `/health` endpoint.
   *
   * @returns true iff the request completes with HTTP 200 within the
   *   timeout; any error (timeout, refused connection, non-2xx throw) → false.
   */
  private async checkInferenceEngine(): Promise<boolean> {
    try {
      const response = await axios.get(`${this.inferenceEngineUrl}/health`, {
        timeout: HealthService.PROBE_TIMEOUT_MS,
      });
      return response.status === 200;
    } catch {
      // Deliberate best-effort: an unreachable engine is a state to report,
      // not an exception to propagate.
      return false;
    }
  }
}