// HERENCIA-SIMCO.md actualizado con directivas v3.7 y v3.8 - Actualizaciones de configuracion
// Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// Test-harness and framework dependencies.
const testing_1 = require("@nestjs/testing");
const typeorm_1 = require("@nestjs/typeorm");
const common_1 = require("@nestjs/common");
// Unit under test and its collaborators.
const ai_service_1 = require("../services/ai.service");
const ai_config_entity_1 = require("../entities/ai-config.entity");
const ai_usage_entity_1 = require("../entities/ai-usage.entity");
const openrouter_client_1 = require("../clients/openrouter.client");
/**
 * Unit tests for AIService.
 *
 * All persistence (AIConfig / AIUsage repositories) and the outbound
 * OpenRouter HTTP client are replaced with jest mocks, so no database or
 * network access happens in this suite.
 */
describe('AIService', () => {
  let service;
  let configRepo;
  let usageRepo;
  let openRouterClient;

  // Fixture identifiers shared across the nested suites.
  const mockTenantId = '550e8400-e29b-41d4-a716-446655440001';
  const mockUserId = '550e8400-e29b-41d4-a716-446655440002';

  // A fully-populated tenant AI configuration in its enabled state.
  const mockConfig = {
    id: 'config-001',
    tenant_id: mockTenantId,
    provider: ai_config_entity_1.AIProvider.OPENROUTER,
    default_model: 'anthropic/claude-3-haiku',
    temperature: 0.7,
    max_tokens: 2048,
    is_enabled: true,
    system_prompt: 'You are a helpful assistant.',
    allow_custom_prompts: true,
    log_conversations: false,
    settings: {},
    created_at: new Date(),
    updated_at: new Date(),
  };

  // A single completed usage record for history/pagination tests.
  const mockUsage = {
    id: 'usage-001',
    tenant_id: mockTenantId,
    user_id: mockUserId,
    provider: ai_config_entity_1.AIProvider.OPENROUTER,
    model: 'anthropic/claude-3-haiku',
    status: ai_usage_entity_1.UsageStatus.COMPLETED,
    input_tokens: 100,
    output_tokens: 50,
    cost_input: 0.000025,
    cost_output: 0.0000625,
    latency_ms: 500,
  };

  // Shape of a successful OpenRouter chat-completion payload.
  const mockChatResponse = {
    id: 'gen-001',
    model: 'anthropic/claude-3-haiku',
    choices: [
      {
        index: 0,
        message: { role: 'assistant', content: 'Hello! How can I help you?' },
        finish_reason: 'stop',
      },
    ],
    usage: {
      prompt_tokens: 100,
      completion_tokens: 50,
      total_tokens: 150,
    },
    created: Date.now(),
  };

  // Builds a chainable TypeORM query-builder stub whose getRawOne() resolves
  // to the supplied raw row. Shared by the getCurrentMonthUsage tests.
  const createQueryBuilderMock = (rawRow) => ({
    select: jest.fn().mockReturnThis(),
    addSelect: jest.fn().mockReturnThis(),
    where: jest.fn().mockReturnThis(),
    andWhere: jest.fn().mockReturnThis(),
    getRawOne: jest.fn().mockResolvedValue(rawRow),
  });

  beforeEach(async () => {
    // Fresh mocks per test so call counts never leak between cases.
    const configRepoStub = {
      findOne: jest.fn(),
      create: jest.fn(),
      save: jest.fn(),
    };
    const usageRepoStub = {
      findOne: jest.fn(),
      create: jest.fn(),
      save: jest.fn(),
      findAndCount: jest.fn(),
      createQueryBuilder: jest.fn(),
    };
    const openRouterStub = {
      isReady: jest.fn(),
      chatCompletion: jest.fn(),
      getModels: jest.fn(),
      calculateCost: jest.fn(),
    };

    const moduleRef = await testing_1.Test.createTestingModule({
      providers: [
        ai_service_1.AIService,
        { provide: (0, typeorm_1.getRepositoryToken)(ai_config_entity_1.AIConfig), useValue: configRepoStub },
        { provide: (0, typeorm_1.getRepositoryToken)(ai_usage_entity_1.AIUsage), useValue: usageRepoStub },
        { provide: openrouter_client_1.OpenRouterClient, useValue: openRouterStub },
      ],
    }).compile();

    service = moduleRef.get(ai_service_1.AIService);
    configRepo = moduleRef.get((0, typeorm_1.getRepositoryToken)(ai_config_entity_1.AIConfig));
    usageRepo = moduleRef.get((0, typeorm_1.getRepositoryToken)(ai_usage_entity_1.AIUsage));
    openRouterClient = moduleRef.get(openrouter_client_1.OpenRouterClient);
  });

  afterEach(() => {
    jest.clearAllMocks();
    jest.restoreAllMocks();
  });

  describe('getConfig', () => {
    it('should return existing config', async () => {
      configRepo.findOne.mockResolvedValue(mockConfig);

      const result = await service.getConfig(mockTenantId);

      expect(result).toEqual(mockConfig);
      expect(configRepo.findOne).toHaveBeenCalledWith({
        where: { tenant_id: mockTenantId },
      });
    });

    it('should create default config if not exists', async () => {
      // First lookup misses, so the service must create and persist a default.
      configRepo.findOne.mockResolvedValue(null);
      configRepo.create.mockReturnValue(mockConfig);
      configRepo.save.mockResolvedValue(mockConfig);

      const result = await service.getConfig(mockTenantId);

      expect(result).toEqual(mockConfig);
      expect(configRepo.create).toHaveBeenCalled();
      expect(configRepo.save).toHaveBeenCalled();
    });
  });

  describe('updateConfig', () => {
    it('should update config successfully', async () => {
      const updated = { ...mockConfig, temperature: 0.9, max_tokens: 4096 };
      configRepo.findOne.mockResolvedValue(mockConfig);
      configRepo.save.mockResolvedValue(updated);

      const result = await service.updateConfig(mockTenantId, {
        temperature: 0.9,
        max_tokens: 4096,
      });

      expect(result.temperature).toBe(0.9);
      expect(result.max_tokens).toBe(4096);
      expect(configRepo.save).toHaveBeenCalled();
    });

    it('should update system prompt', async () => {
      const updated = { ...mockConfig, system_prompt: 'New prompt' };
      configRepo.findOne.mockResolvedValue(mockConfig);
      configRepo.save.mockResolvedValue(updated);

      const result = await service.updateConfig(mockTenantId, {
        system_prompt: 'New prompt',
      });

      expect(result.system_prompt).toBe('New prompt');
    });

    it('should disable AI features', async () => {
      const updated = { ...mockConfig, is_enabled: false };
      configRepo.findOne.mockResolvedValue(mockConfig);
      configRepo.save.mockResolvedValue(updated);

      const result = await service.updateConfig(mockTenantId, {
        is_enabled: false,
      });

      expect(result.is_enabled).toBe(false);
    });
  });

  describe('chat', () => {
    const chatDto = {
      messages: [{ role: 'user', content: 'Hello' }],
    };

    it('should throw when AI is disabled for tenant', async () => {
      configRepo.findOne.mockResolvedValue({ ...mockConfig, is_enabled: false });

      await expect(service.chat(mockTenantId, mockUserId, chatDto)).rejects.toThrow(common_1.BadRequestException);
    });

    it('should throw when service not configured', async () => {
      // Tenant config is fine, but the upstream client reports not-ready.
      configRepo.findOne.mockResolvedValue(mockConfig);
      openRouterClient.isReady.mockReturnValue(false);

      await expect(service.chat(mockTenantId, mockUserId, chatDto)).rejects.toThrow(common_1.BadRequestException);
    });
  });

  describe('getModels', () => {
    it('should return available models', async () => {
      const models = [
        {
          id: 'anthropic/claude-3-haiku',
          name: 'Claude 3 Haiku',
          provider: 'anthropic',
          context_length: 200000,
          pricing: { prompt: 0.25, completion: 1.25 },
        },
      ];
      openRouterClient.getModels.mockResolvedValue(models);

      const result = await service.getModels();

      expect(result).toEqual(models);
      expect(openRouterClient.getModels).toHaveBeenCalled();
    });
  });

  describe('getCurrentMonthUsage', () => {
    it('should return usage statistics', async () => {
      // Raw SQL aggregates come back as strings; the service must coerce them.
      usageRepo.createQueryBuilder.mockReturnValue(createQueryBuilderMock({
        request_count: '10',
        total_input_tokens: '1000',
        total_output_tokens: '500',
        total_tokens: '1500',
        total_cost: '0.05',
        avg_latency_ms: '450',
      }));

      const result = await service.getCurrentMonthUsage(mockTenantId);

      expect(result.request_count).toBe(10);
      expect(result.total_input_tokens).toBe(1000);
      expect(result.total_output_tokens).toBe(500);
      expect(result.total_tokens).toBe(1500);
      expect(result.total_cost).toBe(0.05);
      expect(result.avg_latency_ms).toBe(450);
    });

    it('should return zero values for new tenant', async () => {
      usageRepo.createQueryBuilder.mockReturnValue(createQueryBuilderMock({
        request_count: '0',
        total_input_tokens: '0',
        total_output_tokens: '0',
        total_tokens: '0',
        total_cost: '0',
        avg_latency_ms: '0',
      }));

      const result = await service.getCurrentMonthUsage(mockTenantId);

      expect(result.request_count).toBe(0);
      expect(result.total_cost).toBe(0);
    });
  });

  describe('getUsageHistory', () => {
    it('should return paginated usage history', async () => {
      usageRepo.findAndCount.mockResolvedValue([[mockUsage], 1]);

      const result = await service.getUsageHistory(mockTenantId, 1, 20);

      expect(result.data).toHaveLength(1);
      expect(result.total).toBe(1);
      expect(usageRepo.findAndCount).toHaveBeenCalledWith({
        where: { tenant_id: mockTenantId },
        order: { created_at: 'DESC' },
        skip: 0,
        take: 20,
      });
    });

    it('should handle pagination correctly', async () => {
      usageRepo.findAndCount.mockResolvedValue([[], 100]);

      const result = await service.getUsageHistory(mockTenantId, 5, 10);

      // Page 5 with page-size 10 must skip the first 40 rows.
      expect(usageRepo.findAndCount).toHaveBeenCalledWith(expect.objectContaining({
        skip: 40,
        take: 10,
      }));
      expect(result.total).toBe(100);
    });
  });

  describe('isServiceReady', () => {
    it('should return true when client is ready', () => {
      openRouterClient.isReady.mockReturnValue(true);
      expect(service.isServiceReady()).toBe(true);
    });

    it('should return false when client is not ready', () => {
      openRouterClient.isReady.mockReturnValue(false);
      expect(service.isServiceReady()).toBe(false);
    });
  });
});
//# sourceMappingURL=ai.service.spec.js.map