seo-generator-server/tests/llm/llmmanager.concurrency.test.js
2025-09-03 15:29:19 +08:00

31 lines
1.0 KiB
JavaScript

import test from 'node:test';
import assert from 'node:assert';
import { safeImport } from '../_helpers/path.js';
import { FakeLLMClient } from '../_helpers/fakeLLMClient.js';
/** Log a soft-skip notice instead of failing when test hooks are unavailable. */
const skip = (reason) => { console.warn('[SKIP]', reason); };
test('LLMManager: enforces concurrency limit', async () => {
  // Skip (rather than fail) when the module or its test hooks are absent.
  const imported = await safeImport('LLMManager');
  if (!imported.ok) {
    skip(imported.reason);
    return;
  }
  const manager = imported.mod;
  const hasHooks =
    typeof manager.__setClient === 'function' &&
    typeof manager.__setConcurrency === 'function';
  if (!hasHooks) {
    skip('Missing __setClient/__setConcurrency hooks');
    return;
  }
  // Each fake call resolves after 50ms so the batching effect is measurable.
  const fake = new FakeLLMClient({ delayMs: 50 });
  manager.__setClient('mock', fake);
  manager.__setConcurrency(2); // cap at 2 in-flight requests
  const requestCount = 5;
  const pending = [];
  for (let i = 0; i < requestCount; i += 1) {
    pending.push(manager.callModel({ provider: 'mock', model: 'fake-1', input: `#${i}` }));
  }
  const startedAt = Date.now();
  await Promise.all(pending);
  const elapsed = Date.now() - startedAt;
  // With a 50ms delay and concurrency=2, 5 requests => ~3 waves => ~150ms total;
  // the 120ms threshold leaves margin for timer jitter.
  assert.ok(elapsed >= 120, `expected batching effect, got ${elapsed}ms`);
});