sourcefinder/export_logger/logviewer.cjs
Alexis Trouvé a7bd6115b7
Some checks failed
SourceFinder CI/CD Pipeline / Code Quality & Linting (push) Has been cancelled
SourceFinder CI/CD Pipeline / Unit Tests (push) Has been cancelled
SourceFinder CI/CD Pipeline / Security Tests (push) Has been cancelled
SourceFinder CI/CD Pipeline / Integration Tests (push) Has been cancelled
SourceFinder CI/CD Pipeline / Performance Tests (push) Has been cancelled
SourceFinder CI/CD Pipeline / Code Coverage Report (push) Has been cancelled
SourceFinder CI/CD Pipeline / Build & Deployment Validation (16.x) (push) Has been cancelled
SourceFinder CI/CD Pipeline / Build & Deployment Validation (18.x) (push) Has been cancelled
SourceFinder CI/CD Pipeline / Build & Deployment Validation (20.x) (push) Has been cancelled
SourceFinder CI/CD Pipeline / Regression Tests (push) Has been cancelled
SourceFinder CI/CD Pipeline / Security Audit (push) Has been cancelled
SourceFinder CI/CD Pipeline / Notify Results (push) Has been cancelled
feat: Implémentation complète du système SourceFinder avec tests
- Architecture modulaire avec injection de dépendances
- Système de scoring intelligent multi-facteurs (spécificité, fraîcheur, qualité, réutilisation)
- Moteur anti-injection 4 couches (preprocessing, patterns, sémantique, pénalités)
- API REST complète avec validation et rate limiting
- Repository JSON avec index mémoire et backup automatique
- Provider LLM modulaire pour génération de contenu
- Suite de tests complète (Jest) :
  * Tests unitaires pour sécurité et scoring
  * Tests d'intégration API end-to-end
  * Tests de sécurité avec simulation d'attaques
  * Tests de performance et charge
- Pipeline CI/CD avec GitHub Actions
- Logging structuré et monitoring
- Configuration ESLint et environnement de test

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-15 23:06:10 +08:00

339 lines
12 KiB
JavaScript

// tools/logViewer.js (Pino-compatible JSONL + timearea + filters)
const fs = require('fs');
const path = require('path');
const os = require('os');
const readline = require('readline');
/**
 * Locate the most recently modified regular file in `dir` (default: ./logs).
 * Entries that cannot be stat-ed (broken symlinks, files deleted between
 * readdir and stat) are skipped instead of aborting the whole scan — the
 * original `.map(statSync)` threw on the first bad entry.
 * @param {string} [dir] - directory to scan.
 * @returns {string} absolute-ish path (dir + name) of the newest file.
 * @throws {Error} when the directory is missing or contains no files.
 */
function resolveLatestLogFile(dir = path.resolve(process.cwd(), 'logs')) {
  if (!fs.existsSync(dir)) throw new Error(`Logs directory not found: ${dir}`);
  const candidates = [];
  for (const name of fs.readdirSync(dir)) {
    let stat;
    try {
      stat = fs.statSync(path.join(dir, name));
    } catch {
      continue; // broken symlink or racing delete: ignore this entry
    }
    if (stat.isFile()) candidates.push({ name, mtimeMs: stat.mtimeMs });
  }
  if (!candidates.length) throw new Error(`No log files in ${dir}`);
  candidates.sort((a, b) => b.mtimeMs - a.mtimeMs); // newest first
  return path.join(dir, candidates[0].name);
}
// Active log file. LOG_FILE env var wins; otherwise we try the newest file in
// ./logs. The auto-lookup is wrapped so that merely require()-ing this module
// cannot throw when no logs/ directory exists yet — previously the eager
// resolveLatestLogFile() call crashed the CLI before --file/setLogFile could
// ever run. When nothing is found, LOG_FILE stays null and the error surfaces
// at first use instead.
let LOG_FILE = process.env.LOG_FILE
  ? path.resolve(process.cwd(), process.env.LOG_FILE)
  : (() => { try { return resolveLatestLogFile(); } catch { return null; } })();
// Files larger than this many MiB are tailed/streamed instead of read whole.
const MAX_SAFE_READ_MB = 50;
// Default record count for the tail ("last") mode.
const DEFAULT_LAST_LINES = 200;
/** Point the viewer at an explicit log file; relative paths resolve from CWD. */
function setLogFile(filePath) {
  LOG_FILE = path.resolve(process.cwd(), filePath);
}
/** Megabytes -> bytes. */
function MB(megabytes) {
  return megabytes * 1024 * 1024;
}
/** Parse a base-10 integer, falling back to `fallback` for non-numeric input. */
function toInt(value, fallback) {
  const parsed = parseInt(value, 10);
  return Number.isFinite(parsed) ? parsed : fallback;
}
// Pino numeric levels plus two project-specific ones (25 PROMPT, 26 LLM).
const LEVEL_MAP_NUM = {10:'TRACE',20:'DEBUG',25:'PROMPT',26:'LLM',30:'INFO',40:'WARN',50:'ERROR',60:'FATAL'};
/**
 * Normalize a level (number, numeric string, or name) to an upper-case label.
 * Unknown numbers come back as their decimal string; null/undefined map to
 * 'UNKNOWN'; unknown names pass through upper-cased.
 */
function normLevel(value) {
  if (value == null) return 'UNKNOWN';
  if (typeof value === 'number') {
    return LEVEL_MAP_NUM[value] || String(value);
  }
  const upper = String(value).toUpperCase();
  return LEVEL_MAP_NUM[Number(upper)] || upper;
}
/**
 * Extract the record timestamp (`time` first, then `timestamp`; either an
 * epoch-milliseconds number or a date string) as a Date.
 * Returns null when the field is absent or unparseable.
 */
function parseWhen(obj) {
  const raw = obj.time ?? obj.timestamp;
  if (raw == null) return null;
  if (typeof raw === 'number') return new Date(raw);
  const parsed = new Date(String(raw));
  return Number.isNaN(parsed.getTime()) ? null : parsed;
}
/**
 * Render one parsed record as a single aligned text line:
 * ISO timestamp, level padded to 5, module/path/name truncated+padded to 60,
 * the message, and an optional ` [evt]` / ` [evt Nms]` suffix.
 */
function prettyLine(obj) {
  const when = parseWhen(obj);
  const ts = when ? when.toISOString() : '';
  const lvl = normLevel(obj.level).padEnd(5, ' ');
  const source = obj.module || obj.path || obj.name || 'root';
  const mod = source.slice(0, 60).padEnd(60, ' ');
  const msg = obj.msg ?? obj.message ?? '';
  let extra = '';
  if (obj.evt) {
    extra = obj.dur_ms ? ` [${obj.evt} ${obj.dur_ms}ms]` : ` [${obj.evt}]`;
  }
  return `${ts} ${lvl} ${mod} ${msg}${extra}`;
}
/**
 * Pre-compute the shared filter state used by passesAll():
 * normalized level, exact-module string, since/until Dates, case-insensitive
 * regex (invalid patterns are silently ignored), a centred time window
 * (center +/- radius seconds), and the list of AND-ed --filter terms.
 */
function buildFilters({ level, mod, since, until, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms }) {
  let rx = null;
  if (regex) {
    try {
      rx = new RegExp(regex, 'i');
    } catch {
      /* invalid user pattern: filter is simply not applied */
    }
  }
  const sinceDate = since ? new Date(since) : null;
  const untilDate = until ? new Date(until) : null;
  const wantLvl = level ? normLevel(level) : null;
  // timearea: a window centred on timeareaCenter, +/- radius (seconds).
  let areaStart = null;
  let areaEnd = null;
  if (timeareaCenter && timeareaRadiusSec != null) {
    const center = new Date(timeareaCenter);
    if (!Number.isNaN(center.getTime())) {
      const radiusMs = Number(timeareaRadiusSec) * 1000;
      areaStart = new Date(center.getTime() - radiusMs);
      areaEnd = new Date(center.getTime() + radiusMs);
    }
  }
  // --filter may arrive as a single value or an array; drop empty entries.
  const terms = Array.isArray(filterTerms)
    ? filterTerms.filter(Boolean)
    : (filterTerms ? [filterTerms] : []);
  return { wantLvl, mod, sinceDate, untilDate, includes, rx, areaStart, areaEnd, terms };
}
/**
 * Build the lower-case multi-field haystack used by --filter matching:
 * msg, message, module, path, name, evt, span, plus stringified attrs,
 * joined by ' | '.
 */
function objectToSearchString(o) {
  const parts = [];
  for (const key of ['msg', 'message', 'module', 'path', 'name', 'evt', 'span']) {
    if (o[key] != null) parts.push(String(o[key]));
  }
  if (o.attrs != null) parts.push(safeStringify(o.attrs));
  return parts.join(' | ').toLowerCase();
}
/** JSON.stringify that never throws (circular/BigInt fall back to String()). */
function safeStringify(v) {
  try {
    return JSON.stringify(v);
  } catch {
    return String(v);
  }
}
/**
 * Apply every configured filter (from buildFilters) to one parsed record.
 * A record passes only if it satisfies ALL of: level, exact module/path/name
 * match, since/until window, timearea window, message substring (`includes`),
 * message regex, and every --filter term (AND semantics over the multi-field
 * haystack). Records without a timestamp are rejected by any time filter.
 *
 * Fix vs original: the timestamp was parsed once up front, yet the timearea
 * branch re-ran `if (!d) d = parseWhen(obj)` — dead code, since `d` was
 * already unconditionally assigned and could not change. Removed.
 */
function passesAll(obj, f) {
  if (!obj || typeof obj !== 'object') return false;
  if (f.wantLvl && normLevel(obj.level) !== f.wantLvl) return false;
  if (f.mod) {
    const mod = String(obj.module || obj.path || obj.name || '');
    if (mod !== f.mod) return false;
  }
  // Single timestamp parse shared by both time filters.
  const when = parseWhen(obj);
  // since/until window
  if (f.sinceDate || f.untilDate) {
    if (!when) return false;
    if (f.sinceDate && when < f.sinceDate) return false;
    if (f.untilDate && when > f.untilDate) return false;
  }
  // timearea (centred window)
  if (f.areaStart || f.areaEnd) {
    if (!when) return false;
    if (f.areaStart && when < f.areaStart) return false;
    if (f.areaEnd && when > f.areaEnd) return false;
  }
  const msg = String(obj.msg ?? obj.message ?? '');
  if (f.includes && !msg.toLowerCase().includes(String(f.includes).toLowerCase())) return false;
  if (f.rx && !f.rx.test(msg)) return false;
  // --filter terms: every one must match somewhere in the record (AND).
  if (f.terms && f.terms.length) {
    const hay = objectToSearchString(obj); // multi-field haystack
    for (const t of f.terms) {
      if (!hay.includes(String(t).toLowerCase())) return false;
    }
  }
  return true;
}
/** Keep only the records that pass every configured filter. */
function applyFilters(records, filters) {
  return records.filter((record) => passesAll(record, filters));
}
/** Parse one JSON line; returns null on malformed input instead of throwing. */
function safeParse(line) {
  try {
    return JSON.parse(line);
  } catch {
    return null;
  }
}
/** Parse many lines, silently dropping any that are not valid JSON. */
function safeParseLines(lines) {
  const parsed = [];
  for (const line of lines) {
    const obj = safeParse(line);
    if (obj) parsed.push(obj);
  }
  return parsed;
}
/** Size of `file` in bytes; rejects with a descriptive error when missing. */
async function getFileSize(file) {
  const stat = await fs.promises.stat(file).catch(() => null);
  if (!stat) throw new Error(`Log file not found: ${file}`);
  return stat.size;
}
/** Read a whole JSONL file and return its parsed, non-empty lines. */
async function readAllLines(file) {
  const text = await fs.promises.readFile(file, 'utf8');
  const nonEmpty = text.split(/\r?\n/).filter(Boolean);
  return safeParseLines(nonEmpty);
}
/**
 * Return up to `approxLines` parsed JSON records from the END of `file`,
 * in file order, by reading the file backwards in 64 KiB chunks.
 *
 * Fix vs original: each chunk's lines were `push`ed, so when the tail spanned
 * more than one chunk, lines from EARLIER chunks landed AFTER later chunks in
 * the result, and `slice(-approxLines)` returned a scrambled/wrong selection.
 * Chunks are now prepended so file order is preserved.
 *
 * NOTE(review): a multi-byte UTF-8 character split exactly on a chunk
 * boundary can still be mangled by the per-chunk toString; such a line then
 * fails JSON.parse and is dropped — same limitation as the original.
 */
async function tailJsonl(file, approxLines = DEFAULT_LAST_LINES) {
  const fd = await fs.promises.open(file, 'r');
  try {
    const stat = await fd.stat();
    const CHUNK = 64 * 1024;
    let pos = stat.size;
    let carry = ''; // partial first line of the region read so far
    const objs = [];
    // Local parse helper so this block has no external dependencies.
    const parse = (s) => { try { return JSON.parse(s); } catch { return null; } };
    while (pos > 0 && objs.length < approxLines) {
      const size = Math.min(CHUNK, pos);
      pos -= size;
      const buf = Buffer.alloc(size);
      await fd.read(buf, 0, size, pos);
      carry = buf.toString('utf8') + carry;
      const parts = carry.split(/\r?\n/);
      carry = parts.shift(); // may be partial; completed on the next pass
      const chunkObjs = [];
      for (const p of parts) {
        if (!p.trim()) continue;
        const o = parse(p);
        if (o) chunkObjs.push(o);
      }
      objs.unshift(...chunkObjs); // earlier chunks go BEFORE later ones
    }
    // Whatever is left is the very first line of the region we read.
    if (carry && carry.trim()) {
      const o = parse(carry);
      if (o) objs.unshift(o);
    }
    return objs.slice(-approxLines);
  } finally {
    await fd.close();
  }
}
/**
 * Stream a JSONL file line by line, collecting records that pass the filters.
 * Stops reading as soon as `limit` matches have been found.
 */
async function streamFilter(file, filters, limit) {
  const rl = readline.createInterface({
    input: fs.createReadStream(file, { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });
  const matches = [];
  for await (const line of rl) {
    if (!line.trim()) continue;
    const record = safeParse(line);
    if (!record) continue;
    if (!passesAll(record, filters)) continue;
    matches.push(record);
    if (matches.length >= limit) break;
  }
  rl.close();
  return matches;
}
/** Stream a JSONL file and invoke `onObj` for every parseable record. */
async function streamEach(file, onObj) {
  const rl = readline.createInterface({
    input: fs.createReadStream(file, { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });
  for await (const line of rl) {
    if (!line.trim()) continue;
    const record = safeParse(line);
    if (record) onObj(record);
  }
  rl.close();
}
/**
 * Return the last `lines` records of the active log file, after filtering.
 * Files up to MAX_SAFE_READ_MB are read whole; larger files are tailed with a
 * 3x oversampling heuristic so the filters still have candidates to reject.
 * With `pretty`, returns formatted strings (prettyLine) instead of objects.
 */
async function getLast(opts = {}) {
  const {
    lines = DEFAULT_LAST_LINES, level, module: mod, since, until, includes, regex,
    timeareaCenter, timeareaRadiusSec, filterTerms, pretty = false,
  } = opts;
  const filters = buildFilters({ level, mod, since, until, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms });
  const wanted = Math.max(lines, 1);
  const size = await getFileSize(LOG_FILE);
  if (size <= MB(MAX_SAFE_READ_MB)) {
    const all = await readAllLines(LOG_FILE);
    const result = applyFilters(all.slice(-wanted), filters);
    return pretty ? result.map(prettyLine) : result;
  }
  // Large file: tail roughly 3x the requested count, then filter down.
  const tail = await tailJsonl(LOG_FILE, lines * 3);
  const result = applyFilters(tail, filters).slice(-wanted);
  return pretty ? result.map(prettyLine) : result;
}
/**
 * Scan the whole active log file for records matching the filters, capped at
 * `limit`. Small files are filtered in memory and keep the LAST `limit`
 * matches; large files are streamed and keep the FIRST `limit` matches
 * (streamFilter stops at the cap) — an asymmetry inherited from the design.
 */
async function search(opts = {}) {
  const {
    limit = 500, level, module: mod, since, until, includes, regex,
    timeareaCenter, timeareaRadiusSec, filterTerms, pretty = false,
  } = opts;
  const filters = buildFilters({ level, mod, since, until, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms });
  const size = await getFileSize(LOG_FILE);
  let results;
  if (size <= MB(MAX_SAFE_READ_MB)) {
    results = applyFilters(await readAllLines(LOG_FILE), filters).slice(-limit);
  } else {
    results = await streamFilter(LOG_FILE, filters, limit);
  }
  return pretty ? results.map(prettyLine) : results;
}
/**
 * Aggregate matching records into counts grouped by 'level' (default),
 * 'module', or 'day' (UTC date from the record timestamp). Records without a
 * timestamp are skipped when grouping by day. Returns an array of
 * `{<by>: key, count}` objects sorted by descending count.
 */
async function stats(opts = {}) {
  const {
    by = 'level', since, until, level, module: mod, includes, regex,
    timeareaCenter, timeareaRadiusSec, filterTerms,
  } = opts;
  const filters = buildFilters({ level, mod, since, until, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms });
  const counts = {};
  await streamEach(LOG_FILE, (record) => {
    if (!passesAll(record, filters)) return;
    let key;
    if (by === 'day') {
      const when = parseWhen(record);
      if (!when) return;
      key = when.toISOString().slice(0, 10);
    } else if (by === 'module') {
      key = record.module || record.path || record.name || 'unknown';
    } else {
      key = normLevel(record.level);
    }
    counts[key] = (counts[key] || 0) + 1;
  });
  return Object.entries(counts)
    .sort((a, b) => b[1] - a[1])
    .map(([key, count]) => ({ [by]: key, count }));
}
// --- CLI entry point: parse argv, pick a mode (stats / search / tail), print. ---
if (require.main===module){
(async ()=>{
try{
const args=parseArgs(process.argv.slice(2));
if (args.help) return printHelp();
// An explicit --file always wins for selecting the log file.
if (args.file) setLogFile(args.file);
// Support for positional filename arguments
// (e.g. `node logviewer.cjs logs/app.log --pretty`).
if (args.unknown && args.unknown.length > 0 && !args.file) {
const possibleFile = args.unknown[0];
if (possibleFile && !possibleFile.startsWith('-')) {
setLogFile(possibleFile);
}
}
// Filter options shared by all three modes.
const common = {
level: args.level,
module: args.module,
since: args.since,
until: args.until,
includes: args.includes,
regex: args.regex,
timeareaCenter: args.timeareaCenter,
timeareaRadiusSec: args.timeareaRadiusSec,
filterTerms: args.filterTerms,
};
// Mode 1: aggregate counts (--stats); always JSON output.
if (args.stats){
const res=await stats({by:args.by||'level', ...common});
return console.log(JSON.stringify(res,null,2));
}
// Mode 2: full-file search (--search) with a result cap (--limit).
if (args.search){
const res=await search({limit:toInt(args.limit,500), ...common, pretty:!!args.pretty});
return printResult(res,!!args.pretty);
}
// Mode 3 (default): tail the last N records (--last).
const res=await getLast({lines:toInt(args.last,DEFAULT_LAST_LINES), ...common, pretty:!!args.pretty});
return printResult(res,!!args.pretty);
}catch(e){ console.error(`[logViewer] Error: ${e.message}`); process.exitCode=1; }
})();
}
/**
 * Hand-rolled argv parser.
 * Boolean flags: --help/-h, --search, --pretty, --stats.
 * Single-value flags: --file, --last, --limit, --level, --module, --since,
 * --until, --includes, --regex, --by.
 * --timearea consumes two values (ISO centre + radius in seconds);
 * --filter is repeatable and accumulates into `filterTerms` (removed again
 * when empty). Anything unrecognized is collected into `unknown`.
 */
function parseArgs(argv) {
  const parsed = { filterTerms: [] };
  const BOOL_FLAGS = {
    '--help': 'help', '-h': 'help',
    '--search': 'search', '--pretty': 'pretty', '--stats': 'stats',
  };
  const VALUE_FLAGS = {
    '--file': 'file', '--last': 'last', '--limit': 'limit',
    '--level': 'level', '--module': 'module', '--since': 'since',
    '--until': 'until', '--includes': 'includes', '--regex': 'regex',
    '--by': 'by',
  };
  for (let i = 0; i < argv.length; i++) {
    const arg = argv[i];
    const next = () => (i + 1 < argv.length ? argv[i + 1] : undefined);
    if (BOOL_FLAGS[arg]) {
      parsed[BOOL_FLAGS[arg]] = true;
    } else if (VALUE_FLAGS[arg]) {
      parsed[VALUE_FLAGS[arg]] = next();
      i++;
    } else if (arg === '--timearea') {
      // --timearea <ISO_CENTER> <RADIUS_SECONDS>
      parsed.timeareaCenter = next();
      i++;
      const radius = next();
      i++;
      parsed.timeareaRadiusSec = radius != null ? Number(radius) : undefined;
    } else if (arg === '--filter') {
      // Repeatable; every term must match (AND).
      const term = next();
      i++;
      if (term != null) parsed.filterTerms.push(term);
    } else {
      (parsed.unknown ??= []).push(arg);
    }
  }
  if (parsed.filterTerms.length === 0) delete parsed.filterTerms;
  return parsed;
}
// Print CLI usage. NOTE: the template literal below is user-facing runtime
// output — keep it in sync with parseArgs() when adding or renaming flags.
function printHelp(){
const bin=`node ${path.relative(process.cwd(), __filename)}`;
console.log(`
LogViewer (Pino-compatible JSONL)
Usage:
${bin} [--file logs/app.log] [--pretty] [--last 200] [filters...]
${bin} --search [--limit 500] [filters...]
${bin} --stats [--by level|module|day] [filters...]
Time filters:
--since 2025-09-02T00:00:00Z
--until 2025-09-02T23:59:59Z
--timearea <ISO_CENTER> <RADIUS_SECONDS> # fenêtre centrée
Text filters:
--includes "keyword in msg"
--regex "(timeout|ECONNRESET)"
--filter TERM # multi-champs (msg, path/module, name, evt, attrs). Répétable. AND.
Other filters:
--level 30|INFO|ERROR
--module "Workflow SEO > Génération contenu multi-LLM"
Examples:
${bin} --timearea 2025-09-02T23:59:59Z 200 --pretty
${bin} --timearea 2025-09-02T12:00:00Z 900 --filter INFO --filter PROMPT --search --pretty
${bin} --last 300 --level ERROR --filter "Génération contenu" --pretty
`);}
function printResult(res, pretty){ console.log(pretty? res.join(os.EOL) : JSON.stringify(res,null,2)); }
module.exports = { setLogFile, getLast, search, stats };