update logviewer.js to show level 25 messages
This commit is contained in:
parent ed12aba6f8
commit 31ea27153d

CLAUDE.md (109 lines changed)
@@ -54,39 +54,39 @@ This is a Node.js-based SEO content generation server that was converted from Go

## Development Commands

### Production Workflow Execution

```bash
# Execute real production workflow from Google Sheets
node -e "const main = require('./lib/Main'); main.handleFullWorkflow({ rowNumber: 2, source: 'production' });"

# Test with different rows
node -e "const main = require('./lib/Main'); main.handleFullWorkflow({ rowNumber: 3, source: 'production' });"
```

### Basic Operations

- `npm start` - Start the production server on port 3000
- `npm run dev` - Start the development server (same as start)
- `node server.js` - Direct server startup

### Testing Commands

#### Google Sheets Integration Tests

```bash
# Test personality loading from Google Sheets
node -e "const {getPersonalities} = require('./lib/BrainConfig'); getPersonalities().then(p => console.log(\`\${p.length} personalities loaded\`));"

# Test CSV data loading
node -e "const {readInstructionsData} = require('./lib/BrainConfig'); readInstructionsData(2).then(d => console.log('Data:', d));"

# Test random personality selection
node -e "const {selectPersonalityWithAI, getPersonalities} = require('./lib/BrainConfig'); getPersonalities().then(p => selectPersonalityWithAI('test', 'test', p)).then(r => console.log('Selected:', r.nom));"
```

#### LLM Connectivity Tests

- `node -e "require('./lib/LLMManager').testLLMManager()"` - Test basic LLM connectivity
- `node -e "require('./lib/LLMManager').testLLMManagerComplete()"` - Full LLM provider test suite

#### Complete System Test

```bash
node -e "
const main = require('./lib/Main');
const testData = {
```

@@ -98,16 +98,16 @@ const testData = {

```bash
    mcPlus1: 'plaque gravée,plaque métal,plaque bois,plaque acrylique',
    tPlus1: 'Plaque Gravée Premium,Plaque Métal Moderne,Plaque Bois Naturel,Plaque Acrylique Design'
  },
  xmlTemplate: Buffer.from(\`<?xml version='1.0' encoding='UTF-8'?>
<article>
  <h1>|Titre_Principal{{T0}}{Rédige un titre H1 accrocheur}|</h1>
  <intro>|Introduction{{MC0}}{Rédige une introduction engageante}|</intro>
</article>\`).toString('base64'),
  source: 'node_server_test'
};
main.handleFullWorkflow(testData);
"
```

## Architecture Overview

@@ -163,7 +163,7 @@ main.handleFullWorkflow(testData);

- Default XML template system for filename fallbacks

#### lib/ElementExtraction.js ✅

- Fixed regex for instruction parsing: `{{variables}}` vs `{instructions}` (see the sketch below)
- 16+ element extraction capability
- Direct generation mode operational

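A minimal sketch of that distinction, using illustrative regexes (the real patterns live in lib/ElementExtraction.js): double braces mark template variables, single braces mark free-text instructions.

```js
// Hypothetical patterns for illustration only; not necessarily the exact ones in lib/ElementExtraction.js.
const VARIABLE_RX = /\{\{([^{}]+)\}\}/g;              // {{T0}}, {{MC0}}, ...
const INSTRUCTION_RX = /(?<!\{)\{([^{}]+)\}(?!\})/g;  // {Rédige un titre H1 accrocheur}, ...

const field = '|Titre_Principal{{T0}}{Rédige un titre H1 accrocheur}|';
console.log([...field.matchAll(VARIABLE_RX)].map(m => m[1]));    // [ 'T0' ]
console.log([...field.matchAll(INSTRUCTION_RX)].map(m => m[1])); // [ 'Rédige un titre H1 accrocheur' ]
```
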
@@ -229,7 +229,7 @@ main.handleFullWorkflow(testData);

- **production** - Real Google Sheets data processing
- **test_random_personality** - Testing with personality randomization
- **node_server** - Direct API processing
- Legacy: `make_com`, `digital_ocean_autonomous`

## Key Dependencies

- **googleapis** : Google Sheets API integration

@@ -300,16 +300,87 @@ main.handleFullWorkflow(testData);

## Unused Audit Tool

- **Location**: tools/audit-unused.cjs (manual run only)
- **Reports**: Dead files, broken relative imports, unused exports
- **Use sparingly**: Run before cleanup or release; keep exports with `// @keep:export Name`

## 📦 Bundling Tool

`pack-lib.cjs` creates a single `code.js` from all files in `lib/`.
Each file is concatenated with an ASCII header showing its path. Imports/exports are kept, so the bundle is for **reading/audit only**, not execution.

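Each header is an ASCII banner of roughly this shape (illustrative; the exact format is defined in pack-lib.cjs):

```js
// ============================================================
// FILE: lib/Main.js
// ============================================================
```
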
### Usage

```bash
node pack-lib.cjs                             # default → code.js
node pack-lib.cjs --out out.js                # custom output
node pack-lib.cjs --order alpha
node pack-lib.cjs --entry lib/test-manual.js
```

## 🔍 Log Consultation (LogViewer)

### Context

- Logs are no longer sent to console.log (too verbose).
- All events are recorded in logs/app.log in **Pino JSONL** format.
- Example line:

```json
{"level":30,"time":1756797556942,"evt":"span.end","path":"Workflow SEO > Génération mots-clés","dur_ms":4584.6,"msg":"✔ Génération mots-clés (4.58s)"}
```

### Dedicated Tool

A dedicated tool, `tools/logViewer.js`, makes it easy to query this file.

#### Quick Commands

* **View the last 200 formatted lines**

```bash
node tools/logViewer.js --pretty
```

* **Search for a keyword in messages**
  (example: everything mentioning Claude)

```bash
node tools/logViewer.js --search --includes "Claude" --pretty
```

* **Search by time range**
  (ISO string or partial date)

```bash
# All logs from September 2, 2025
node tools/logViewer.js --since 2025-09-02T00:00:00Z --until 2025-09-02T23:59:59Z --pretty
```

* **Filter by error level**

```bash
node tools/logViewer.js --last 300 --level ERROR --pretty
```

* **Stats per day**

```bash
node tools/logViewer.js --stats --by day --level ERROR
```

### Available Filters

* --level : 25=PROMPT, 30=INFO, 40=WARN, 50=ERROR (or PROMPT, INFO, WARN, ERROR)
* --module : filter by path or module
* --includes : keyword in msg
* --regex : regular expression on msg
* --since / --until : time bounds (ISO or YYYY-MM-DD)

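The CLI also accepts `--timearea <ISO> <seconds>` (a centered time window) and a repeatable `--filter TERM` (multi-field AND match), per the help text in tools/logviewer.cjs below. For example, to pull up the level-25 PROMPT messages this commit exposes:

```bash
# Last 200 lines, PROMPT (level 25) only; "--level 25" works too
node tools/logViewer.js --last 200 --level PROMPT --pretty

# Everything within ±15 minutes of noon on September 2, 2025
node tools/logViewer.js --timearea 2025-09-02T12:00:00Z 900 --pretty
```
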
### Main Fields

* level : log level
* time : timestamp (epoch or ISO)
* path : the workflow involved
* evt : event type (span.start, span.end, etc.)
* dur_ms : duration, present on span.end
* msg : human-readable message

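For the example line above, --pretty renders roughly the following (ISO timestamp, padded level, path padded to 60 characters, message, event tag):

```
2025-09-02T07:19:16.942Z INFO  Workflow SEO > Génération mots-clés    ✔ Génération mots-clés (4.58s) [span.end 4584.6ms]
```
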
### Summary

👉 Do not read the raw log.
Always use `tools/logViewer.js` to search **by keyword** or **by date** to navigate the logs efficiently.

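The viewer is also requirable: it exports setLogFile, getLast, search, and stats, which take the same filter options as the CLI. A minimal sketch (path assumes you run from the project root):

```js
// Programmatic query: last 50 ERROR records since Sept 2, formatted like --pretty.
const { search } = require('./tools/logviewer.cjs');

(async () => {
  const lines = await search({ level: 'ERROR', since: '2025-09-02', limit: 50, pretty: true });
  console.log(lines.join('\n'));
})();
```
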
tools/logviewer.cjs (new file, 338 lines)

@@ -0,0 +1,338 @@

// tools/logViewer.js (Pino-compatible JSONL + timearea + filters)
const fs = require('fs');
const path = require('path');
const os = require('os');
const readline = require('readline');

function resolveLatestLogFile(dir = path.resolve(process.cwd(), 'logs')) {
  if (!fs.existsSync(dir)) throw new Error(`Logs directory not found: ${dir}`);
  const files = fs.readdirSync(dir)
    .map(f => ({ file: f, stat: fs.statSync(path.join(dir, f)) }))
    .filter(f => f.stat.isFile())
    .sort((a, b) => b.stat.mtimeMs - a.stat.mtimeMs);
  if (!files.length) throw new Error(`No log files in ${dir}`);
  return path.join(dir, files[0].file);
}

let LOG_FILE = process.env.LOG_FILE
  ? path.resolve(process.cwd(), process.env.LOG_FILE)
  : resolveLatestLogFile();

const MAX_SAFE_READ_MB = 50;
const DEFAULT_LAST_LINES = 200;

function setLogFile(filePath) { LOG_FILE = path.resolve(process.cwd(), filePath); }

function MB(n){return n*1024*1024;}
function toInt(v,d){const n=parseInt(v,10);return Number.isFinite(n)?n:d;}

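// Numeric Pino levels → labels; 25 (PROMPT) is the custom level this commit surfaces.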
const LEVEL_MAP_NUM = {10:'TRACE',20:'DEBUG',25:'PROMPT',30:'INFO',40:'WARN',50:'ERROR',60:'FATAL'};
function normLevel(v){
  if (v==null) return 'UNKNOWN';
  if (typeof v==='number') return LEVEL_MAP_NUM[v]||String(v);
  const s=String(v).toUpperCase();
  return LEVEL_MAP_NUM[Number(s)] || s;
}

function parseWhen(obj){
  const t = obj.time ?? obj.timestamp;
  if (t==null) return null;
  if (typeof t==='number') return new Date(t);
  const d=new Date(String(t));
  return isNaN(d)?null:d;
}

function prettyLine(obj){
  const d=parseWhen(obj);
  const ts = d? d.toISOString() : '';
  const lvl = normLevel(obj.level).padEnd(5,' ');
  const mod = (obj.module || obj.path || obj.name || 'root').slice(0,60).padEnd(60,' ');
  const msg = obj.msg ?? obj.message ?? '';
  const extra = obj.evt ? ` [${obj.evt}${obj.dur_ms?` ${obj.dur_ms}ms`:''}]` : '';
  return `${ts} ${lvl} ${mod} ${msg}${extra}`;
}

function buildFilters({ level, mod, since, until, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms }) {
  let rx=null; if (regex){ try{rx=new RegExp(regex,'i');}catch{} }
  const sinceDate = since? new Date(since): null;
  const untilDate = until? new Date(until): null;
  const wantLvl = level? normLevel(level): null;

  // timearea: center + radius (in seconds)
  let areaStart = null, areaEnd = null;
  if (timeareaCenter && timeareaRadiusSec!=null) {
    const c = new Date(timeareaCenter);
    if (!isNaN(c)) {
      const rMs = Number(timeareaRadiusSec) * 1000;
      areaStart = new Date(c.getTime() - rMs);
      areaEnd = new Date(c.getTime() + rMs);
    }
  }

  // terms (may be multiple): matched against msg/path/module/evt/name/attrs (stringified)
  const terms = Array.isArray(filterTerms) ? filterTerms.filter(Boolean) : (filterTerms ? [filterTerms] : []);

  return { wantLvl, mod, sinceDate, untilDate, includes, rx, areaStart, areaEnd, terms };
}

function objectToSearchString(o) {
  const parts = [];
  if (o.msg!=null) parts.push(String(o.msg));
  if (o.message!=null) parts.push(String(o.message));
  if (o.module!=null) parts.push(String(o.module));
  if (o.path!=null) parts.push(String(o.path));
  if (o.name!=null) parts.push(String(o.name));
  if (o.evt!=null) parts.push(String(o.evt));
  if (o.span!=null) parts.push(String(o.span));
  if (o.attrs!=null) parts.push(safeStringify(o.attrs));
  return parts.join(' | ').toLowerCase();
}

function safeStringify(v){ try{return JSON.stringify(v);}catch{return String(v);} }

function passesAll(obj,f){
  if (!obj || typeof obj!=='object') return false;

  if (f.wantLvl && normLevel(obj.level)!==f.wantLvl) return false;

  if (f.mod){
    const mod = String(obj.module||obj.path||obj.name||'');
    if (mod!==f.mod) return false;
  }

  // since/until
  let d=parseWhen(obj);
  if (f.sinceDate || f.untilDate){
    if (!d) return false;
    if (f.sinceDate && d < f.sinceDate) return false;
    if (f.untilDate && d > f.untilDate) return false;
  }

  // timearea (centered window)
  if (f.areaStart || f.areaEnd) {
    if (!d) d = parseWhen(obj);
    if (!d) return false;
    if (f.areaStart && d < f.areaStart) return false;
    if (f.areaEnd && d > f.areaEnd) return false;
  }

  const msg = String(obj.msg ?? obj.message ?? '');
  if (f.includes && !msg.toLowerCase().includes(String(f.includes).toLowerCase())) return false;
  if (f.rx && !f.rx.test(msg)) return false;

  // terms: every --filter must match (AND)
  if (f.terms && f.terms.length) {
    const hay = objectToSearchString(obj); // multi-field haystack
    for (const t of f.terms) {
      if (!hay.includes(String(t).toLowerCase())) return false;
    }
  }

  return true;
}

function applyFilters(arr, f){ return arr.filter(o=>passesAll(o,f)); }
function safeParse(line){ try{return JSON.parse(line);}catch{return null;} }
function safeParseLines(lines){ const out=[]; for(const l of lines){const o=safeParse(l); if(o) out.push(o);} return out; }

async function getFileSize(file){ const st=await fs.promises.stat(file).catch(()=>null); if(!st) throw new Error(`Log file not found: ${file}`); return st.size; }
async function readAllLines(file){ const data=await fs.promises.readFile(file,'utf8'); const lines=data.split(/\r?\n/).filter(Boolean); return safeParseLines(lines); }

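// Read the file backwards in 64 KB chunks until roughly approxLines JSONL records are collected.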
async function tailJsonl(file, approxLines=DEFAULT_LAST_LINES){
  const fd=await fs.promises.open(file,'r');
  try{
    const stat=await fd.stat(); const chunk=64*1024;
    let pos=stat.size; let buffer=''; const lines=[];
    while(pos>0 && lines.length<approxLines){
      const sz=Math.min(chunk,pos); pos-=sz;
      const buf=Buffer.alloc(sz); await fd.read(buf,0,sz,pos);
      buffer = buf.toString('utf8') + buffer;
      let parts=buffer.split(/\r?\n/); buffer=parts.shift();
      for(const p of parts){ if(!p.trim()) continue; const o=safeParse(p); if(o) lines.push(o); }
    }
    if (buffer && buffer.trim()){ const o=safeParse(buffer); if(o) lines.unshift(o); }
    return lines.slice(-approxLines);
  } finally { await fd.close(); }
}

async function streamFilter(file, filters, limit){
  const rl=readline.createInterface({ input: fs.createReadStream(file,{encoding:'utf8'}), crlfDelay:Infinity });
  const out=[];
  for await (const line of rl){
    if (!line.trim()) continue;
    const o=safeParse(line); if(!o) continue;
    if (passesAll(o,filters)){ out.push(o); if (out.length>=limit) break; }
  }
  rl.close(); return out;
}

async function streamEach(file, onObj){
  const rl=readline.createInterface({ input: fs.createReadStream(file,{encoding:'utf8'}), crlfDelay:Infinity });
  for await (const line of rl){ if(!line.trim()) continue; const o=safeParse(line); if(o) onObj(o); }
  rl.close();
}

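// Small files are read whole; large files are tailed (approximately) before filtering.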
async function getLast(opts={}){
  const {
    lines=DEFAULT_LAST_LINES, level, module:mod, since, until, includes, regex,
    timeareaCenter, timeareaRadiusSec, filterTerms, pretty=false
  } = opts;

  const filters=buildFilters({level,mod,since,until,includes,regex,timeareaCenter,timeareaRadiusSec,filterTerms});
  const size=await getFileSize(LOG_FILE);

  if (size<=MB(MAX_SAFE_READ_MB)){
    const arr=await readAllLines(LOG_FILE);
    const out=applyFilters(arr.slice(-Math.max(lines,1)),filters);
    return pretty? out.map(prettyLine): out;
  }
  const out=await tailJsonl(LOG_FILE, lines*3);
  const filtered=applyFilters(out,filters).slice(-Math.max(lines,1));
  return pretty? filtered.map(prettyLine): filtered;
}

async function search(opts={}){
  const {
    limit=500, level, module:mod, since, until, includes, regex,
    timeareaCenter, timeareaRadiusSec, filterTerms, pretty=false
  } = opts;

  const filters=buildFilters({level,mod,since,until,includes,regex,timeareaCenter,timeareaRadiusSec,filterTerms});
  const size=await getFileSize(LOG_FILE);
  const res = size<=MB(MAX_SAFE_READ_MB)
    ? applyFilters(await readAllLines(LOG_FILE),filters).slice(-limit)
    : await streamFilter(LOG_FILE,filters,limit);
  return pretty? res.map(prettyLine): res;
}

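// Streaming aggregation: count matching records by level, module, or day.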
async function stats(opts={}){
  const {by='level', since, until, level, module:mod, includes, regex, timeareaCenter, timeareaRadiusSec, filterTerms}=opts;
  const filters=buildFilters({level,mod,since,until,includes,regex,timeareaCenter,timeareaRadiusSec,filterTerms});
  const agg={};
  await streamEach(LOG_FILE,(o)=>{
    if(!passesAll(o,filters)) return;
    let key;
    if (by==='day'){ const d=parseWhen(o); if(!d) return; key=d.toISOString().slice(0,10); }
    else if (by==='module'){ key= o.module || o.path || o.name || 'unknown'; }
    else { key= normLevel(o.level); }
    agg[key]=(agg[key]||0)+1;
  });
  return Object.entries(agg).sort((a,b)=>b[1]-a[1]).map(([k,v])=>({[by]:k, count:v}));
}

// --- CLI ---
if (require.main===module){
  (async ()=>{
    try{
      const args=parseArgs(process.argv.slice(2));
      if (args.help) return printHelp();
      if (args.file) setLogFile(args.file);
      // Support for positional filename arguments
      if (args.unknown && args.unknown.length > 0 && !args.file) {
        const possibleFile = args.unknown[0];
        if (possibleFile && !possibleFile.startsWith('-')) {
          setLogFile(possibleFile);
        }
      }

      const common = {
        level: args.level,
        module: args.module,
        since: args.since,
        until: args.until,
        includes: args.includes,
        regex: args.regex,
        timeareaCenter: args.timeareaCenter,
        timeareaRadiusSec: args.timeareaRadiusSec,
        filterTerms: args.filterTerms,
      };

      if (args.stats){
        const res=await stats({by:args.by||'level', ...common});
        return console.log(JSON.stringify(res,null,2));
      }
      if (args.search){
        const res=await search({limit:toInt(args.limit,500), ...common, pretty:!!args.pretty});
        return printResult(res,!!args.pretty);
      }
      const res=await getLast({lines:toInt(args.last,DEFAULT_LAST_LINES), ...common, pretty:!!args.pretty});
      return printResult(res,!!args.pretty);
    }catch(e){ console.error(`[logViewer] Error: ${e.message}`); process.exitCode=1; }
  })();
}

function parseArgs(argv){
  const o={ filterTerms: [] };
  for(let i=0;i<argv.length;i++){
    const a=argv[i], nx=()=> (i+1<argv.length?argv[i+1]:undefined);
    switch(a){
      case '--help': case '-h': o.help=true; break;
      case '--file': o.file=nx(); i++; break;
      case '--last': o.last=nx(); i++; break;
      case '--search': o.search=true; break;
      case '--limit': o.limit=nx(); i++; break;
      case '--level': o.level=nx(); i++; break;
      case '--module': o.module=nx(); i++; break;
      case '--since': o.since=nx(); i++; break;
      case '--until': o.until=nx(); i++; break;
      case '--includes': o.includes=nx(); i++; break;
      case '--regex': o.regex=nx(); i++; break;
      case '--pretty': o.pretty=true; break;
      case '--stats': o.stats=true; break;
      case '--by': o.by=nx(); i++; break;

      // NEW: --timearea <ISO> <seconds>
      case '--timearea': {
        o.timeareaCenter = nx(); i++;
        const radius = nx(); i++;
        o.timeareaRadiusSec = radius != null ? Number(radius) : undefined;
        break;
      }

      // NEW: --filter (repeatable)
      case '--filter': {
        const term = nx(); i++;
        if (term!=null) o.filterTerms.push(term);
        break;
      }

      default: (o.unknown??=[]).push(a);
    }
  }
  if (o.filterTerms.length===0) delete o.filterTerms;
  return o;
}

function printHelp(){
  const bin=`node ${path.relative(process.cwd(), __filename)}`;
  console.log(`
LogViewer (Pino-compatible JSONL)

Usage:
  ${bin} [--file logs/app.log] [--pretty] [--last 200] [filters...]
  ${bin} --search [--limit 500] [filters...]
  ${bin} --stats [--by level|module|day] [filters...]

Time filters:
  --since 2025-09-02T00:00:00Z
  --until 2025-09-02T23:59:59Z
  --timearea <ISO_CENTER> <RADIUS_SECONDS>   # centered window

Text filters:
  --includes "keyword in msg"
  --regex "(timeout|ECONNRESET)"
  --filter TERM   # multi-field (msg, path/module, name, evt, attrs). Repeatable. AND.

Other filters:
  --level 30|INFO|ERROR
  --module "Workflow SEO > Génération contenu multi-LLM"

Examples:
  ${bin} --timearea 2025-09-02T23:59:59Z 200 --pretty
  ${bin} --timearea 2025-09-02T12:00:00Z 900 --filter INFO --filter PROMPT --search --pretty
  ${bin} --last 300 --level ERROR --filter "Génération contenu" --pretty
`);}

function printResult(res, pretty){ console.log(pretty? res.join(os.EOL) : JSON.stringify(res,null,2)); }

module.exports = { setLogFile, getLast, search, stats };