// tools/audit-unused.cjs
/*
Audit "unused":
- Files that are unreachable from the entrypoints
- Exports that are never referenced
- Orphaned imports
⚠️ Best-effort static analysis: if you use dynamic requires, dependency injection, or eval, add a @keep marker so an export is not flagged:
// @keep:export ExportName
*/
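// Typical usage (assumed; the script reads no CLI arguments): run from the project root with
//   node tools/audit-unused.cjs
// and review the printed report.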
const fs = require('fs');
const path = require('path');
const ROOT = process.cwd().replace(/\\/g, '/');
const EXTS = ['.js', '.cjs', '.mjs', '.jsx', '.html', '.htm'];
const IGNORE_DIRS = new Set(['node_modules', '.git', 'dist', 'build', 'out', '.next', '.vercel']);
// Exclusion patterns for files that are normally "unreachable" but should be kept
const EXCLUSION_PATTERNS = [
  /^tests\//,   // Test files
  /^tools\//,   // Development tools
  /\.test\./,   // Test files anywhere
  /\.spec\./,   // Spec files
  /^scripts\//, // Build/deploy scripts
  /^docs?\//,   // Documentation
];
function getEntrypoints() {
  const entrypoints = [
    'lib/test-manual.js',
    'lib/Main.js',
  ];
  // Add package.json main entry
  try {
    const pkg = JSON.parse(fs.readFileSync(path.join(ROOT, 'package.json'), 'utf8'));
    if (pkg.main) {
      entrypoints.push(pkg.main);
    }
    // Add npm scripts that reference files
    if (pkg.scripts) {
      for (const [scriptName, command] of Object.entries(pkg.scripts)) {
        // Extract file references from npm scripts
        const fileRefs = command.match(/node\s+([^\s]+\.js)/g);
        if (fileRefs) {
          fileRefs.forEach(ref => {
            const file = ref.replace(/^node\s+/, '');
            if (!file.startsWith('-') && !file.includes('*')) {
              entrypoints.push(file);
            }
          });
        }
      }
    }
  } catch (e) {
    // package.json not found or invalid; fall back to the default entrypoints
  }
  return [...new Set(entrypoints)].map(p => path.resolve(ROOT, p));
}
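// Example (hypothetical script entry): a package.json script like
//   "start": "node lib/Main.js"
// would add lib/Main.js as an entrypoint; flags and glob arguments are filtered out above.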
const ENTRYPOINTS = getEntrypoints();
const files = [];
// Recursively collect every source file under ROOT, skipping ignored directories
(function walk(dir) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.isDirectory()) {
      if (!IGNORE_DIRS.has(entry.name)) walk(path.join(dir, entry.name));
    } else {
      const ext = path.extname(entry.name);
      if (EXTS.includes(ext)) files.push(path.join(dir, entry.name));
    }
  }
})(ROOT);
const byNorm = new Map();
for (const f of files) byNorm.set(path.normalize(f), f);
function resolveImport(fromFile, spec) {
  // ignore bare package specifiers ('openai', 'undici', etc.)
  if (!spec.startsWith('.') && !spec.startsWith('/')) return null;
  // Handle special cases for static files
  if (spec.match(/\.(html|css|png|jpg|svg|json)$/)) {
    // For static files, try direct resolution first
    const base = path.resolve(path.dirname(fromFile), spec);
    const n = path.normalize(base);
    if (byNorm.has(n)) return byNorm.get(n);
    // Try common static directories
    const staticDirs = ['public', 'tools', 'assets', 'static'];
    for (const dir of staticDirs) {
      const staticPath = path.resolve(ROOT, dir, path.basename(spec));
      const n = path.normalize(staticPath);
      if (byNorm.has(n)) return byNorm.get(n);
    }
    return null;
  }
  // Handle JS files
  const base = path.resolve(path.dirname(fromFile), spec);
  const candidates = [
    base,
    base + '.js', base + '.cjs', base + '.mjs', base + '.jsx',
    path.join(base, 'index.js'), path.join(base, 'index.cjs'), path.join(base, 'index.mjs'),
  ];
  for (const c of candidates) {
    const n = path.normalize(c);
    if (byNorm.has(n)) return byNorm.get(n);
  }
  return null;
}
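// Resolution order, illustrated with hypothetical paths: './services/Foo' imported from lib/Main.js
// is tried as lib/services/Foo, then with .js/.cjs/.mjs/.jsx appended, then as
// lib/services/Foo/index.js|.cjs|.mjs. A static spec like './panel.html' is tried next to the
// importer first, then under public/, tools/, assets/ and static/ by basename.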
const RE = {
  // imports (more comprehensive)
  require: /require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/g,
  importFrom: /import\s+[^'"]+\s+from\s+['"`]([^'"`]+)['"`]/g,
  importOnly: /import\s+['"`]([^'"`]+)['"`]\s*;?/g,
  // Additional require patterns (requireAnywhere repeats the `require` pattern;
  // requireWithDestructure also catches const/let/var { ... } = require('...'))
  requireAnywhere: /require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/g,
  requireWithDestructure: /(?:const|let|var)\s*{\s*[^}]*}\s*=\s*require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/g,
  // Dynamic imports
  dynamicImport: /import\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/g,
  // CJS exports
  moduleExportsObj: /module\.exports\s*=\s*{([\s\S]*?)}/g,
  moduleExportsAssign: /module\.exports\.(\w+)\s*=/g,
  exportsAssign: /exports\.(\w+)\s*=/g,
  // ESM exports
  exportNamed: /export\s+(?:const|let|var|function|class)\s+(\w+)/g,
  exportList: /export\s*{\s*([^}]+)\s*}/g,
  // usage of members
  // const { a, b } = require('./m')  (the `import { a } from './m'` form is handled in collectImportedNames)
  destructured: /(?:const|let|var|import)\s*{([^}]+)}\s*=\s*(?:require\(|from\s+['"`])/g,
  // require('./m').foo (member access directly on the call)
  dotUse: /(?:require\(\s*['"`][^'"`]+['"`]\s*\)|import\(\s*['"`][^'"`]+['"`]\s*\))\.(\w+)/g,
  keep: /@keep:export\s+(\w+)/g,
};
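// Examples of source lines these patterns are meant to catch (names are illustrative):
//   require('./db')               -> require / requireAnywhere
//   import { run } from './db'    -> importFrom
//   import('./db')                -> dynamicImport
//   module.exports = { run }      -> moduleExportsObj
//   export function run() {}      -> exportNamed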
function parseFile(file) {
  const txt = fs.readFileSync(file, 'utf8');
  const ext = path.extname(file).toLowerCase();
  const imports = [];
  const exports = new Set();
  const keep = new Set();
  // Handle HTML files
  if (ext === '.html' || ext === '.htm') {
    // Extract script src references
    const scriptSrcRE = /<script[^>]+src\s*=\s*['"`]([^'"`]+)['"`][^>]*>/gi;
    let m;
    while ((m = scriptSrcRE.exec(txt))) {
      const src = m[1];
      if (src.startsWith('./') || src.startsWith('../') || (!src.startsWith('http') && !src.startsWith('//'))) {
        imports.push(src);
      }
    }
    // HTML files don't have exports in the traditional sense
    return { file, txt, imports, exports: [], keep: [] };
  }
  // Handle JS files - comprehensive import detection
  for (const rx of [RE.require, RE.importFrom, RE.importOnly, RE.requireAnywhere, RE.requireWithDestructure, RE.dynamicImport]) {
    let m;
    while ((m = rx.exec(txt))) imports.push(m[1]);
  }
  // Template literal imports (basic detection)
  const templateImportRE = /(?:require|import)\s*\(\s*`([^`]+)`/g;
  let m;
  while ((m = templateImportRE.exec(txt))) {
    // Only add if it looks like a static path (no ${} interpolation)
    if (!m[1].includes('${')) {
      imports.push(m[1]);
    }
  }
  // Special case: require inside function calls (common pattern)
  const conditionalRequireRE = /(?:if|when|case|function|=>|\{)\s*[^}]*require\s*\(\s*['"`]([^'"`]+)['"`]\s*\)/g;
  while ((m = conditionalRequireRE.exec(txt))) {
    imports.push(m[1]);
  }
  // Static file references in Express/servers
  // res.sendFile(path.join(__dirname, 'file.html'))
  const sendFileRE = /(?:sendFile|readFile|createReadStream)\s*\(\s*(?:path\.join\s*\([^)]*?,\s*['"`]([^'"`]+)['"`]|['"`]([^'"`]+)['"`])/g;
  while ((m = sendFileRE.exec(txt))) {
    const filePath = m[1] || m[2];
    if (filePath && (filePath.endsWith('.html') || filePath.endsWith('.css') || filePath.endsWith('.js') || filePath.endsWith('.json'))) {
      // Convert relative path to current directory structure
      imports.push('./' + filePath.replace(/^\.\//, ''));
    }
  }
  // File references in strings (href, src, etc.)
  const fileRefRE = /(?:href|src|action|data-src|url)\s*=\s*['"`]\.?\/([^'"`]+\.(?:html|css|js|json|png|jpg|svg))['"`]/g;
  while ((m = fileRefRE.exec(txt))) {
    imports.push('./' + m[1]);
  }
  // path.join patterns with file extensions
  const pathJoinRE = /path\.join\s*\([^)]*?,\s*['"`]([^'"`]*\.(?:html|css|js|json))['"`]/g;
  while ((m = pathJoinRE.exec(txt))) {
    imports.push('./' + m[1].replace(/^\.\//, ''));
  }
  // CJS object export
  while ((m = RE.moduleExportsObj.exec(txt))) {
    const inside = m[1];
    inside.split(',').forEach(p => {
      const n = (p.split(':')[0] || '').trim().replace(/\s+/g, '');
      if (n) exports.add(n);
    });
  }
  while ((m = RE.moduleExportsAssign.exec(txt))) exports.add(m[1]);
  while ((m = RE.exportsAssign.exec(txt))) exports.add(m[1]);
  // ESM named exports
  while ((m = RE.exportNamed.exec(txt))) exports.add(m[1]);
  while ((m = RE.exportList.exec(txt))) {
    m[1].split(',').forEach(s => {
      const n = s.split(' as ')[0].trim();
      if (n) exports.add(n);
    });
  }
  while ((m = RE.keep.exec(txt))) keep.add(m[1]);
  return { file, txt, imports, exports: [...exports], keep: [...keep] };
}
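// Shape returned for a JS file (values are illustrative):
//   { file: '/abs/path/lib/Main.js', txt: '<source>', imports: ['./config', 'express'],
//     exports: ['run'], keep: [] }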
const parsed = files.map(parseFile);
// Build edges
const edges = new Map(); // from -> Set(to)
const importMap = new Map(); // from -> [{raw, resolved}]
for (const p of parsed) {
  const tos = new Set();
  const details = [];
  for (const spec of p.imports) {
    const resolved = resolveImport(p.file, spec);
    if (resolved) tos.add(resolved);
    details.push({ raw: spec, resolved });
  }
  edges.set(p.file, tos);
  importMap.set(p.file, details);
}
// Reachability from entrypoints
const reachable = new Set();
function dfs(start) {
  const stack = [start];
  while (stack.length) {
    const f = stack.pop();
    if (!byNorm.has(path.normalize(f))) continue;
    if (reachable.has(f)) continue;
    reachable.add(f);
    const tos = edges.get(f) || new Set();
    tos.forEach(t => stack.push(t));
  }
}
ENTRYPOINTS.forEach(dfs);
// Usage of exports (very simple heuristic)
const exportIndex = new Map(); // file -> Set(exportName)
parsed.forEach(p => exportIndex.set(p.file, new Set(p.exports)));
const used = new Map(); // file -> Set(usedName)
for (const p of parsed) {
  const txt = p.txt;
  // Destructured usage: the names are detected here, but a single static pass cannot tell
  // which target file they belong to; the per-file mapping is done by collectImportedNames() below.
  let m;
  while ((m = RE.destructured.exec(txt))) {
    const names = m[1].split(',').map(s => s.split(':')[0].trim());
    names.forEach(n => {
      if (!n) return;
      // mark globally; the owning file is resolved later from the imports
    });
  }
  // Dot usage (require('./m').foo) can't be resolved to a file reliably here either;
  // it is aggregated per target in collectImportedNames() below.
}
function collectImportedNames(file) {
  // Returns a map: targetFile -> Set(importedNames)
  const res = new Map();
  const txt = fs.readFileSync(file, 'utf8');
  // import { a, b } from './x'
  const importNamed = /import\s*{([^}]+)}\s*from\s*['"`]([^'"`]+)['"`]/g;
  let m;
  while ((m = importNamed.exec(txt))) {
    const names = m[1].split(',').map(s => s.split(' as ')[0].trim()).filter(Boolean);
    const target = resolveImport(file, m[2]);
    if (target) {
      if (!res.has(target)) res.set(target, new Set());
      names.forEach(n => res.get(target).add(n));
    }
  }
  // const { a, b } = require('./x')
  const reqNamed = /(?:const|let|var)\s*{([^}]+)}\s*=\s*require\(\s*['"`]([^'"`]+)['"`]\s*\)/g;
  while ((m = reqNamed.exec(txt))) {
    const names = m[1].split(',').map(s => s.split(':')[0].trim()).filter(Boolean);
    const target = resolveImport(file, m[2]);
    if (target) {
      if (!res.has(target)) res.set(target, new Set());
      names.forEach(n => res.get(target).add(n));
    }
  }
  // require('./x').foo
  const reqDot = /require\(\s*['"`]([^'"`]+)['"`]\s*\)\.(\w+)/g;
  while ((m = reqDot.exec(txt))) {
    const target = resolveImport(file, m[1]);
    const name = m[2];
    if (target) {
      if (!res.has(target)) res.set(target, new Set());
      res.get(target).add(name);
    }
  }
  // import('./x').foo (member access directly on a dynamic import;
  // awaited forms like (await import('./x')).foo are not matched by this pattern)
  const impDot = /import\(\s*['"`]([^'"`]+)['"`]\s*\)\.(\w+)/g;
  while ((m = impDot.exec(txt))) {
    const target = resolveImport(file, m[1]);
    const name = m[2];
    if (target) {
      if (!res.has(target)) res.set(target, new Set());
      res.get(target).add(name);
    }
  }
  return res;
}
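// Example (illustrative): if some file contains `const { run } = require('./jobs')`,
// this returns a map whose key is the resolved jobs file and whose value is Set { 'run' }.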
const usageByTarget = new Map(); // targetFile -> Set(namesUsed)
for (const p of parsed) {
  const m = collectImportedNames(p.file);
  for (const [target, names] of m) {
    if (!usageByTarget.has(target)) usageByTarget.set(target, new Set());
    names.forEach(n => usageByTarget.get(target).add(n));
  }
}
function rel(p) { return path.relative(ROOT, p).replace(/\\/g, '/'); }
const report = {
  entrypoints: ENTRYPOINTS.filter(fs.existsSync).map(rel),
  unreachableFiles: [],
  brokenImports: [],
  unusedExports: [],
};
function isExcluded(filePath) {
  const relPath = rel(filePath);
  return EXCLUSION_PATTERNS.some(pattern => pattern.test(relPath));
}
for (const f of files) {
  if (!reachable.has(f) && !ENTRYPOINTS.includes(f) && !isExcluded(f)) {
    report.unreachableFiles.push(rel(f));
  }
}
for (const [from, list] of importMap) {
  for (const { raw, resolved } of list) {
    if ((raw.startsWith('.') || raw.startsWith('/')) && !resolved) {
      report.brokenImports.push({ from: rel(from), spec: raw });
    }
  }
}
for (const p of parsed) {
  const exp = exportIndex.get(p.file);
  if (!exp || exp.size === 0) continue;
  const usedSet = usageByTarget.get(p.file) || new Set();
  const keepMarks = new Set(p.keep || []);
  const unused = [...exp].filter(n => !usedSet.has(n) && !keepMarks.has(n));
  if (unused.length > 0) {
    report.unusedExports.push({ file: rel(p.file), unused });
  }
}
console.log('=== ENHANCED UNUSED AUDIT REPORT ===');
console.log('');
console.log('Entrypoints:', report.entrypoints);
console.log('');
// Show excluded patterns
console.log('— Exclusion patterns (files automatically kept):');
EXCLUSION_PATTERNS.forEach(pattern => console.log(`${pattern.source}`));
console.log('');
console.log('— Unreachable files (dead):');
if (report.unreachableFiles.length === 0) console.log(' ✔ none');
else report.unreachableFiles.sort().forEach(f => console.log(' •', f));
console.log('');
console.log('— Broken relative imports:');
if (report.brokenImports.length === 0) console.log(' ✔ none');
else report.brokenImports.forEach(i => console.log(`${i.from} -> ${i.spec}`));
console.log('');
console.log('— Unused exports:');
if (report.unusedExports.length === 0) console.log(' ✔ none');
else report.unusedExports
  .sort((a, b) => a.file.localeCompare(b.file))
  .forEach(r => console.log(`${r.file}: ${r.unused.join(', ')}`));
console.log('');
console.log('Tips:');
console.log(' • Add "@keep:export Name" comment to protect exports from false positives');
console.log(' • Test/tool files are automatically excluded from "unreachable" reports');
console.log(' • HTML files are now supported (script src detection)');
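// Example report excerpt (illustrative output only; the file name is made up):
//   — Unreachable files (dead):
//    • lib/old/LegacyThing.js
//   — Broken relative imports:
//    ✔ none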