Added plan.md with complete architecture for format-agnostic content generation: - Support for Markdown, HTML, Plain Text, JSON formats - New FormatExporter module with neutral data structure - Integration strategy with existing ContentAssembly and ArticleStorage - Bonus features: SEO metadata generation, readability scoring, WordPress Gutenberg format - Implementation roadmap with 4 phases (6h total estimated) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
399 lines
15 KiB
JavaScript
399 lines
15 KiB
JavaScript
// ========================================
|
|
// FICHIER: lib/main.js - CONVERTI POUR NODE.JS
|
|
// RESPONSABILITÉ: COEUR DU WORKFLOW DE GÉNÉRATION
|
|
// ========================================
|
|
|
|
// 🔧 CONFIGURATION ENVIRONNEMENT
|
|
require('dotenv').config({ path: require('path').join(__dirname, '..', '.env') });
|
|
|
|
|
|
// 🔄 IMPORTS NODE.JS (remplace les dépendances Apps Script)
|
|
const { getBrainConfig } = require('./BrainConfig');
|
|
const { extractElements, buildSmartHierarchy } = require('./ElementExtraction');
|
|
const { generateMissingKeywords } = require('./MissingKeywords');
|
|
const { generateWithContext } = require('./ContentGeneration');
|
|
const { injectGeneratedContent, cleanStrongTags } = require('./ContentAssembly');
|
|
const { validateWorkflowIntegrity, logSh } = require('./ErrorReporting');
|
|
const { saveGeneratedArticleOrganic } = require('./ArticleStorage');
|
|
const { tracer } = require('./trace.js');
|
|
const { fetchXMLFromDigitalOcean } = require('./DigitalOceanWorkflow');
|
|
const { spawn } = require('child_process');
|
|
const path = require('path');
|
|
|
|
// Guard flag: the viewer must only be opened once per process
let logViewerLaunched = false;

/**
 * Open the HTML log viewer (tools/logs-viewer.html) in a browser.
 *
 * No-op after the first successful launch and when NODE_ENV === 'test'.
 * Picks the launch command per environment (native Windows, WSL via the
 * /mnt/c interop mount, macOS `open`, other Unix `xdg-open`). Failures
 * are logged as warnings and never interrupt the workflow.
 */
function launchLogViewer() {
  if (logViewerLaunched || process.env.NODE_ENV === 'test') return;

  try {
    const viewerFile = path.join(__dirname, '..', 'tools', 'logs-viewer.html');
    // Normalize Windows backslashes so the file:// URL is valid everywhere
    const fileUrl = `file:///${viewerFile.replace(/\\/g, '/')}`;

    const runningOnWSL = process.env.WSL_DISTRO_NAME || process.env.WSL_INTEROP;
    const runningOnWindows = process.platform === 'win32';

    // Select the spawn command + arguments for the current environment
    let command;
    let args;
    if (runningOnWindows && !runningOnWSL) {
      // Native Windows: launch Edge through cmd's `start`
      command = 'cmd';
      args = ['/c', 'start', 'msedge', fileUrl];
    } else if (runningOnWSL) {
      // WSL: reach Windows' cmd.exe through the interop mount
      command = '/mnt/c/Windows/System32/cmd.exe';
      args = ['/c', 'start', 'msedge', fileUrl];
    } else {
      // macOS uses `open`, other Unixes use `xdg-open`
      command = process.platform === 'darwin' ? 'open' : 'xdg-open';
      args = [fileUrl];
    }

    const viewerProcess = spawn(command, args, {
      detached: true,
      stdio: 'ignore'
    });
    // Detach so the browser does not keep this Node process alive
    viewerProcess.unref();

    logViewerLaunched = true;
    logSh('🌐 Log viewer lancé', 'INFO');
  } catch (error) {
    logSh(`⚠️ Impossible d'ouvrir le log viewer: ${error.message}`, 'WARNING');
  }
}
|
|
|
|
/**
 * WORKFLOW CORE — compatible with Make.com, Digital Ocean AND Node.js.
 *
 * Runs the 11-step generation pipeline: prepare CSV data, decode and
 * preprocess the XML template, extract elements, generate missing
 * keywords, build the hierarchy, generate content, assemble the final
 * XML, validate integrity, store the article and build the response.
 * Every step is wrapped in tracer.run() for timing/observability.
 *
 * @param {object} data - Workflow input.
 * @param {string} data.xmlTemplate - XML template (base64-encoded). May be set by prepareCSVData() when absent.
 * @param {object} [data.csvData] - Pre-built CSV data; when absent, data.rowNumber drives a Google Sheets fetch.
 * @param {number} [data.rowNumber] - Sheet row to load when csvData is missing.
 * @param {string} data.source - 'make_com' | 'digital_ocean_autonomous' | 'node_server'
 * @returns {Promise<object>} Final response built by buildWorkflowResponse().
 */
async function handleFullWorkflow(data) {
  // Open the log viewer at workflow start (no-op after the first call)
  launchLogViewer();

  return await tracer.run('Main.handleFullWorkflow()', async () => {
    await tracer.annotate({ source: data.source || 'node_server', mc0: data.csvData?.mc0 || data.rowNumber });

    // 1. PREPARE CSV DATA (may also fetch + set data.xmlTemplate — see prepareCSVData)
    const csvData = await tracer.run('Main.prepareCSVData()', async () => {
      const result = await prepareCSVData(data);
      await tracer.event(`CSV préparé: ${result.mc0}`, { csvKeys: Object.keys(result) });
      return result;
    }, { rowNumber: data.rowNumber, source: data.source });

    // 2. DECODE THE XML TEMPLATE (base64 or plain XML text)
    const xmlString = await tracer.run('Main.decodeXMLTemplate()', async () => {
      const result = decodeXMLTemplate(data.xmlTemplate);
      await tracer.event(`XML décodé: ${result.length} caractères`);
      return result;
    }, { templateLength: data.xmlTemplate?.length });

    // 3. XML PREPROCESSING (cleanup passes)
    const processedXML = await tracer.run('Main.preprocessXML()', async () => {
      const result = preprocessXML(xmlString);
      await tracer.event('XML préprocessé');
      // Exposed globally — presumably read by downstream modules; TODO confirm
      global.currentXmlTemplate = result;
      return result;
    }, { originalLength: xmlString?.length });

    // 4. EXTRACT ELEMENTS from the preprocessed template
    const elements = await tracer.run('ElementExtraction.extractElements()', async () => {
      const result = await extractElements(processedXML, csvData);
      await tracer.event(`${result.length} éléments extraits`);
      return result;
    }, { xmlLength: processedXML?.length, mc0: csvData.mc0 });

    // 5. GENERATE MISSING KEYWORDS — falls back to the original elements
    // when the generator returns an empty result
    const finalElements = await tracer.run('MissingKeywords.generateMissingKeywords()', async () => {
      const updatedElements = await generateMissingKeywords(elements, csvData);
      const result = Object.keys(updatedElements).length > 0 ? updatedElements : elements;
      await tracer.event('Mots-clés manquants traités');
      return result;
    }, { elementsCount: elements.length, mc0: csvData.mc0 });

    // 6. BUILD THE SMART HIERARCHY (section tree used for generation)
    const hierarchy = await tracer.run('ElementExtraction.buildSmartHierarchy()', async () => {
      const result = await buildSmartHierarchy(finalElements);
      await tracer.event(`Hiérarchie construite: ${Object.keys(result).length} sections`);
      return result;
    }, { finalElementsCount: finalElements.length });

    // 7. 🎯 CONTENT GENERATION with Selective Enhancement (Phase 2)
    const generatedContent = await tracer.run('ContentGeneration.generateWithContext()', async () => {
      const result = await generateWithContext(hierarchy, csvData);
      await tracer.event(`Contenu généré: ${Object.keys(result).length} éléments`);
      return result;
    }, { elementsCount: Object.keys(hierarchy).length, personality: csvData.personality?.nom });

    // 8. ASSEMBLE THE FINAL XML (inject generated content into the template)
    const finalXML = await tracer.run('ContentAssembly.injectGeneratedContent()', async () => {
      const result = injectGeneratedContent(processedXML, generatedContent, finalElements);
      await tracer.event('XML final assemblé');
      return result;
    }, { contentPieces: Object.keys(generatedContent).length, elementsCount: finalElements.length });

    // 9. INTEGRITY VALIDATION of the assembled output
    const validationReport = await tracer.run('ErrorReporting.validateWorkflowIntegrity()', async () => {
      const result = validateWorkflowIntegrity(finalElements, generatedContent, finalXML, csvData);
      await tracer.event(`Validation: ${result.status}`);
      return result;
    }, { finalXMLLength: finalXML?.length, contentKeys: Object.keys(generatedContent).length });

    // 10. ARTICLE STORAGE (non-blocking: saveArticle returns null on failure)
    const articleStorage = await tracer.run('Main.saveArticle()', async () => {
      const result = await saveArticle(finalXML, generatedContent, finalElements, csvData, data.source);
      if (result) {
        await tracer.event(`Article sauvé: ID ${result.articleId}`);
      }
      return result;
    }, { source: data.source, mc0: csvData.mc0, elementsCount: finalElements.length });

    // 11. BUILD THE FINAL RESPONSE payload
    const response = await tracer.run('Main.buildWorkflowResponse()', async () => {
      const result = await buildWorkflowResponse(finalXML, generatedContent, finalElements, csvData, validationReport, articleStorage, data.source);
      await tracer.event(`Response keys: ${Object.keys(result).join(', ')}`);
      return result;
    }, { validationStatus: validationReport?.status, articleId: articleStorage?.articleId });

    return response;
  }, { source: data.source || 'node_server', rowNumber: data.rowNumber, hasXMLTemplate: !!data.xmlTemplate });
}
|
|
|
|
// ============= PRÉPARATION DONNÉES =============
|
|
|
|
/**
 * Prepare the CSV data for the workflow depending on the source — ASYNC for Node.js.
 *
 * Two input modes:
 *  - data.csvData already populated (Digital Ocean or direct call): returned as-is.
 *  - data.rowNumber: fetches the row from Google Sheets, then downloads the
 *    XML template named in that row (column I) from Digital Ocean.
 *
 * SIDE EFFECT: mutates the caller's object — sets data.xmlTemplate (base64)
 * so the workflow can decode it in the next step.
 *
 * @param {object} data - Workflow input (data.xmlTemplate may be set here).
 * @returns {Promise<object>} The CSV row data (data.csvData or config.data).
 * @throws {Error} When Google Sheets is unreachable, the XML filename is
 *                 missing, Digital Ocean fails, or no rowNumber was given.
 */
async function prepareCSVData(data) {
  if (data.csvData && data.csvData.mc0) {
    // Data already prepared (Digital Ocean or direct call)
    return data.csvData;
  } else if (data.rowNumber) {
    // 1. FETCH CSV DATA from the Google Sheet (MANDATORY)
    await logSh(`🧠 Récupération données CSV ligne ${data.rowNumber}...`, 'INFO');
    const config = await getBrainConfig(data.rowNumber);
    if (!config.success) {
      await logSh('❌ ÉCHEC: Impossible de récupérer les données Google Sheets', 'ERROR');
      throw new Error('FATAL: Google Sheets inaccessible - arrêt du workflow');
    }

    // 2. CHECK THE XML FILENAME from the Google Sheet (column I)
    const xmlFileName = config.data.xmlFileName;
    if (!xmlFileName || xmlFileName.trim() === '') {
      await logSh('❌ ÉCHEC: Nom fichier XML manquant (colonne I Google Sheets)', 'ERROR');
      throw new Error('FATAL: XML filename manquant - arrêt du workflow');
    }

    await logSh(`📋 CSV récupéré: ${config.data.mc0}`, 'INFO');
    await logSh(`📄 XML filename: ${xmlFileName}`, 'INFO');

    // 3. FETCH THE XML CONTENT from Digital Ocean with AUTH (MANDATORY)
    await logSh(`🌊 Récupération XML template depuis Digital Ocean (avec signature AWS)...`, 'INFO');
    let xmlContent;
    try {
      xmlContent = await fetchXMLFromDigitalOcean(xmlFileName);
      await logSh(`✅ XML récupéré: ${xmlContent.length} caractères`, 'INFO');
    } catch (digitalOceanError) {
      await logSh(`❌ ÉCHEC: Digital Ocean inaccessible - ${digitalOceanError.message}`, 'ERROR');
      throw new Error(`FATAL: Digital Ocean échec - arrêt du workflow: ${digitalOceanError.message}`);
    }

    // 4. ENCODE THE XML for the workflow (same base64 shape Make.com sends).
    // If a file was fetched, use it; otherwise fall back to the default
    // template already present in config.data.xmlTemplate.
    // NOTE(review): the fetch above throws on failure, so xmlContent should
    // always be set here — the else-if fallback only triggers if
    // fetchXMLFromDigitalOcean resolves with an empty value; confirm intent.
    if (xmlContent) {
      data.xmlTemplate = Buffer.from(xmlContent).toString('base64');
      await logSh('🔄 XML depuis Digital Ocean encodé base64 pour le workflow', 'DEBUG');
    } else if (config.data.xmlTemplate) {
      data.xmlTemplate = Buffer.from(config.data.xmlTemplate).toString('base64');
      await logSh('🔄 XML template par défaut encodé base64 pour le workflow', 'DEBUG');
    }

    return config.data;
  } else {
    throw new Error('FATAL: Données CSV invalides - rowNumber requis');
  }
}
|
|
|
|
/**
 * Decode the XML template from the workflow payload — NODE.JS VERSION.
 *
 * Accepts either plain XML text or a base64-encoded XML string and always
 * returns plain XML text.
 *
 * FIX: Buffer.from(str, 'base64') never throws in Node — it silently skips
 * invalid characters — so the old try/catch fallback was dead code and
 * non-base64 input could silently decode to binary junk. We now validate
 * that the decoded result actually looks like XML before accepting it.
 *
 * @param {string} xmlTemplate - Raw XML, or base64-encoded XML.
 * @returns {string} The decoded (or passthrough) XML string.
 * @throws {Error} When the template is missing or empty.
 */
function decodeXMLTemplate(xmlTemplate) {
  if (!xmlTemplate) {
    throw new Error('Template XML manquant');
  }

  // Already plain XML text — no decoding needed
  if (xmlTemplate.startsWith('<?xml') || xmlTemplate.startsWith('<')) {
    return xmlTemplate;
  }

  // 🔄 NODE.JS: attempt base64 decoding, then sanity-check the result
  const decoded = Buffer.from(xmlTemplate, 'base64').toString('utf8');
  if (decoded.trimStart().startsWith('<')) {
    return decoded;
  }

  // Decoded bytes do not look like XML: treat the input as plain text
  logSh('🔍 XML pas encodé base64, utilisation directe', 'DEBUG'); // Using logSh instead of console.log
  return xmlTemplate;
}
|
|
|
|
/**
 * Normalize the raw XML template before element extraction — IDENTICAL
 * to the Apps Script version.
 *
 * Currently a single cleanup pass (strip <strong> tags via
 * ContentAssembly.cleanStrongTags); further passes can be chained here.
 *
 * @param {string} xmlString - Decoded XML template.
 * @returns {string} The cleaned XML string.
 */
function preprocessXML(xmlString) {
  // Only cleanup pass for now: remove <strong> markup
  return cleanStrongTags(xmlString);
}
|
|
|
|
// ============= SAUVEGARDE =============
|
|
|
|
/**
 * Persist the generated article together with its source metadata — ASYNC
 * for Node.js.
 *
 * Storage failures are logged as warnings and yield null so the workflow
 * can finish without an article record (non-blocking by design).
 *
 * @param {string} finalXML - Assembled XML document.
 * @param {object} generatedContent - Map of element key -> generated text.
 * @param {Array} finalElements - Elements the content was generated for.
 * @param {object} csvData - Row data that drove the generation.
 * @param {string} source - Workflow origin tag (defaults to 'node_server').
 * @returns {Promise<object|null>} Storage descriptor, or null on failure.
 */
async function saveArticle(finalXML, generatedContent, finalElements, csvData, source) {
  await logSh('💾 Sauvegarde article...', 'INFO');

  // Payload: the article itself plus the elements it was built from
  const payload = {
    xmlContent: finalXML,
    generatedTexts: generatedContent,
    elementsGenerated: finalElements.length,
    originalElements: finalElements
  };

  // Metadata describing how and where this article was produced
  const metadata = {
    antiDetectionLevel: 'Selective_Enhancement',
    llmUsed: 'claude+openai+gemini+mistral',
    workflowVersion: '2.0-NodeJS',
    source: source || 'node_server',
    enhancementTechniques: ['technical_terms_gpt4', 'transitions_gemini', 'personality_style_mistral']
  };

  try {
    const stored = await saveGeneratedArticleOrganic(payload, csvData, metadata);
    await logSh(`✅ Article sauvé: ID ${stored.articleId}`, 'INFO');
    return stored;
  } catch (storageError) {
    // Non-blocking: a storage failure must not abort the whole workflow
    await logSh(`⚠️ Erreur sauvegarde: ${storageError.toString()}`, 'WARNING');
    return null;
  }
}
|
|
|
|
// ============= RÉPONSE =============
|
|
|
|
/**
 * Assemble the final workflow response payload — ASYNC for logSh.
 *
 * Bundles the generated XML/texts, the originating CSV data, validation
 * and storage results, Phase-2 anti-detection metadata and performance
 * stats into a single response object.
 *
 * @param {string} finalXML - Assembled XML document.
 * @param {object} generatedContent - Map of element key -> generated text.
 * @param {Array} finalElements - Elements used for generation.
 * @param {object} csvData - Row data that drove the generation.
 * @param {object} validationReport - Output of validateWorkflowIntegrity().
 * @param {object|null} articleStorage - Storage descriptor, or null.
 * @param {string} source - Workflow origin tag.
 * @returns {Promise<object>} The complete response object.
 */
async function buildWorkflowResponse(finalXML, generatedContent, finalElements, csvData, validationReport, articleStorage, source) {
  // Performance stats reported alongside the content
  const stats = {
    xmlLength: finalXML.length,
    contentPieces: Object.keys(generatedContent).length,
    wordCount: calculateTotalWordCount(generatedContent),
    validationStatus: validationReport.status
  };

  const response = {
    success: true,
    source: source,
    xmlContent: finalXML,
    generatedTexts: generatedContent,
    elementsGenerated: finalElements.length,
    personality: csvData.personality?.nom || 'Unknown',
    csvData: {
      mc0: csvData.mc0,
      t0: csvData.t0,
      personality: csvData.personality?.nom
    },
    timestamp: new Date().toISOString(),
    validationReport: validationReport,
    articleStorage: articleStorage,

    // Phase-2 anti-detection metadata
    antiDetectionLevel: 'Selective_Enhancement',
    llmsUsed: ['claude', 'openai', 'gemini', 'mistral'],
    enhancementApplied: true,
    workflowVersion: '2.0-NodeJS',

    stats: stats
  };

  await logSh(`🔍 Response.stats: ${JSON.stringify(response.stats)}`, 'DEBUG');

  return response;
}
|
|
|
|
// ============= HELPERS =============
|
|
|
|
/**
 * Count the total number of words across all generated text snippets.
 *
 * Non-string, empty, and whitespace-only entries contribute zero words.
 * FIX: the previous version counted a whitespace-only string as one word,
 * because ''.split(/\s+/) yields [''].
 *
 * @param {object} generatedContent - Map of element key -> generated text.
 * @returns {number} Total word count over all string values.
 */
function calculateTotalWordCount(generatedContent) {
  let totalWords = 0;
  Object.values(generatedContent).forEach(content => {
    if (content && typeof content === 'string') {
      // filter(Boolean) drops the empty token produced by whitespace-only input
      totalWords += content.trim().split(/\s+/).filter(Boolean).length;
    }
  });
  return totalWords;
}
|
|
|
|
// ============= POINTS D'ENTRÉE SUPPLÉMENTAIRES =============
|
|
|
|
/**
|
|
* Test du workflow principal - ASYNC pour Node.js
|
|
*/
|
|
async function testMainWorkflow() {
|
|
try {
|
|
const testData = {
|
|
csvData: {
|
|
mc0: 'plaque test nodejs',
|
|
t0: 'Test workflow principal Node.js',
|
|
personality: { nom: 'Marc', style: 'professionnel' },
|
|
tMinus1: 'parent test',
|
|
mcPlus1: 'mot1,mot2,mot3,mot4',
|
|
tPlus1: 'Titre1,Titre2,Titre3,Titre4'
|
|
},
|
|
xmlTemplate: Buffer.from('<?xml version="1.0"?><test>|Test_Element{{T0}}|</test>').toString('base64'),
|
|
source: 'test_main_nodejs'
|
|
};
|
|
|
|
const result = await handleFullWorkflow(testData);
|
|
return result;
|
|
|
|
} catch (error) {
|
|
throw error;
|
|
} finally {
|
|
tracer.printSummary();
|
|
}
|
|
}
|
|
|
|
// 🔄 NODE.JS EXPORTS
// handleFullWorkflow is the public entry point; the remaining helpers are
// exported so they can be exercised individually (tests, server routes).
module.exports = {
  handleFullWorkflow,
  testMainWorkflow,
  prepareCSVData,
  decodeXMLTemplate,
  preprocessXML,
  saveArticle,
  buildWorkflowResponse,
  calculateTotalWordCount,
  launchLogViewer
};