// Utility script: queue processing jobs for the STAX CIM test document.
//
// From the originating change set:
// - Add new database migrations for analysis data and job tracking
// - Implement enhanced document processing service with LLM integration
// - Add processing progress and queue status components
// - Create testing guides and utility scripts for CIM processing
// - Update frontend components for better user experience
// - Add environment configuration and backup files
// - Implement job queue service and upload progress tracking
const { Pool } = require('pg');
|
|
|
|
const pool = new Pool({
|
|
connectionString: 'postgresql://postgres:password@localhost:5432/cim_processor'
|
|
});
|
|
|
|
async function startStaxProcessing() {
|
|
try {
|
|
console.log('🔍 Finding STAX CIM document...');
|
|
|
|
// Find the STAX CIM document
|
|
const docResult = await pool.query(`
|
|
SELECT id, original_file_name, status, user_id, file_path
|
|
FROM documents
|
|
WHERE original_file_name = 'stax-cim-test.pdf'
|
|
ORDER BY created_at DESC
|
|
LIMIT 1
|
|
`);
|
|
|
|
if (docResult.rows.length === 0) {
|
|
console.log('❌ No STAX CIM document found');
|
|
return;
|
|
}
|
|
|
|
const document = docResult.rows[0];
|
|
console.log(`📄 Found document: ${document.original_file_name} (${document.status})`);
|
|
console.log(`📁 File path: ${document.file_path}`);
|
|
|
|
// Create processing jobs for the document
|
|
console.log('🚀 Creating processing jobs...');
|
|
|
|
// 1. Text extraction job
|
|
const textExtractionJob = await pool.query(`
|
|
INSERT INTO processing_jobs (document_id, type, status, progress, created_at)
|
|
VALUES ($1, 'text_extraction', 'pending', 0, CURRENT_TIMESTAMP)
|
|
RETURNING id
|
|
`, [document.id]);
|
|
|
|
console.log(`✅ Text extraction job created: ${textExtractionJob.rows[0].id}`);
|
|
|
|
// 2. LLM processing job
|
|
const llmProcessingJob = await pool.query(`
|
|
INSERT INTO processing_jobs (document_id, type, status, progress, created_at)
|
|
VALUES ($1, 'llm_processing', 'pending', 0, CURRENT_TIMESTAMP)
|
|
RETURNING id
|
|
`, [document.id]);
|
|
|
|
console.log(`✅ LLM processing job created: ${llmProcessingJob.rows[0].id}`);
|
|
|
|
// 3. PDF generation job
|
|
const pdfGenerationJob = await pool.query(`
|
|
INSERT INTO processing_jobs (document_id, type, status, progress, created_at)
|
|
VALUES ($1, 'pdf_generation', 'pending', 0, CURRENT_TIMESTAMP)
|
|
RETURNING id
|
|
`, [document.id]);
|
|
|
|
console.log(`✅ PDF generation job created: ${pdfGenerationJob.rows[0].id}`);
|
|
|
|
// Update document status to show it's ready for processing
|
|
await pool.query(`
|
|
UPDATE documents
|
|
SET status = 'processing_llm',
|
|
updated_at = CURRENT_TIMESTAMP
|
|
WHERE id = $1
|
|
`, [document.id]);
|
|
|
|
console.log('');
|
|
console.log('🎉 Processing jobs created successfully!');
|
|
console.log('');
|
|
console.log('📊 Next steps:');
|
|
console.log('1. The backend should automatically pick up these jobs');
|
|
console.log('2. Check the backend logs for processing progress');
|
|
console.log('3. The document will be processed with your LLM API keys');
|
|
console.log('4. You can monitor progress in the frontend');
|
|
console.log('');
|
|
console.log('🔍 To monitor:');
|
|
console.log('- Backend logs: Watch the terminal for processing logs');
|
|
console.log('- Frontend: http://localhost:3000 (Documents tab)');
|
|
console.log('- Database: Check processing_jobs table for status updates');
|
|
|
|
} catch (error) {
|
|
console.error('❌ Error starting processing:', error.message);
|
|
} finally {
|
|
await pool.end();
|
|
}
|
|
}
|
|
|
|
startStaxProcessing();
|