Clean up temporary files and logs - Remove test PDF files, log files, and temporary scripts - Keep important documentation and configuration files - Clean up root directory test files and logs - Maintain project structure integrity
This commit is contained in:
@@ -1,97 +0,0 @@
|
|||||||
#!/usr/bin/env node
/**
 * Setup test data for agentic RAG database integration tests
 * Creates test users and documents with proper UUIDs
 */

const { v4: uuidv4 } = require('uuid');
const db = require('./dist/config/database').default;
const bcrypt = require('bcrypt');

/**
 * Inserts one admin test user and two test documents into the database.
 *
 * @returns {Promise<{testUserId: string, testDocumentId: string, testDocumentId2: string}>}
 *   The generated UUIDs, for use by integration tests.
 * @throws Re-throws any database error after logging it.
 */
async function setupTestData() {
  console.log('🔧 Setting up test data for agentic RAG database integration...\n');

  try {
    // Create test user (idempotent: ON CONFLICT on email makes re-runs safe,
    // though in that case testUserId will not match the existing row's id).
    console.log('1. Creating test user...');
    const testUserId = uuidv4();
    const hashedPassword = await bcrypt.hash('testpassword123', 12);

    await db.query(`
      INSERT INTO users (id, email, password_hash, name, role, created_at, updated_at)
      VALUES ($1, $2, $3, $4, $5, NOW(), NOW())
      ON CONFLICT (email) DO NOTHING
    `, [testUserId, 'test@agentic-rag.com', hashedPassword, 'Test User', 'admin']);

    // Create test document
    console.log('2. Creating test document...');
    const testDocumentId = uuidv4();

    await db.query(`
      INSERT INTO documents (id, user_id, original_file_name, file_path, file_size, status, extracted_text, created_at, updated_at)
      VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())
    `, [
      testDocumentId,
      testUserId,
      'test-cim-document.pdf',
      '/uploads/test-cim-document.pdf',
      1024000,
      'completed',
      'This is a test CIM document for agentic RAG testing.'
    ]);

    // Create test document for full flow
    console.log('3. Creating test document for full flow...');
    const testDocumentId2 = uuidv4();

    await db.query(`
      INSERT INTO documents (id, user_id, original_file_name, file_path, file_size, status, extracted_text, created_at, updated_at)
      VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())
    `, [
      testDocumentId2,
      testUserId,
      'test-cim-document-full.pdf',
      '/uploads/test-cim-document-full.pdf',
      2048000,
      'completed',
      'This is a comprehensive test CIM document for full agentic RAG flow testing.'
    ]);

    console.log('✅ Test data setup completed successfully!');
    console.log('\n📋 Test Data Summary:');
    console.log(`   Test User ID: ${testUserId}`);
    console.log(`   Test Document ID: ${testDocumentId}`);
    console.log(`   Test Document ID (Full Flow): ${testDocumentId2}`);
    console.log(`   Test User Email: test@agentic-rag.com`);
    console.log(`   Test User Password: testpassword123`);

    // FIX: the original also did `module.exports = { testUserId, ... }` here,
    // which clobbered the module's `{ setupTestData }` export at runtime.
    // Callers get the IDs from the return value instead.
    return { testUserId, testDocumentId, testDocumentId2 };

  } catch (error) {
    console.error('❌ Failed to setup test data:', error);
    throw error;
  }
}

// Run setup if called directly
if (require.main === module) {
  setupTestData()
    .then(() => {
      console.log('\n✨ Test data setup completed!');
      process.exit(0);
    })
    .catch((error) => {
      console.error('❌ Test data setup failed:', error);
      process.exit(1);
    });
}

module.exports = { setupTestData };
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
const axios = require('axios');

const API_BASE = 'http://localhost:5000';

/**
 * Dumps the current processing state of a known STAX document:
 * authenticates with the test account, then prints the document record
 * and its processing jobs to the console.
 */
async function checkStaxStatus() {
  try {
    console.log('🔍 Checking STAX document processing status...');

    // First login to get a token
    const login = await axios.post(`${API_BASE}/api/auth/login`, {
      email: 'test@stax-processing.com',
      password: 'TestPass123!'
    });

    const accessToken = login.data.data.tokens.accessToken;
    console.log('✅ Authenticated successfully');

    // Shared request config for all authenticated calls below.
    const authConfig = { headers: { 'Authorization': `Bearer ${accessToken}` } };
    const documentId = '73fe2304-be3e-4195-871e-98d860e768a4';

    // Check document status
    const docResponse = await axios.get(`${API_BASE}/api/documents/${documentId}`, authConfig);
    console.log('📄 Document Status:');
    console.log(JSON.stringify(docResponse.data, null, 2));

    // Check if there are any processing jobs
    const jobsResponse = await axios.get(`${API_BASE}/api/documents/${documentId}/jobs`, authConfig);
    console.log('\n🔄 Processing Jobs:');
    console.log(JSON.stringify(jobsResponse.data, null, 2));

  } catch (error) {
    console.error('❌ Error:', error.response?.data || error.message);
  }
}

checkStaxStatus();
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
# Confidential Information Memorandum
|
|
||||||
## TechStart Solutions Inc.
|
|
||||||
|
|
||||||
### Executive Summary
|
|
||||||
TechStart Solutions Inc. is a rapidly growing SaaS company specializing in AI-powered business intelligence tools. The company has achieved 300% year-over-year growth and is seeking $15M in Series B funding to expand its product portfolio and enter new markets.
|
|
||||||
|
|
||||||
### Company Overview
|
|
||||||
- **Founded**: 2020
|
|
||||||
- **Headquarters**: San Francisco, CA
|
|
||||||
- **Employees**: 85 (45 engineers, 25 sales, 15 operations)
|
|
||||||
- **Revenue**: $8.2M (2023), $2.1M (2022), $500K (2021)
|
|
||||||
- **Customers**: 1,200+ enterprise clients
|
|
||||||
- **Market Cap**: $45M (pre-money valuation)
|
|
||||||
|
|
||||||
### Business Model
|
|
||||||
- **Primary Revenue**: SaaS subscriptions (85% of revenue)
|
|
||||||
- **Secondary Revenue**: Professional services (10%), API licensing (5%)
|
|
||||||
- **Average Contract Value**: $45,000 annually
|
|
||||||
- **Customer Retention Rate**: 94%
|
|
||||||
- **Gross Margin**: 78%
|
|
||||||
|
|
||||||
### Market Opportunity
|
|
||||||
- **Total Addressable Market**: $45B
|
|
||||||
- **Serviceable Addressable Market**: $2.8B
|
|
||||||
- **Target Market**: Mid-market enterprises (500-5,000 employees)
|
|
||||||
- **Competitive Landscape**: 15 major competitors, 3 direct competitors
|
|
||||||
|
|
||||||
### Financial Highlights
|
|
||||||
**Revenue Growth**:
|
|
||||||
- 2021: $500K
|
|
||||||
- 2022: $2.1M (320% growth)
|
|
||||||
- 2023: $8.2M (290% growth)
|
|
||||||
- 2024 (projected): $18M (120% growth)
|
|
||||||
|
|
||||||
**Key Metrics**:
|
|
||||||
- Monthly Recurring Revenue: $683K
|
|
||||||
- Annual Recurring Revenue: $8.2M
|
|
||||||
- Customer Acquisition Cost: $12,000
|
|
||||||
- Lifetime Value: $180,000
|
|
||||||
- Payback Period: 8 months
|
|
||||||
|
|
||||||
### Use of Funds
|
|
||||||
- **Product Development**: $8M (53%)
|
|
||||||
- **Sales & Marketing**: $4M (27%)
|
|
||||||
- **Operations**: $2M (13%)
|
|
||||||
- **Working Capital**: $1M (7%)
|
|
||||||
|
|
||||||
### Management Team
|
|
||||||
- **CEO**: Sarah Johnson (ex-Google, 15 years experience)
|
|
||||||
- **CTO**: Michael Chen (ex-Microsoft, PhD Computer Science)
|
|
||||||
- **CFO**: David Rodriguez (ex-Salesforce, CPA)
|
|
||||||
- **VP Sales**: Lisa Thompson (ex-Oracle, 12 years experience)
|
|
||||||
|
|
||||||
### Risk Factors
|
|
||||||
- Dependency on key personnel
|
|
||||||
- Competition from larger tech companies
|
|
||||||
- Economic downturn impact on SaaS spending
|
|
||||||
- Regulatory changes in data privacy
|
|
||||||
- Technology obsolescence
|
|
||||||
|
|
||||||
### Investment Terms
|
|
||||||
- **Round**: Series B
|
|
||||||
- **Amount**: $15M
|
|
||||||
- **Valuation**: $45M pre-money, $60M post-money
|
|
||||||
- **Structure**: Preferred equity
|
|
||||||
- **Board Seats**: 2 seats for investors
|
|
||||||
- **Exit Strategy**: IPO in 3-5 years or strategic acquisition
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
const FormData = require('form-data');
const fs = require('fs');
const axios = require('axios');

/**
 * Smoke-tests the enhanced agentic RAG pipeline: logs in, re-uploads the
 * STAX CIM PDF to trigger processing, then polls the progress endpoint a
 * bounded number of times until the job completes or fails.
 */
async function testEnhancedPipeline() {
  try {
    console.log('🚀 Testing Enhanced Agentic RAG Pipeline...');

    // Login
    const loginResponse = await axios.post('http://localhost:5000/api/auth/login', {
      email: 'user1@example.com',
      password: 'user123'
    });
    const authToken = loginResponse.data.data.tokens.accessToken;
    console.log('✅ Authenticated successfully');

    // Upload the same document again to trigger the new enhanced pipeline
    const staxFilePath = '/home/jonathan/Coding/cim_summary/stax-cim-test.pdf';
    const form = new FormData();
    form.append('document', fs.createReadStream(staxFilePath));

    console.log('📄 Uploading document for enhanced agentic RAG processing...');
    const uploadResponse = await axios.post('http://localhost:5000/api/documents', form, {
      headers: {
        ...form.getHeaders(),
        'Authorization': `Bearer ${authToken}`
      }
    });

    if (!uploadResponse.data.success) {
      console.error('❌ Upload failed:', uploadResponse.data);
      return;
    }

    const documentId = uploadResponse.data.data.document.id;
    console.log('✅ Document uploaded! ID:', documentId);
    console.log('🧠 Enhanced agentic RAG with vectorization should now be processing...');

    // Monitor for the new logs indicating enhanced processing
    console.log('⏳ Monitoring for enhanced processing logs...');
    const maxAttempts = 10;

    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      // 3-second pause between polls.
      await new Promise(resolve => setTimeout(resolve, 3000));

      try {
        const progressResponse = await axios.get(
          `http://localhost:5000/api/documents/${documentId}/progress`,
          { headers: { 'Authorization': `Bearer ${authToken}` } }
        );

        console.log(`📊 Attempt ${attempt}: ${progressResponse.data.progress}% - ${progressResponse.data.step}`);

        const { status } = progressResponse.data;
        if (status === 'completed') {
          console.log('🎉 Enhanced processing completed!');
          break;
        } else if (status === 'failed') {
          console.error('❌ Processing failed:', progressResponse.data.error);
          break;
        }
      } catch (error) {
        console.log(`⚠️ Progress check ${attempt}: ${error.response?.status || error.message}`);
      }
    }

    console.log('✅ Enhanced agentic RAG pipeline test completed!');
    console.log('📋 Check backend logs for vectorization and enhanced search logs.');

  } catch (error) {
    console.error('❌ Test failed:', error.message);
    if (error.response) {
      console.error('Response:', error.response.data);
    }
  }
}

testEnhancedPipeline();
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
const axios = require('axios');

/**
 * Triggers LLM-based processing of an existing STAX CIM document via the
 * documents API, using the BPCP CIM review template.
 */
async function testLLMProcessing() {
  try {
    console.log('🚀 Testing LLM Processing for STAX CIM...');

    // First, authenticate to get a valid token
    const authResponse = await axios.post('http://localhost:5000/api/auth/login', {
      email: 'test@stax-processing.com',
      password: 'TestPass123!'
    });

    console.log('✅ Authentication successful');
    console.log('Login response structure:', Object.keys(authResponse.data));

    const token = authResponse.data.data?.tokens?.accessToken;
    console.log('Token:', token ? 'Received' : 'Not received');

    // Guard: nothing to do without a bearer token.
    if (!token) {
      console.error('No token received from login');
      return;
    }

    // Document ID that's already in the system
    const documentId = '0876b7f4-0899-4eb0-b2c6-434ec4e7a46d';

    // Trigger LLM processing
    const processResponse = await axios.post(
      `http://localhost:5000/api/documents/${documentId}/process`,
      {
        processingType: 'llm',
        template: 'BPCP CIM Review Template'
      },
      {
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${token}`
        }
      }
    );

    console.log('✅ LLM Processing triggered successfully');
    console.log('Response:', processResponse.data);

  } catch (error) {
    console.error('❌ Error:', error.response?.data || error.message);
    if (error.response?.data) {
      console.error('Full error response:', JSON.stringify(error.response.data, null, 2));
    }
  }
}

testLLMProcessing();
|
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
const axios = require('axios');
const FormData = require('form-data');
const fs = require('fs');
const path = require('path');

/**
 * Uploads the STAX CIM PDF with the optimized agentic RAG strategy, then
 * polls the document record every 5 seconds until processing reaches a
 * terminal state or the polling budget runs out.
 */
async function testOptimizedStax() {
  try {
    console.log('🚀 Testing Optimized Agentic RAG Processing for STAX CIM...');

    // First login to get a token
    const loginResponse = await axios.post('http://localhost:5000/api/auth/login', {
      email: 'test@stax-processing.com',
      password: 'TestPass123!'
    });

    const accessToken = loginResponse.data.data.tokens.accessToken;
    console.log('✅ Authenticated successfully');

    // Auth header shared by every authenticated request below.
    const authHeaders = { 'Authorization': `Bearer ${accessToken}` };

    // Upload STAX document with optimized agentic RAG processing
    const form = new FormData();
    form.append('document', fs.createReadStream(path.join(__dirname, 'stax-cim-test.pdf')));
    form.append('processImmediately', 'true');
    form.append('processingStrategy', 'optimized_agentic_rag'); // Use optimized strategy

    console.log('📤 Uploading STAX document with optimized agentic RAG processing...');

    const uploadResponse = await axios.post('http://localhost:5000/api/documents/upload', form, {
      headers: { ...form.getHeaders(), ...authHeaders },
      timeout: 300000 // 5 minutes timeout for large document
    });

    console.log('✅ Upload successful!');
    console.log('📄 Document ID:', uploadResponse.data.id);
    console.log('🔄 Status:', uploadResponse.data.status);

    const docUrl = `http://localhost:5000/api/documents/${uploadResponse.data.id}`;

    // Monitor processing progress
    console.log('⏳ Monitoring processing progress...');
    let attempts = 0;
    const maxAttempts = 60; // 5 minutes with 5-second intervals

    while (attempts < maxAttempts) {
      await new Promise(resolve => setTimeout(resolve, 5000)); // Wait 5 seconds
      attempts++;

      try {
        const docResponse = await axios.get(docUrl, { headers: authHeaders });
        const status = docResponse.data.status;
        console.log(`📊 Attempt ${attempts}/${maxAttempts}: Status = ${status}`);

        if (status === 'completed') {
          console.log('🎉 Processing completed successfully!');
          console.log('📄 Final Document Status:');
          console.log(JSON.stringify(docResponse.data, null, 2));
          break;
        } else if (status === 'failed' || status === 'error') {
          console.log('❌ Processing failed');
          console.log('📄 Error Details:');
          console.log(JSON.stringify(docResponse.data, null, 2));
          break;
        }
      } catch (error) {
        console.log(`⚠️ Error checking status (attempt ${attempts}):`, error.response?.data?.message || error.message);
      }
    }

    // Budget exhausted: report whatever state the document ended up in.
    if (attempts >= maxAttempts) {
      console.log('⏰ Processing timeout - checking final status...');
      const finalResponse = await axios.get(docUrl, { headers: authHeaders });
      console.log('📄 Final Document Status:');
      console.log(JSON.stringify(finalResponse.data, null, 2));
    }

  } catch (error) {
    console.error('❌ Error:', error.response?.data || error.message);
  }
}

testOptimizedStax();
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
const axios = require('axios');
const FormData = require('form-data');
const fs = require('fs');
const path = require('path');

/**
 * Sanity check: upload the STAX CIM PDF with the plain 'basic' processing
 * strategy, wait briefly, then print the resulting document record.
 */
async function testStaxSimple() {
  try {
    console.log('🔍 Testing STAX processing with simple strategy...');

    // First login to get a token
    const loginResponse = await axios.post('http://localhost:5000/api/auth/login', {
      email: 'test@stax-processing.com',
      password: 'TestPass123!'
    });

    const accessToken = loginResponse.data.data.tokens.accessToken;
    console.log('✅ Authenticated successfully');

    const authHeaders = { 'Authorization': `Bearer ${accessToken}` };

    // Upload STAX document with simple processing strategy
    const form = new FormData();
    form.append('document', fs.createReadStream(path.join(__dirname, 'stax-cim-test.pdf')));
    form.append('processImmediately', 'true');
    form.append('processingStrategy', 'basic'); // Use basic instead of agentic_rag

    console.log('📤 Uploading STAX document with basic processing...');

    const uploadResponse = await axios.post('http://localhost:5000/api/documents/upload', form, {
      headers: { ...form.getHeaders(), ...authHeaders },
      timeout: 120000 // 2 minutes timeout
    });

    console.log('✅ Upload successful!');
    console.log('📄 Document ID:', uploadResponse.data.id);
    console.log('🔄 Status:', uploadResponse.data.status);

    // Wait a bit and check status
    console.log('⏳ Waiting for processing...');
    await new Promise(resolve => setTimeout(resolve, 10000)); // Wait 10 seconds

    // Check document status
    const docResponse = await axios.get(
      `http://localhost:5000/api/documents/${uploadResponse.data.id}`,
      { headers: authHeaders }
    );

    console.log('📄 Final Document Status:');
    console.log(JSON.stringify(docResponse.data, null, 2));

  } catch (error) {
    console.error('❌ Error:', error.response?.data || error.message);
  }
}

testStaxSimple();
|
|
||||||
@@ -1,140 +0,0 @@
|
|||||||
const FormData = require('form-data');
const fs = require('fs');
const axios = require('axios');
const path = require('path');

/**
 * End-to-end test of the agentic RAG pipeline for the Stax CIM:
 * login → upload → poll progress (now bounded) → fetch and summarize the
 * final analysis.
 */
async function testStaxUpload() {
  try {
    console.log('🚀 Starting Stax CIM agentic RAG test...');

    // Step 1: Login to get token
    console.log('📝 Logging in...');
    const loginResponse = await axios.post('http://localhost:5000/api/auth/login', {
      email: 'user1@example.com',
      password: 'user123'
    });

    if (!loginResponse.data.success) {
      console.error('❌ Login failed:', loginResponse.data.message);
      return;
    }

    const token = loginResponse.data.data.tokens.accessToken;
    console.log('✅ Login successful');

    // Step 2: Upload Stax CIM document
    const staxFilePath = '/home/jonathan/Coding/cim_summary/stax-cim-test.pdf';

    if (!fs.existsSync(staxFilePath)) {
      console.error('❌ Stax CIM file not found:', staxFilePath);
      return;
    }

    console.log('📄 Uploading Stax CIM document...');
    const form = new FormData();
    form.append('document', fs.createReadStream(staxFilePath));

    const uploadResponse = await axios.post('http://localhost:5000/api/documents', form, {
      headers: {
        ...form.getHeaders(),
        'Authorization': `Bearer ${token}`
      }
    });

    if (!uploadResponse.data.success) {
      console.error('❌ Upload failed:', uploadResponse.data.message || uploadResponse.data.error);
      console.error('Full response:', uploadResponse.data);
      return;
    }

    const documentId = uploadResponse.data.data.document.id;
    console.log('✅ Upload successful! Document ID:', documentId);
    console.log('🧠 Processing strategy: agentic_rag with enhanced vectorization');

    // Step 3: Monitor processing progress.
    // FIX: the original `while (isProcessing)` loop was unbounded and could
    // hang forever if the backend never reached a terminal status (and never
    // returned 404). The loop now has an attempt budget (~5 min at 3 s/poll).
    console.log('⏳ Monitoring processing progress...');
    let isProcessing = true;
    let lastProgress = 0;
    let attempts = 0;
    const maxAttempts = 100;

    while (isProcessing && attempts < maxAttempts) {
      attempts++;
      await new Promise(resolve => setTimeout(resolve, 3000)); // Wait 3 seconds

      try {
        const progressResponse = await axios.get(
          `http://localhost:5000/api/documents/${documentId}/progress`,
          {
            headers: { 'Authorization': `Bearer ${token}` }
          }
        );

        const progress = progressResponse.data;
        // Only log when the reported percentage actually changed.
        if (progress.progress !== lastProgress) {
          console.log(`📊 Progress: ${progress.progress}% - ${progress.step || 'Processing...'}`);
          lastProgress = progress.progress;
        }

        if (progress.status === 'completed') {
          console.log('🎉 Processing completed successfully!');
          isProcessing = false;
        } else if (progress.status === 'failed') {
          console.error('❌ Processing failed:', progress.error);
          isProcessing = false;
        }
      } catch (error) {
        if (error.response?.status === 404) {
          // No progress endpoint: assume processing finished rather than spin.
          console.log('📄 Document processing completed (progress endpoint not found)');
          isProcessing = false;
        } else {
          console.error('⚠️ Progress check error:', error.message);
        }
      }
    }

    if (isProcessing) {
      console.log('⏰ Polling timed out - retrieving current document state...');
    }

    // Step 4: Get final document with analysis
    console.log('📋 Retrieving final analysis...');
    const docResponse = await axios.get(
      `http://localhost:5000/api/documents/${documentId}`,
      {
        headers: { 'Authorization': `Bearer ${token}` }
      }
    );

    const document = docResponse.data.data;
    console.log('✅ Document retrieved:');
    console.log('- Status:', document.status);
    console.log('- Processing strategy:', document.processing_strategy || 'agentic_rag');
    console.log('- Summary available:', !!document.generated_summary);
    console.log('- Analysis data available:', !!document.analysis_data);

    if (document.generated_summary) {
      console.log('\n📝 Summary preview (first 500 chars):');
      console.log(document.generated_summary.substring(0, 500) + '...');
    }

    if (document.analysis_data) {
      console.log('\n📊 Analysis data structure:');
      try {
        // analysis_data may arrive as a JSON string or as an object.
        const analysis = typeof document.analysis_data === 'string'
          ? JSON.parse(document.analysis_data)
          : document.analysis_data;
        console.log('- Company name:', analysis.dealOverview?.targetCompanyName || 'Not found');
        console.log('- Sectors:', analysis.dealOverview?.sectors || 'Not found');
        console.log('- Financial data available:', !!analysis.financialPerformance);
        console.log('- Market analysis available:', !!analysis.marketAnalysis);
      } catch (e) {
        // Fallback: assumes analysis_data is a string here — TODO confirm.
        console.log('- Raw analysis data length:', document.analysis_data.length, 'characters');
      }
    }

    console.log('\n🎯 Test completed successfully!');
    console.log('The enhanced agentic RAG pipeline with vectorization has been tested.');

  } catch (error) {
    console.error('❌ Test failed:', error.message);
    if (error.response) {
      console.error('Response:', error.response.data);
    }
  }
}

testStaxUpload();
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
This is a comprehensive test document for agentic RAG processing. It contains detailed financial information about a technology company including revenue growth of 25% year-over-year, market position as a leader in cloud infrastructure, and significant investment opportunities in AI and machine learning sectors.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Test document for verification
|
|
||||||
Reference in New Issue
Block a user