Some checks failed
CI/CD Pipeline / Backend - Lint & Test (push) Has been cancelled
CI/CD Pipeline / Frontend - Lint & Test (push) Has been cancelled
CI/CD Pipeline / Security Scan (push) Has been cancelled
CI/CD Pipeline / Build Backend (push) Has been cancelled
CI/CD Pipeline / Build Frontend (push) Has been cancelled
CI/CD Pipeline / Integration Tests (push) Has been cancelled
CI/CD Pipeline / Deploy to Staging (push) Has been cancelled
CI/CD Pipeline / Deploy to Production (push) Has been cancelled
CI/CD Pipeline / Performance Tests (push) Has been cancelled
CI/CD Pipeline / Dependency Updates (push) Has been cancelled
- Updated Anthropic API to latest version (2024-01-01)
- Set Claude 3.7 Sonnet Latest as primary model
- Removed deprecated Opus 3.5 references
- Fixed LLM response validation and JSON parsing
- Improved error handling and logging
- Updated model configurations and pricing
- Enhanced document processing reliability
- Fixed TypeScript type issues
- Updated environment configuration
66 lines
2.0 KiB
JavaScript
// Node stdlib first, then the Anthropic SDK client for the Claude Messages API.
const path = require('path');
const Anthropic = require('@anthropic-ai/sdk');

// Pull environment variables (notably ANTHROPIC_API_KEY) from the .env file
// that sits beside this script — not from the current working directory.
require('dotenv').config({ path: path.join(__dirname, '.env') });

console.log('🔧 Testing LLM Service (Simple)...\n');
|
|
|
|
/**
 * Smoke-test the Anthropic Messages API: create a client, send one small
 * request, and check that the response text parses as JSON.
 *
 * Progress and results are logged to the console. All errors are caught and
 * reported (with hints for common failure classes) rather than rethrown; on
 * failure the process exit code is set so CI can detect it.
 *
 * @returns {Promise<void>}
 */
async function testLLMService() {
  try {
    // Fail fast with a clear message instead of a confusing auth error later.
    if (!process.env.ANTHROPIC_API_KEY) {
      throw new Error('ANTHROPIC_API_KEY is not set - check the .env file');
    }

    console.log('🔄 Creating Anthropic client...');

    const anthropic = new Anthropic({
      apiKey: process.env.ANTHROPIC_API_KEY,
    });

    console.log('✅ Anthropic client created!');

    console.log('🔄 Testing simple API call...');

    const response = await anthropic.messages.create({
      model: 'claude-3-5-sonnet-20241022',
      max_tokens: 100,
      temperature: 0.1, // near-deterministic output for a repeatable test
      system: 'You are a helpful assistant. Respond with a simple JSON object: {"test": "success", "message": "Hello World"}',
      messages: [
        {
          role: 'user',
          content: 'Please respond with the JSON object as requested in the system prompt.'
        }
      ]
    });

    console.log('✅ API call successful!');

    // The Messages API returns an array of typed content blocks; only
    // blocks of type 'text' carry a .text field, so don't blindly index [0].
    const textBlock = response.content.find((block) => block.type === 'text');
    if (!textBlock) {
      throw new Error('Response contained no text content block');
    }
    console.log('   Response:', textBlock.text);

    // Try to parse as JSON
    try {
      const jsonResponse = JSON.parse(textBlock.text);
      console.log('   ✅ JSON parsing successful:', jsonResponse);
    } catch (parseError) {
      console.log('   ❌ JSON parsing failed:', parseError.message);
      console.log('   Raw response:', textBlock.text);
    }

  } catch (error) {
    // Non-Error throws (strings, SDK error objects) may lack .message —
    // normalize first so the .includes() checks below cannot themselves throw.
    const message = error?.message ?? String(error);
    console.error('❌ LLM test failed:', message);

    if (error?.status) {
      console.error('   Status:', error.status);
    }

    if (message.includes('authentication')) {
      console.error('   🔑 Authentication error - check API key');
    } else if (message.includes('quota') || message.includes('limit')) {
      console.error('   💰 Quota/limit error - check usage limits');
    } else if (message.includes('rate')) {
      console.error('   ⏱️ Rate limit error - too many requests');
    }

    console.error('   Full error:', error);

    // Surface the failure to CI instead of exiting 0.
    process.exitCode = 1;
  }
}
|
|
|
|
// Entry point. testLLMService catches its own errors, but attach a rejection
// handler anyway so an unexpected rejection never becomes an unhandled one.
testLLMService().catch((err) => {
  console.error('❌ Unexpected failure:', err);
  process.exitCode = 1;
});