🔧 Fix authentication and document upload issues

## What was done:
- Fixed Firebase Admin initialization to use default credentials for Firebase Functions
- Updated frontend to use the correct Firebase Functions URL (it was using the Cloud Run URL)
- Added comprehensive debugging to the authentication middleware
- Added debugging to the file upload middleware and CORS handling
- Added debug buttons to the frontend for troubleshooting authentication
- Enhanced error handling and logging throughout the stack

## Current issues:
- Document upload still returns 400 Bad Request despite authentication working
- GET requests work fine (200 OK) but POST upload requests fail
- Frontend authentication is working correctly (valid JWT tokens)
- Backend authentication middleware is working (rejects invalid tokens)
- CORS is configured correctly and allowing requests

## Root cause analysis:
- Authentication is NOT the issue (tokens are valid, GET requests work)
- The problem appears to be in the file upload handling or multer configuration
- Request reaches the server but fails during upload processing
- Need to identify exactly where in the upload pipeline the failure occurs

## TODO next steps:
1. 🔍 Check Firebase Functions logs after next upload attempt to see debugging output
2. 🔍 Verify whether the request reaches the upload middleware (look for the "Upload middleware called" debug logs)
3. 🔍 Check if file validation is triggered (look for '🔍 File filter called' logs)
4. 🔍 Identify specific error in upload pipeline (multer, file processing, etc.)
5. 🔍 Test with smaller file or different file type to isolate issue
6. 🔍 Check if issue is with Firebase Functions file size limits or timeout
7. 🔍 Verify multer configuration and file handling in Firebase Functions environment

## Technical details:
- Frontend: https://cim-summarizer.web.app
- Backend: https://us-central1-cim-summarizer.cloudfunctions.net/api
- Authentication: Firebase Auth with JWT tokens (working correctly)
- File upload: Multer with memory storage for immediate GCS upload
- Debug buttons available in production frontend for troubleshooting
This commit is contained in:
Jon
2025-07-31 16:18:53 -04:00
parent aa0931ecd7
commit 6057d1d7fd
79 changed files with 8920 additions and 1786 deletions

View File

@@ -0,0 +1,173 @@
const { createClient } = require('@supabase/supabase-js');

// Supabase configuration. NOTE(review): the service-role key was previously
// committed to source control; it grants unrestricted database access, so it
// must come from the environment and the leaked key should be rotated.
const SUPABASE_URL = process.env.SUPABASE_URL || 'https://gzoclmbqmgmpuhufbnhy.supabase.co';
const SUPABASE_SERVICE_KEY = process.env.SUPABASE_SERVICE_KEY;

if (!SUPABASE_SERVICE_KEY) {
  console.error('❌ SUPABASE_SERVICE_KEY environment variable is required');
  process.exit(1);
}

const serviceClient = createClient(SUPABASE_URL, SUPABASE_SERVICE_KEY);

// DDL for every table this script manages. Order matters: `documents` must
// exist before the tables that reference it through foreign keys.
const TABLE_DEFINITIONS = [
  {
    label: 'Users',
    name: 'users',
    sql: `
      CREATE TABLE IF NOT EXISTS users (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        firebase_uid VARCHAR(255) UNIQUE NOT NULL,
        name VARCHAR(255),
        email VARCHAR(255) UNIQUE NOT NULL,
        created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `,
  },
  {
    label: 'Documents',
    name: 'documents',
    sql: `
      CREATE TABLE IF NOT EXISTS documents (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id VARCHAR(255) NOT NULL,
        original_file_name VARCHAR(255) NOT NULL,
        file_path TEXT NOT NULL,
        file_size BIGINT NOT NULL,
        status VARCHAR(50) DEFAULT 'uploaded',
        extracted_text TEXT,
        generated_summary TEXT,
        error_message TEXT,
        analysis_data JSONB,
        processing_completed_at TIMESTAMP WITH TIME ZONE,
        created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `,
  },
  {
    label: 'Document versions',
    name: 'document_versions',
    sql: `
      CREATE TABLE IF NOT EXISTS document_versions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        document_id UUID REFERENCES documents(id) ON DELETE CASCADE,
        version_number INTEGER NOT NULL,
        file_path TEXT NOT NULL,
        processing_strategy VARCHAR(50),
        created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `,
  },
  {
    label: 'Document feedback',
    name: 'document_feedback',
    sql: `
      CREATE TABLE IF NOT EXISTS document_feedback (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        document_id UUID REFERENCES documents(id) ON DELETE CASCADE,
        user_id VARCHAR(255) NOT NULL,
        feedback_type VARCHAR(50) NOT NULL,
        feedback_text TEXT,
        rating INTEGER CHECK (rating >= 1 AND rating <= 5),
        created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `,
  },
  {
    label: 'Processing jobs',
    name: 'processing_jobs',
    sql: `
      CREATE TABLE IF NOT EXISTS processing_jobs (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        job_type VARCHAR(50) NOT NULL,
        status VARCHAR(50) DEFAULT 'pending',
        data JSONB NOT NULL,
        priority INTEGER DEFAULT 0,
        started_at TIMESTAMP WITH TIME ZONE,
        completed_at TIMESTAMP WITH TIME ZONE,
        error_message TEXT,
        created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `,
  },
];

// All statements are idempotent (IF NOT EXISTS) and safe to re-run.
const INDEX_STATEMENTS = [
  'CREATE INDEX IF NOT EXISTS idx_documents_user_id ON documents(user_id);',
  'CREATE INDEX IF NOT EXISTS idx_documents_status ON documents(status);',
  'CREATE INDEX IF NOT EXISTS idx_processing_jobs_status ON processing_jobs(status);',
  'CREATE INDEX IF NOT EXISTS idx_processing_jobs_priority ON processing_jobs(priority);',
];

/**
 * Executes one SQL statement through the project's `exec_sql` RPC.
 * @param {string} sql - SQL statement to run.
 * @returns {Promise<string|null>} Error message, or null on success.
 */
async function execSql(sql) {
  const { error } = await serviceClient.rpc('exec_sql', { sql });
  return error ? error.message : null;
}

/**
 * Creates all application tables and indexes, then verifies each table is
 * reachable through the REST API. The final success message is only printed
 * when every statement actually succeeded (the previous version claimed
 * success unconditionally, even after logging errors).
 */
async function createTables() {
  console.log('Creating Supabase database tables...\n');
  let allSucceeded = true;
  try {
    for (const { label, name, sql } of TABLE_DEFINITIONS) {
      console.log(`🔄 Creating ${name} table...`);
      const errorMessage = await execSql(sql);
      if (errorMessage) {
        allSucceeded = false;
        console.log(`❌ ${label} table error: ${errorMessage}`);
      } else {
        console.log(`✅ ${label} table created successfully`);
      }
      console.log('');
    }

    console.log('🔄 Creating indexes...');
    for (const indexSql of INDEX_STATEMENTS) {
      const errorMessage = await execSql(indexSql);
      if (errorMessage) {
        allSucceeded = false;
        console.log(`❌ Index creation error: ${errorMessage}`);
      }
    }

    if (allSucceeded) {
      console.log('✅ Indexes created successfully');
      console.log('\n🎉 All tables created successfully!');
    } else {
      console.log('\n⚠️ Some statements failed - see errors above');
      process.exitCode = 1; // surface the failure to callers/CI
    }

    // Verify every table answers a trivial select via the REST API.
    console.log('\n🔍 Verifying tables...');
    for (const { name } of TABLE_DEFINITIONS) {
      const { error } = await serviceClient.from(name).select('*').limit(1);
      if (error) {
        console.log(`❌ Table ${name} verification failed: ${error.message}`);
      } else {
        console.log(`✅ Table ${name} verified successfully`);
      }
    }
  } catch (error) {
    console.error('❌ Table creation failed:', error.message);
    console.error('Error details:', error);
    process.exitCode = 1;
  }
}

createTables();

View File

@@ -0,0 +1,127 @@
const { createClient } = require('@supabase/supabase-js');

// Supabase configuration. NOTE(review): the service-role key was previously
// committed to source control; require it from the environment instead, and
// rotate the leaked key.
const SUPABASE_URL = process.env.SUPABASE_URL || 'https://gzoclmbqmgmpuhufbnhy.supabase.co';
const SUPABASE_SERVICE_KEY = process.env.SUPABASE_SERVICE_KEY;

if (!SUPABASE_SERVICE_KEY) {
  console.error('❌ SUPABASE_SERVICE_KEY environment variable is required');
  process.exit(1);
}

const serviceClient = createClient(SUPABASE_URL, SUPABASE_SERVICE_KEY);

/**
 * Probes a table with a zero-row select to find out whether it exists.
 * @param {string} table - Table name to probe.
 * @returns {Promise<'exists'|'missing'|'error'>} Probe outcome.
 */
async function checkTable(table) {
  const { error } = await serviceClient.from(table).select('*').limit(0);
  if (!error) return 'exists';
  if (error.message.includes('does not exist')) return 'missing';
  // Any other failure (auth, network, RLS) is NOT evidence the table exists;
  // the previous version logged it as "exists", which was misleading.
  return 'error';
}

/**
 * Checks whether the core tables exist, then prints the full DDL for the
 * operator to run in the Supabase SQL Editor (the REST API cannot execute
 * arbitrary DDL without a helper RPC).
 */
async function createTables() {
  console.log('Creating Supabase database tables via SQL...\n');
  try {
    console.log('🔄 Attempting to create tables...');

    for (const table of ['users', 'documents']) {
      console.log(`Checking ${table} table...`);
      const status = await checkTable(table);
      if (status === 'exists') {
        console.log(`✅ ${table} table exists`);
      } else if (status === 'missing') {
        console.log(`❌ ${table} table does not exist - need to create via SQL editor`);
      } else {
        console.log(`⚠️ Could not determine whether ${table} table exists`);
      }
    }

    console.log('\n📋 Tables need to be created via Supabase SQL Editor');
    console.log('Please run the following SQL in your Supabase dashboard:');
    console.log('\n--- SQL TO RUN IN SUPABASE DASHBOARD ---');
    console.log(`
-- Create users table
CREATE TABLE IF NOT EXISTS users (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  firebase_uid VARCHAR(255) UNIQUE NOT NULL,
  name VARCHAR(255),
  email VARCHAR(255) UNIQUE NOT NULL,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create documents table
CREATE TABLE IF NOT EXISTS documents (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  user_id VARCHAR(255) NOT NULL,
  original_file_name VARCHAR(255) NOT NULL,
  file_path TEXT NOT NULL,
  file_size BIGINT NOT NULL,
  status VARCHAR(50) DEFAULT 'uploaded',
  extracted_text TEXT,
  generated_summary TEXT,
  error_message TEXT,
  analysis_data JSONB,
  processing_completed_at TIMESTAMP WITH TIME ZONE,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create document_versions table
CREATE TABLE IF NOT EXISTS document_versions (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  document_id UUID REFERENCES documents(id) ON DELETE CASCADE,
  version_number INTEGER NOT NULL,
  file_path TEXT NOT NULL,
  processing_strategy VARCHAR(50),
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create document_feedback table
CREATE TABLE IF NOT EXISTS document_feedback (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  document_id UUID REFERENCES documents(id) ON DELETE CASCADE,
  user_id VARCHAR(255) NOT NULL,
  feedback_type VARCHAR(50) NOT NULL,
  feedback_text TEXT,
  rating INTEGER CHECK (rating >= 1 AND rating <= 5),
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create processing_jobs table
CREATE TABLE IF NOT EXISTS processing_jobs (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  job_type VARCHAR(50) NOT NULL,
  status VARCHAR(50) DEFAULT 'pending',
  data JSONB NOT NULL,
  priority INTEGER DEFAULT 0,
  started_at TIMESTAMP WITH TIME ZONE,
  completed_at TIMESTAMP WITH TIME ZONE,
  error_message TEXT,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes
CREATE INDEX IF NOT EXISTS idx_documents_user_id ON documents(user_id);
CREATE INDEX IF NOT EXISTS idx_documents_status ON documents(status);
CREATE INDEX IF NOT EXISTS idx_processing_jobs_status ON processing_jobs(status);
CREATE INDEX IF NOT EXISTS idx_processing_jobs_priority ON processing_jobs(priority);
`);
    console.log('--- END SQL ---\n');
    console.log('📝 Instructions:');
    console.log('1. Go to your Supabase dashboard');
    console.log('2. Navigate to SQL Editor');
    console.log('3. Paste the SQL above and run it');
    console.log('4. Come back and test the application');
  } catch (error) {
    console.error('❌ Error:', error.message);
    process.exitCode = 1;
  }
}

createTables();

View File

@@ -0,0 +1,84 @@
const { Pool } = require('pg');
const fs = require('fs');
const path = require('path');

// Database configuration: prefer a single DATABASE_URL, otherwise fall back
// to the individual DB_* environment variables.
const poolConfig = process.env.DATABASE_URL
  ? { connectionString: process.env.DATABASE_URL }
  : {
      host: process.env.DB_HOST,
      port: process.env.DB_PORT,
      database: process.env.DB_NAME,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,
    };

const pool = new Pool({
  ...poolConfig,
  max: 1, // single connection: migrations must run strictly in order
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 10000,
});

/**
 * Applies every pending .sql migration in ../src/models/migrations, in
 * lexicographic filename order, recording each applied migration in a
 * `migrations` bookkeeping table.
 *
 * Each migration runs inside a transaction together with its bookkeeping
 * INSERT, so a failure can never leave a migration half-applied yet marked
 * as complete (the previous version ran the two statements separately).
 * Sets a non-zero exit code on failure.
 */
async function runMigrations() {
  console.log('Starting database migrations...');
  let client;
  try {
    client = await pool.connect();
    console.log('✅ Database connection successful');

    await client.query(`
      CREATE TABLE IF NOT EXISTS migrations (
        id VARCHAR(255) PRIMARY KEY,
        name VARCHAR(255) NOT NULL,
        executed_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `);
    console.log('✅ Migrations table created or already exists');

    const migrationsDir = path.join(__dirname, '../src/models/migrations');
    const files = fs
      .readdirSync(migrationsDir)
      .filter((file) => file.endsWith('.sql'))
      .sort();
    console.log(`Found ${files.length} migration files`);

    for (const file of files) {
      const migrationId = file.replace('.sql', '');

      // Skip migrations that are already recorded as executed.
      const { rows } = await client.query('SELECT id FROM migrations WHERE id = $1', [migrationId]);
      if (rows.length > 0) {
        console.log(`⏭️ Migration ${migrationId} already executed, skipping`);
        continue;
      }

      const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf-8');
      console.log(`🔄 Executing migration: ${migrationId}`);

      // Apply the migration and record it atomically.
      await client.query('BEGIN');
      try {
        await client.query(sql);
        await client.query('INSERT INTO migrations (id, name) VALUES ($1, $2)', [migrationId, file]);
        await client.query('COMMIT');
      } catch (err) {
        await client.query('ROLLBACK');
        throw err;
      }
      console.log(`✅ Migration ${migrationId} completed`);
    }

    console.log('🎉 All migrations completed successfully!');
  } catch (error) {
    console.error('❌ Migration failed:', error.message);
    console.error('Error details:', error);
    // Use exitCode (not process.exit) so the finally block can still
    // release the client and drain the pool before the process ends.
    process.exitCode = 1;
  } finally {
    if (client) client.release();
    await pool.end();
  }
}

runMigrations();

View File

@@ -0,0 +1,77 @@
const { Pool } = require('pg');
const fs = require('fs');
const path = require('path');

// NOTE(review): the production connection string (with its password) was
// previously committed to source control. Require it from the environment
// and rotate the exposed credential.
const DATABASE_URL = process.env.DATABASE_URL;

if (!DATABASE_URL) {
  console.error('❌ DATABASE_URL environment variable is required');
  process.exit(1);
}

const pool = new Pool({
  connectionString: DATABASE_URL,
  max: 1, // single connection: migrations must run strictly in order
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 10000,
});

/**
 * Applies every pending .sql migration in ../src/models/migrations against
 * the production database, in lexicographic filename order, recording each
 * applied migration in a `migrations` bookkeeping table.
 *
 * Each migration runs inside a transaction together with its bookkeeping
 * INSERT so a failure never leaves a migration half-applied yet recorded.
 * Sets a non-zero exit code on failure.
 */
async function runMigrations() {
  console.log('Starting production database migrations...');
  // Mask the password before logging the connection string.
  console.log('Using DATABASE_URL:', DATABASE_URL.replace(/:[^:@]*@/, ':****@'));
  let client;
  try {
    client = await pool.connect();
    console.log('✅ Database connection successful');

    await client.query(`
      CREATE TABLE IF NOT EXISTS migrations (
        id VARCHAR(255) PRIMARY KEY,
        name VARCHAR(255) NOT NULL,
        executed_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
      );
    `);
    console.log('✅ Migrations table created or already exists');

    const migrationsDir = path.join(__dirname, '../src/models/migrations');
    const files = fs
      .readdirSync(migrationsDir)
      .filter((file) => file.endsWith('.sql'))
      .sort();
    console.log(`Found ${files.length} migration files`);

    for (const file of files) {
      const migrationId = file.replace('.sql', '');

      // Skip migrations that are already recorded as executed.
      const { rows } = await client.query('SELECT id FROM migrations WHERE id = $1', [migrationId]);
      if (rows.length > 0) {
        console.log(`⏭️ Migration ${migrationId} already executed, skipping`);
        continue;
      }

      const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf-8');
      console.log(`🔄 Executing migration: ${migrationId}`);

      // Apply the migration and record it atomically.
      await client.query('BEGIN');
      try {
        await client.query(sql);
        await client.query('INSERT INTO migrations (id, name) VALUES ($1, $2)', [migrationId, file]);
        await client.query('COMMIT');
      } catch (err) {
        await client.query('ROLLBACK');
        throw err;
      }
      console.log(`✅ Migration ${migrationId} completed`);
    }

    console.log('🎉 All production migrations completed successfully!');
  } catch (error) {
    console.error('❌ Migration failed:', error.message);
    console.error('Error details:', error);
    // Use exitCode (not process.exit) so the finally block can still
    // release the client and drain the pool before the process ends.
    process.exitCode = 1;
  } finally {
    if (client) client.release();
    await pool.end();
  }
}

runMigrations();

View File

@@ -0,0 +1,88 @@
const { createClient } = require('@supabase/supabase-js');

// Supabase configuration. NOTE(review): the service-role key was previously
// committed to source control; require it from the environment instead, and
// rotate the leaked key.
const SUPABASE_URL = process.env.SUPABASE_URL || 'https://gzoclmbqmgmpuhufbnhy.supabase.co';
const SUPABASE_SERVICE_KEY = process.env.SUPABASE_SERVICE_KEY;

if (!SUPABASE_SERVICE_KEY) {
  console.error('❌ SUPABASE_SERVICE_KEY environment variable is required');
  process.exit(1);
}

const serviceClient = createClient(SUPABASE_URL, SUPABASE_SERVICE_KEY);

// One entry per table to smoke-test: the table name, the display label used
// in error/success lines, and the noun used in the "Found N ..." message.
const TABLE_CHECKS = [
  { table: 'users', label: 'Users table', noun: 'users' },
  { table: 'documents', label: 'Documents table', noun: 'documents' },
  { table: 'document_versions', label: 'Document versions table', noun: 'versions' },
  { table: 'document_feedback', label: 'Document feedback table', noun: 'feedback entries' },
  { table: 'processing_jobs', label: 'Processing jobs table', noun: 'jobs' },
];

/**
 * Smoke-tests every essential table with a one-row select through the
 * service-role client. The success summary is only printed when every check
 * passed (the previous version printed it unconditionally); failures set a
 * non-zero exit code.
 */
async function testDatabaseWorking() {
  console.log('🔍 Testing essential database functionality...\n');
  let failures = 0;
  try {
    for (const [i, { table, label, noun }] of TABLE_CHECKS.entries()) {
      console.log(`${i === 0 ? '' : '\n'}${i + 1}⃣ Testing ${table} table...`);
      const { data, error } = await serviceClient.from(table).select('*').limit(1);
      if (error) {
        failures += 1;
        console.log(`❌ ${label} error: ${error.message}`);
      } else {
        console.log(`✅ ${label} working! Found ${data?.length || 0} ${noun}`);
      }
    }

    console.log('\n🎉 Database functionality test completed!');
    if (failures === 0) {
      console.log('📋 All essential tables are working correctly.');
      console.log('🚀 The application should now function without 500 errors.');
    } else {
      console.log(`⚠️ ${failures} table check(s) failed - see errors above.`);
      process.exitCode = 1;
    }
  } catch (error) {
    console.error('❌ Database test failed:', error.message);
    console.error('Error details:', error);
    process.exitCode = 1;
  }
}

testDatabaseWorking();

View File

@@ -0,0 +1,77 @@
const { Pool } = require('pg');

// Masks the password portion of a connection string for safe logging.
const maskUrl = (url) => url.replace(/:[^:@]*@/, ':****@');

// Candidate Supabase connection strings to probe, in priority order. An
// explicit DATABASE_URL from the environment always wins; the hard-coded
// fallbacks only carry the default 'postgres' password guesses.
const possibleUrls = process.env.DATABASE_URL
  ? [process.env.DATABASE_URL]
  : [
      'postgresql://postgres.gzoclmbqmgmpuhufbnhy:postgres@aws-0-us-east-1.pooler.supabase.com:6543/postgres',
      'postgresql://postgres.gzoclmbqmgmpuhufbnhy:postgres@db.gzoclmbqmgmpuhufbnhy.supabase.co:5432/postgres',
      'postgresql://postgres:postgres@db.gzoclmbqmgmpuhufbnhy.supabase.co:5432/postgres',
    ];

/**
 * Attempts to connect with one candidate URL, run a sanity query, and list
 * the public tables.
 * @param {string} url - Postgres connection string to try.
 * @param {number} index - Zero-based position, used only for log output.
 * @returns {Promise<{success: boolean, url: string, tables?: object[], error?: string}>}
 */
async function testConnection(url, index) {
  console.log(`\n🔍 Testing connection ${index + 1}: ${maskUrl(url)}`);
  const pool = new Pool({
    connectionString: url,
    max: 1,
    idleTimeoutMillis: 10000,
    connectionTimeoutMillis: 10000,
  });
  let client;
  try {
    client = await pool.connect();
    console.log(`✅ Connection ${index + 1} successful!`);

    const result = await client.query('SELECT NOW() as current_time');
    console.log(`✅ Query successful: ${result.rows[0].current_time}`);

    const tablesResult = await client.query(`
      SELECT table_name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      ORDER BY table_name
    `);
    console.log(`📋 Tables found: ${tablesResult.rows.length}`);
    if (tablesResult.rows.length > 0) {
      console.log('Tables:', tablesResult.rows.map((row) => row.table_name).join(', '));
    }
    return { success: true, url, tables: tablesResult.rows };
  } catch (error) {
    console.log(`❌ Connection ${index + 1} failed: ${error.message}`);
    return { success: false, url, error: error.message };
  } finally {
    // Always release the client and drain the pool, even when a query throws
    // after connect (the previous version leaked the client in that case).
    if (client) client.release();
    await pool.end();
  }
}

/**
 * Tries each candidate URL in order and stops at the first one that works.
 * @returns {Promise<object|null>} The successful result, or null if none worked.
 */
async function testAllConnections() {
  console.log('Testing production database connections...\n');
  const results = [];
  for (let i = 0; i < possibleUrls.length; i++) {
    const result = await testConnection(possibleUrls[i], i);
    results.push(result);
    if (result.success) {
      console.log(`\n🎉 Found working connection!`);
      console.log(`URL: ${maskUrl(result.url)}`);
      return result;
    }
  }
  console.log('\n❌ All connection attempts failed');
  results.forEach((result, index) => {
    console.log(`Connection ${index + 1}: ${result.error}`);
  });
  return null;
}

// Top-level entry point: don't let the promise float - report unexpected
// failures and exit non-zero when nothing connected.
testAllConnections()
  .then((result) => {
    if (!result) process.exitCode = 1;
  })
  .catch((error) => {
    console.error('❌ Unexpected failure:', error);
    process.exitCode = 1;
  });

View File

@@ -0,0 +1,89 @@
const { createClient } = require('@supabase/supabase-js');

// Supabase configuration. NOTE(review): both keys were previously committed
// to source control; the service-role key in particular grants unrestricted
// access. Require them from the environment and rotate the leaked keys.
const SUPABASE_URL = process.env.SUPABASE_URL || 'https://gzoclmbqmgmpuhufbnhy.supabase.co';
const SUPABASE_ANON_KEY = process.env.SUPABASE_ANON_KEY;
const SUPABASE_SERVICE_KEY = process.env.SUPABASE_SERVICE_KEY;

if (!SUPABASE_ANON_KEY || !SUPABASE_SERVICE_KEY) {
  console.error('❌ SUPABASE_ANON_KEY and SUPABASE_SERVICE_KEY environment variables are required');
  process.exit(1);
}

/**
 * Runs a one-row select on the users table with the given client and logs
 * the outcome.
 * @param {object} client - A Supabase client instance.
 * @param {string} label - Display name for log lines ('Anon' or 'Service').
 */
async function probeUsers(client, label) {
  const { data, error } = await client.from('users').select('*').limit(1);
  if (error) {
    console.log(`❌ ${label} client error: ${error.message}`);
  } else {
    console.log(`✅ ${label} client working! Found ${data?.length || 0} users`);
  }
}

/**
 * Connectivity diagnostic: exercises both the anon and service-role clients,
 * checks whether the documents table exists, and attempts to list tables.
 */
async function testSupabaseClient() {
  console.log('Testing Supabase client connection...');
  try {
    console.log('\n🔍 Testing with anon key...');
    const anonClient = createClient(SUPABASE_URL, SUPABASE_ANON_KEY);
    await probeUsers(anonClient, 'Anon');

    console.log('\n🔍 Testing with service key...');
    const serviceClient = createClient(SUPABASE_URL, SUPABASE_SERVICE_KEY);
    await probeUsers(serviceClient, 'Service');

    // The documents table is what the app's upload flow writes to; its
    // absence would explain 500 errors downstream.
    console.log('\n🔍 Testing documents table...');
    const { data: docsData, error: docsError } = await serviceClient
      .from('documents')
      .select('*')
      .limit(1);
    if (docsError) {
      console.log(`❌ Documents table error: ${docsError.message}`);
      if (docsError.message.includes('relation "documents" does not exist')) {
        console.log('📋 Documents table does not exist - this is the issue!');
      }
    } else {
      console.log(`✅ Documents table exists! Found ${docsData?.length || 0} documents`);
    }

    // Try a custom `get_tables` RPC first; if it isn't defined, fall back to
    // querying information_schema. NOTE(review): the REST API normally only
    // exposes configured schemas (public by default), so the fallback likely
    // fails too - confirm against the project's API settings.
    console.log('\n🔍 Listing all tables...');
    const { data: tablesData, error: tablesError } = await serviceClient.rpc('get_tables');
    if (tablesError) {
      console.log(`❌ Could not list tables: ${tablesError.message}`);
      const { data: schemaData, error: schemaError } = await serviceClient
        .from('information_schema.tables')
        .select('table_name')
        .eq('table_schema', 'public');
      if (schemaError) {
        console.log(`❌ Could not query schema: ${schemaError.message}`);
      } else {
        console.log(`✅ Found tables: ${schemaData?.map((t) => t.table_name).join(', ') || 'none'}`);
      }
    } else {
      console.log(`✅ Tables: ${tablesData?.join(', ') || 'none'}`);
    }
  } catch (error) {
    console.error('❌ Supabase client test failed:', error.message);
    console.error('Error details:', error);
    process.exitCode = 1;
  }
}

testSupabaseClient();