temp: firebase deployment progress

This commit is contained in:
Jon
2025-07-30 22:02:17 -04:00
parent 67b77b0f15
commit 2d98dfc814
31 changed files with 2311 additions and 1729 deletions

17
.gcloudignore Normal file
View File

@@ -0,0 +1,17 @@
# This file specifies files that are *not* uploaded to Google Cloud
# using gcloud. It follows the same syntax as .gitignore, with the addition of
# "#!include" directives (which insert the entries of the given .gitignore-style
# file at that point).
#
# For more information, run:
# $ gcloud topic gcloudignore
#
.gcloudignore
# If you would like to upload your .git directory, .gitignore file or files
# from your .gitignore file, remove the corresponding line
# below:
.git
.gitignore
node_modules
#!include:.gitignore

5
backend/.firebaserc Normal file
View File

@@ -0,0 +1,5 @@
{
"projects": {
"default": "cim-summarizer"
}
}

76
backend/.gcloudignore Normal file
View File

@@ -0,0 +1,76 @@
# This file specifies files that are *not* uploaded to Cloud Functions by gcloud.
# Files matching these patterns will be excluded from the deployed source.
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Build outputs
dist/
build/
.next/
out/
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
# Logs
logs/
*.log
firebase-debug.log
firebase-debug.*.log
# Test files
coverage/
.nyc_output
*.lcov
# Upload files and temporary data
uploads/
temp/
tmp/
# Documentation and markdown files
*.md
AGENTIC_RAG_DATABASE_INTEGRATION.md
DATABASE.md
HYBRID_IMPLEMENTATION_SUMMARY.md
RAG_PROCESSING_README.md
go-forward-fixes-summary.md
# Scripts and setup files
*.sh
setup-env.sh
fix-env-config.sh
# Database files
*.sql
supabase_setup.sql
# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Jest configuration
jest.config.js
# TypeScript config (we only need the transpiled JS)
tsconfig.json

57
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,57 @@
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Build outputs
dist/
build/
.next/
out/
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.env.development
.env.production
# Logs
logs/
*.log
firebase-debug.log
firebase-debug.*.log
# Test files
coverage/
.nyc_output
*.lcov
# Upload files and temporary data
uploads/
temp/
tmp/
# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~
# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Firebase
.firebase/
firebase-debug.log*
firebase-debug.*.log*

15
backend/deploy.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Build and deploy the backend API as a Google Cloud Function (gen2).
# Aborts on the first failing step.
set -e

# Run from this script's directory so `npm run build` and `--source .`
# resolve correctly regardless of the caller's working directory.
cd "$(dirname "$0")"

echo "Building TypeScript..."
npm run build

# NOTE: deployment goes through gcloud (Cloud Functions gen2), not the
# `firebase deploy` CLI — the previous message saying "Firebase" was misleading.
echo "Deploying function to Google Cloud Functions..."
gcloud functions deploy api \
  --gen2 \
  --runtime nodejs20 \
  --region us-central1 \
  --source . \
  --entry-point api \
  --trigger-http \
  --allow-unauthenticated

8
backend/firebase.json Normal file
View File

@@ -0,0 +1,8 @@
{
"functions": {
"source": ".",
"runtime": "nodejs20",
"ignore": ["node_modules"],
"predeploy": "npm run build"
}
}

1897
backend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -17,12 +17,15 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.57.0",
"@supabase/supabase-js": "^2.53.0",
"axios": "^1.11.0",
"bcryptjs": "^2.4.3",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"express": "^4.18.2",
"express-rate-limit": "^7.1.5",
"firebase-admin": "^13.4.0",
"firebase-functions": "^6.4.0",
"helmet": "^7.1.0",
"joi": "^17.11.0",
"jsonwebtoken": "^9.0.2",

View File

@@ -9,23 +9,28 @@ const envSchema = Joi.object({
NODE_ENV: Joi.string().valid('development', 'production', 'test').default('development'),
PORT: Joi.number().default(5000),
// Database
DATABASE_URL: Joi.string().required(),
// Database - Made optional for Firebase deployment with Supabase
DATABASE_URL: Joi.string().allow('').default(''),
DB_HOST: Joi.string().default('localhost'),
DB_PORT: Joi.number().default(5432),
DB_NAME: Joi.string().required(),
DB_USER: Joi.string().required(),
DB_PASSWORD: Joi.string().required(),
DB_NAME: Joi.string().allow('').default(''),
DB_USER: Joi.string().allow('').default(''),
DB_PASSWORD: Joi.string().allow('').default(''),
// Supabase Configuration
SUPABASE_URL: Joi.string().allow('').optional(),
SUPABASE_ANON_KEY: Joi.string().allow('').optional(),
SUPABASE_SERVICE_KEY: Joi.string().allow('').optional(),
// Redis
REDIS_URL: Joi.string().default('redis://localhost:6379'),
REDIS_HOST: Joi.string().default('localhost'),
REDIS_PORT: Joi.number().default(6379),
// JWT
JWT_SECRET: Joi.string().required(),
// JWT - Optional for Firebase Auth
JWT_SECRET: Joi.string().default('default-jwt-secret-change-in-production'),
JWT_EXPIRES_IN: Joi.string().default('1h'),
JWT_REFRESH_SECRET: Joi.string().required(),
JWT_REFRESH_SECRET: Joi.string().default('default-refresh-secret-change-in-production'),
JWT_REFRESH_EXPIRES_IN: Joi.string().default('7d'),
// File Upload
@@ -137,6 +142,12 @@ export const config = {
password: envVars.DB_PASSWORD,
},
supabase: {
url: envVars.SUPABASE_URL,
anonKey: envVars.SUPABASE_ANON_KEY,
serviceKey: envVars.SUPABASE_SERVICE_KEY,
},
redis: {
url: envVars.REDIS_URL,
host: envVars.REDIS_HOST,
@@ -260,7 +271,7 @@ export const config = {
// Vector Database Configuration
vector: {
provider: envVars['VECTOR_PROVIDER'] || 'pgvector', // 'pinecone' | 'pgvector' | 'chroma'
provider: envVars['VECTOR_PROVIDER'] || 'supabase', // 'pinecone' | 'pgvector' | 'chroma' | 'supabase'
// Pinecone Configuration
pineconeApiKey: envVars['PINECONE_API_KEY'],

View File

@@ -0,0 +1,47 @@
/**
 * Central timeout / retry / message configuration for client-facing error
 * handling. All durations are in milliseconds, written as arithmetic
 * (e.g. `5 * 60 * 1000`) so the unit breakdown is visible at a glance —
 * the raw values are unchanged from the original literals.
 */
export const errorConfig = {
  // Authentication timeouts
  auth: {
    tokenRefreshInterval: 45 * 60 * 1000, // 45 minutes
    sessionTimeout: 60 * 60 * 1000, // 1 hour
    maxRetryAttempts: 3,
  },
  // Upload timeouts
  upload: {
    maxUploadTime: 5 * 60 * 1000, // 5 minutes
    maxFileSize: 100 * 1024 * 1024, // 100MB
    progressCheckInterval: 2 * 1000, // 2 seconds
  },
  // Processing timeouts
  processing: {
    maxProcessingTime: 30 * 60 * 1000, // 30 minutes
    progressUpdateInterval: 5 * 1000, // 5 seconds
    maxRetries: 3,
  },
  // Network timeouts
  network: {
    requestTimeout: 30 * 1000, // 30 seconds
    retryDelay: 1000, // 1 second
    maxRetries: 3,
  },
  // User-facing error messages (keep wording stable — surfaced directly in the UI)
  messages: {
    tokenExpired: 'Your session has expired. Please log in again.',
    uploadFailed: 'File upload failed. Please try again.',
    processingFailed: 'Document processing failed. Please try again.',
    networkError: 'Network error. Please check your connection and try again.',
    unauthorized: 'You are not authorized to perform this action.',
    serverError: 'Server error. Please try again later.',
  },
  // Logging levels per subsystem
  logging: {
    auth: 'info',
    upload: 'info',
    processing: 'info',
    error: 'error',
  },
};

View File

@@ -0,0 +1,49 @@
import admin from 'firebase-admin';

// Initialize Firebase Admin SDK exactly once at module load (guarded by
// admin.apps.length so re-imports are no-ops). Initialization strategy, in order:
//   1. Cloud Functions runtime  -> default credentials + explicit project id
//   2. Local dev with a key file -> service-account cert
//   3. Local dev without a key   -> default credentials + hard-coded project id
//   4. Anything else failed      -> bare initializeApp() as a last resort
// Failures are logged but never thrown, so a misconfigured environment does not
// crash the whole app at import time.
if (!admin.apps.length) {
  try {
    // Check if we're running in Firebase Functions environment.
    // FUNCTION_TARGET is set by the gen2 runtime; FUNCTIONS_EMULATOR by the local emulator.
    const isCloudFunction = process.env['FUNCTION_TARGET'] || process.env['FUNCTIONS_EMULATOR'];
    if (isCloudFunction) {
      // In Firebase Functions, use default initialization — application default
      // credentials are provided by the runtime, only the project id is pinned.
      admin.initializeApp({
        projectId: process.env['GCLOUD_PROJECT'] || 'cim-summarizer',
      });
      console.log('Firebase Admin SDK initialized for Cloud Functions');
    } else {
      // For local development, try to use service account key if available.
      // require() throws synchronously when the file is absent, which is what
      // drives the fallback below.
      try {
        const serviceAccount = require('../../serviceAccountKey.json');
        admin.initializeApp({
          credential: admin.credential.cert(serviceAccount),
          projectId: 'cim-summarizer',
        });
        console.log('Firebase Admin SDK initialized with service account');
      } catch (serviceAccountError) {
        // Fallback to default initialization (application default credentials,
        // e.g. from `gcloud auth application-default login`).
        admin.initializeApp({
          projectId: 'cim-summarizer',
        });
        console.log('Firebase Admin SDK initialized with default credentials');
      }
    }
    console.log('Firebase apps count:', admin.apps.length);
    console.log('Project ID:', admin.app().options.projectId);
  } catch (error) {
    console.error('Failed to initialize Firebase Admin SDK:', error);
    // Final fallback: try with minimal config (no project id at all).
    try {
      admin.initializeApp();
      console.log('Firebase Admin SDK initialized with minimal fallback');
    } catch (fallbackError) {
      console.error('All Firebase initialization attempts failed:', fallbackError);
      // Don't throw here to prevent the entire app from crashing —
      // downstream admin.* calls will surface the misconfiguration instead.
    }
  }
}

export default admin;

View File

@@ -0,0 +1,56 @@
import { createClient, SupabaseClient } from '@supabase/supabase-js';
import { config } from './env';
import { logger } from '../utils/logger';

// Lazily-created singletons: the module can be imported even when Supabase
// credentials are absent; clients are only built on first use.
let supabase: SupabaseClient | null = null;
let supabaseService: SupabaseClient | null = null;

/**
 * Returns the shared anon-key Supabase client, creating it on first call.
 *
 * @returns the memoized anon-key client.
 * @throws Error when SUPABASE_URL or SUPABASE_ANON_KEY is not configured.
 */
export const getSupabaseClient = (): SupabaseClient => {
  if (!supabase) {
    const supabaseUrl = config.supabase?.url;
    const supabaseKey = config.supabase?.anonKey;
    if (!supabaseUrl || !supabaseKey) {
      logger.warn('Supabase credentials not configured, some features may not work');
      throw new Error('Supabase configuration missing');
    }
    supabase = createClient(supabaseUrl, supabaseKey);
    logger.info('Supabase client initialized');
  }
  return supabase;
};

/**
 * Returns the shared service-role Supabase client, creating it on first call.
 *
 * Now memoized like getSupabaseClient — the previous implementation built a
 * brand-new client (with its own connection state) on every invocation.
 *
 * @returns the memoized service-role client.
 * @throws Error when SUPABASE_URL or SUPABASE_SERVICE_KEY is not configured.
 */
export const getSupabaseServiceClient = (): SupabaseClient => {
  if (!supabaseService) {
    const supabaseUrl = config.supabase?.url;
    const supabaseServiceKey = config.supabase?.serviceKey;
    if (!supabaseUrl || !supabaseServiceKey) {
      logger.warn('Supabase service credentials not configured');
      throw new Error('Supabase service configuration missing');
    }
    supabaseService = createClient(supabaseUrl, supabaseServiceKey);
    logger.info('Supabase service client initialized');
  }
  return supabaseService;
};

/**
 * Lightweight connectivity probe: queries a throwaway table and treats a
 * "relation does not exist" error as success, since reaching the server at
 * all is what is being tested.
 *
 * @returns true when the Supabase server responded; false on any real failure.
 */
export const testSupabaseConnection = async (): Promise<boolean> => {
  try {
    const client = getSupabaseClient();
    const { error } = await client.from('_health_check').select('*').limit(1);
    // A missing table still proves the connection works; any other error is a failure.
    if (error && !error.message.includes('relation "_health_check" does not exist')) {
      logger.error('Supabase connection test failed:', error);
      return false;
    }
    logger.info('Supabase connection test successful');
    return true;
  } catch (error) {
    logger.error('Supabase connection test failed:', error);
    return false;
  }
};

export default getSupabaseClient;

View File

@@ -1,14 +1,5 @@
import { Request, Response } from 'express';
import { AuthenticatedRequest } from '../middleware/auth';
import { UserModel } from '../models/UserModel';
import {
generateAuthTokens,
verifyRefreshToken,
hashPassword,
comparePassword,
validatePassword
} from '../utils/auth';
import { sessionService } from '../services/sessionService';
import logger from '../utils/logger';
export interface RegisterRequest extends Request {
@@ -33,432 +24,106 @@ export interface RefreshTokenRequest extends Request {
}
/**
* Register a new user
* DEPRECATED: Legacy auth controller
* All auth functions are now handled by Firebase Auth
*/
export async function register(req: RegisterRequest, res: Response): Promise<void> {
try {
const { email, name, password } = req.body;
// Validate input
if (!email || !name || !password) {
res.status(400).json({
success: false,
message: 'Email, name, and password are required'
});
return;
}
// Validate email format
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(email)) {
res.status(400).json({
success: false,
message: 'Invalid email format'
});
return;
}
// Validate password strength
const passwordValidation = validatePassword(password);
if (!passwordValidation.isValid) {
res.status(400).json({
success: false,
message: 'Password does not meet requirements',
errors: passwordValidation.errors
});
return;
}
// Check if user already exists
const existingUser = await UserModel.findByEmail(email);
if (existingUser) {
res.status(409).json({
success: false,
message: 'User with this email already exists'
});
return;
}
// Hash password
const hashedPassword = await hashPassword(password);
// Create user
const user = await UserModel.create({
email,
name,
password: hashedPassword,
role: 'user'
});
// Generate tokens
const tokens = generateAuthTokens({
userId: user.id,
email: user.email,
role: user.role
});
// Store session
await sessionService.storeSession(user.id, {
userId: user.id,
email: user.email,
role: user.role,
refreshToken: tokens.refreshToken
});
logger.info(`New user registered: ${email}`);
res.status(201).json({
success: true,
message: 'User registered successfully',
data: {
user: {
id: user.id,
email: user.email,
name: user.name,
role: user.role
},
tokens: {
accessToken: tokens.accessToken,
refreshToken: tokens.refreshToken,
expiresIn: tokens.expiresIn
}
}
});
} catch (error) {
logger.error('Registration error:', error);
res.status(500).json({
export const authController = {
async register(_req: RegisterRequest, res: Response): Promise<void> {
logger.warn('Legacy register endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Internal server error during registration'
message: 'Legacy registration is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async login(_req: LoginRequest, res: Response): Promise<void> {
logger.warn('Legacy login endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy login is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async refreshToken(_req: RefreshTokenRequest, res: Response): Promise<void> {
logger.warn('Legacy refresh token endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy token refresh is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async logout(_req: AuthenticatedRequest, res: Response): Promise<void> {
logger.warn('Legacy logout endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy logout is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async getProfile(_req: AuthenticatedRequest, res: Response): Promise<void> {
logger.warn('Legacy profile endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy profile access is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async updateProfile(_req: AuthenticatedRequest, res: Response): Promise<void> {
logger.warn('Legacy profile update endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy profile updates are disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async changePassword(_req: AuthenticatedRequest, res: Response): Promise<void> {
logger.warn('Legacy password change endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy password changes are disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async deleteAccount(_req: AuthenticatedRequest, res: Response): Promise<void> {
logger.warn('Legacy account deletion endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy account deletion is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async verifyEmail(_req: Request, res: Response): Promise<void> {
logger.warn('Legacy email verification endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy email verification is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async requestPasswordReset(_req: Request, res: Response): Promise<void> {
logger.warn('Legacy password reset endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy password reset is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
},
async resetPassword(_req: Request, res: Response): Promise<void> {
logger.warn('Legacy password reset endpoint is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy password reset is disabled. Use Firebase Auth instead.',
error: 'DEPRECATED_ENDPOINT'
});
}
}
/**
* Login user
*/
export async function login(req: LoginRequest, res: Response): Promise<void> {
try {
const { email, password } = req.body;
// Validate input
if (!email || !password) {
res.status(400).json({
success: false,
message: 'Email and password are required'
});
return;
}
// Find user by email
const user = await UserModel.findByEmail(email);
if (!user) {
res.status(401).json({
success: false,
message: 'Invalid email or password'
});
return;
}
// Check if user is active
if (!user.is_active) {
res.status(401).json({
success: false,
message: 'Account is deactivated'
});
return;
}
// Verify password
const isPasswordValid = await comparePassword(password, user.password_hash);
if (!isPasswordValid) {
res.status(401).json({
success: false,
message: 'Invalid email or password'
});
return;
}
// Generate tokens
const tokens = generateAuthTokens({
userId: user.id,
email: user.email,
role: user.role
});
// Store session
await sessionService.storeSession(user.id, {
userId: user.id,
email: user.email,
role: user.role,
refreshToken: tokens.refreshToken
});
// Update last login
await UserModel.updateLastLogin(user.id);
logger.info(`User logged in: ${email}`);
res.status(200).json({
success: true,
message: 'Login successful',
data: {
user: {
id: user.id,
email: user.email,
name: user.name,
role: user.role
},
tokens: {
accessToken: tokens.accessToken,
refreshToken: tokens.refreshToken,
expiresIn: tokens.expiresIn
}
}
});
} catch (error) {
logger.error('Login error:', error);
res.status(500).json({
success: false,
message: 'Internal server error during login'
});
}
}
/**
* Logout user
*/
export async function logout(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
if (!req.user) {
res.status(401).json({
success: false,
message: 'Authentication required'
});
return;
}
// Get the token from header for blacklisting
const authHeader = req.headers.authorization;
if (authHeader) {
const token = authHeader.split(' ')[1];
if (token) {
// Blacklist the access token
await sessionService.blacklistToken(token, 3600); // 1 hour
}
}
// Remove session
await sessionService.removeSession(req.user.id);
logger.info(`User logged out: ${req.user.email}`);
res.status(200).json({
success: true,
message: 'Logout successful'
});
} catch (error) {
logger.error('Logout error:', error);
res.status(500).json({
success: false,
message: 'Internal server error during logout'
});
}
}
/**
* Refresh access token
*/
export async function refreshToken(req: RefreshTokenRequest, res: Response): Promise<void> {
try {
const { refreshToken } = req.body;
if (!refreshToken) {
res.status(400).json({
success: false,
message: 'Refresh token is required'
});
return;
}
// Verify refresh token
const decoded = verifyRefreshToken(refreshToken);
// Check if user exists and is active
const user = await UserModel.findById(decoded.userId);
if (!user || !user.is_active) {
res.status(401).json({
success: false,
message: 'Invalid refresh token'
});
return;
}
// Check if session exists and matches
const session = await sessionService.getSession(decoded.userId);
if (!session || session.refreshToken !== refreshToken) {
res.status(401).json({
success: false,
message: 'Invalid refresh token'
});
return;
}
// Generate new tokens
const tokens = generateAuthTokens({
userId: user.id,
email: user.email,
role: user.role
});
// Update session with new refresh token
await sessionService.storeSession(user.id, {
userId: user.id,
email: user.email,
role: user.role,
refreshToken: tokens.refreshToken
});
// Blacklist old refresh token
await sessionService.blacklistToken(refreshToken, 86400); // 24 hours
logger.info(`Token refreshed for user: ${user.email}`);
res.status(200).json({
success: true,
message: 'Token refreshed successfully',
data: {
tokens: {
accessToken: tokens.accessToken,
refreshToken: tokens.refreshToken,
expiresIn: tokens.expiresIn
}
}
});
} catch (error) {
logger.error('Token refresh error:', error);
res.status(401).json({
success: false,
message: 'Invalid refresh token'
});
}
}
/**
* Get current user profile
*/
export async function getProfile(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
if (!req.user) {
res.status(401).json({
success: false,
message: 'Authentication required'
});
return;
}
const user = await UserModel.findById(req.user.id);
if (!user) {
res.status(404).json({
success: false,
message: 'User not found'
});
return;
}
res.status(200).json({
success: true,
data: {
user: {
id: user.id,
email: user.email,
name: user.name,
role: user.role,
created_at: user.created_at,
last_login: user.last_login
}
}
});
} catch (error) {
logger.error('Get profile error:', error);
res.status(500).json({
success: false,
message: 'Internal server error'
});
}
}
/**
* Update user profile
*/
export async function updateProfile(req: AuthenticatedRequest, res: Response): Promise<void> {
try {
if (!req.user) {
res.status(401).json({
success: false,
message: 'Authentication required'
});
return;
}
const { name, email } = req.body;
// Validate input
if (email) {
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(email)) {
res.status(400).json({
success: false,
message: 'Invalid email format'
});
return;
}
// Check if email is already taken by another user
const existingUser = await UserModel.findByEmail(email);
if (existingUser && existingUser.id !== req.user.id) {
res.status(409).json({
success: false,
message: 'Email is already taken'
});
return;
}
}
// Update user
const updatedUser = await UserModel.update(req.user.id, {
name: name || undefined,
email: email || undefined
});
if (!updatedUser) {
res.status(404).json({
success: false,
message: 'User not found'
});
return;
}
logger.info(`Profile updated for user: ${req.user.email}`);
res.status(200).json({
success: true,
message: 'Profile updated successfully',
data: {
user: {
id: updatedUser.id,
email: updatedUser.email,
name: updatedUser.name,
role: updatedUser.role,
created_at: updatedUser.created_at,
last_login: updatedUser.last_login
}
}
});
} catch (error) {
logger.error('Update profile error:', error);
res.status(500).json({
success: false,
message: 'Internal server error'
});
}
}
};

View File

@@ -8,7 +8,7 @@ import { uploadProgressService } from '../services/uploadProgressService';
export const documentController = {
async uploadDocument(req: Request, res: Response): Promise<void> {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
res.status(401).json({ error: 'User not authenticated' });
return;
@@ -85,7 +85,7 @@ export const documentController = {
async getDocuments(req: Request, res: Response): Promise<void> {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
res.status(401).json({ error: 'User not authenticated' });
return;
@@ -116,7 +116,7 @@ export const documentController = {
async getDocument(req: Request, res: Response): Promise<void> {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
res.status(401).json({ error: 'User not authenticated' });
return;
@@ -164,7 +164,7 @@ export const documentController = {
async getDocumentProgress(req: Request, res: Response): Promise<void> {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
res.status(401).json({ error: 'User not authenticated' });
return;
@@ -219,7 +219,7 @@ export const documentController = {
async deleteDocument(req: Request, res: Response): Promise<void> {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
res.status(401).json({ error: 'User not authenticated' });
return;

View File

@@ -1,3 +1,6 @@
// Initialize Firebase Admin SDK first
import './config/firebase';
import express from 'express';
import cors from 'cors';
import helmet from 'helmet';
@@ -5,15 +8,17 @@ import morgan from 'morgan';
import rateLimit from 'express-rate-limit';
import { config } from './config/env';
import { logger } from './utils/logger';
import authRoutes from './routes/auth';
import documentRoutes from './routes/documents';
import vectorRoutes from './routes/vector';
import { errorHandler } from './middleware/errorHandler';
import { notFoundHandler } from './middleware/notFoundHandler';
import { jobQueueService } from './services/jobQueueService';
const app = express();
const PORT = config.port || 5000;
// Enable trust proxy to ensure Express works correctly behind the proxy
app.set('trust proxy', 1);
// Security middleware
app.use(helmet({
@@ -28,11 +33,27 @@ app.use(helmet({
}));
// CORS configuration
const allowedOrigins = [
'https://cim-summarizer.web.app',
'https://cim-summarizer.firebaseapp.com',
'http://localhost:3000',
'http://localhost:5173'
];
app.use(cors({
origin: config.frontendUrl || 'http://localhost:3000',
origin: function (origin, callback) {
console.log('CORS request from origin:', origin);
if (!origin || allowedOrigins.indexOf(origin) !== -1) {
callback(null, true);
} else {
console.log('CORS blocked origin:', origin);
callback(new Error('Not allowed by CORS'));
}
},
credentials: true,
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
allowedHeaders: ['Content-Type', 'Authorization'],
allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'],
optionsSuccessStatus: 200
}));
// Rate limiting
@@ -97,19 +118,21 @@ app.get('/health/agentic-rag/metrics', async (_req, res) => {
}
});
// API routes
app.use('/api/auth', authRoutes);
app.use('/api/documents', documentRoutes);
app.use('/api/vector', vectorRoutes);
// API routes - remove the /api prefix as it's handled by Firebase
app.use('/documents', documentRoutes);
app.use('/vector', vectorRoutes);
import * as functions from 'firebase-functions';
// API root endpoint
app.get('/api', (_req, res) => { // _req to fix TS6133
app.get('/', (_req, res) => { // _req to fix TS6133
res.json({
message: 'CIM Document Processor API',
version: '1.0.0',
endpoints: {
auth: '/api/auth',
documents: '/api/documents',
auth: '/auth',
documents: '/documents',
health: '/health',
agenticRagHealth: '/health/agentic-rag',
agenticRagMetrics: '/health/agentic-rag/metrics',
@@ -123,51 +146,18 @@ app.use(notFoundHandler);
// Global error handler (must be last)
app.use(errorHandler);
// Start server
const server = app.listen(PORT, () => {
logger.info(`🚀 Server running on port ${PORT}`);
logger.info(`📊 Environment: ${config.nodeEnv}`);
logger.info(`🔗 API URL: http://localhost:${PORT}/api`);
logger.info(`🏥 Health check: http://localhost:${PORT}/health`);
});
// Initialize job queue service for document processing
import { jobQueueService } from './services/jobQueueService';
// Start job queue service
jobQueueService.start();
logger.info('📋 Job queue service started');
// Start the job queue service asynchronously to avoid blocking function startup
// Use a longer delay to ensure the function is fully initialized
setTimeout(() => {
try {
jobQueueService.start();
logger.info('Job queue service started successfully');
} catch (error) {
logger.error('Failed to start job queue service', { error });
}
}, 5000);
// Graceful shutdown
const gracefulShutdown = (signal: string) => {
logger.info(`${signal} received, shutting down gracefully`);
// Stop accepting new connections
server.close(async () => {
logger.info('HTTP server closed');
// Stop job queue service
jobQueueService.stop();
logger.info('Job queue service stopped');
// Stop upload progress service
try {
const { uploadProgressService } = await import('./services/uploadProgressService');
uploadProgressService.stop();
logger.info('Upload progress service stopped');
} catch (error) {
logger.warn('Could not stop upload progress service', { error });
}
logger.info('Process terminated');
process.exit(0);
});
// Force close after 30 seconds
setTimeout(() => {
logger.error('Could not close connections in time, forcefully shutting down');
process.exit(1);
}, 30000);
};
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
export default app;
export const api = functions.https.onRequest(app);

View File

@@ -1,244 +1,107 @@
import { Request, Response, NextFunction } from 'express';
import { verifyAccessToken, extractTokenFromHeader } from '../utils/auth';
import { sessionService } from '../services/sessionService';
import { UserModel } from '../models/UserModel';
import logger from '../utils/logger';
export interface AuthenticatedRequest extends Request {
user?: {
id: string;
email: string;
role: string;
};
user?: import('firebase-admin').auth.DecodedIdToken;
}
/**
* Authentication middleware to verify JWT tokens
* DEPRECATED: Legacy authentication middleware
* Use Firebase Auth instead via ../middleware/firebaseAuth
*/
export async function authenticateToken(
req: AuthenticatedRequest,
_req: AuthenticatedRequest,
res: Response,
next: NextFunction
_next: NextFunction
): Promise<void> {
try {
const authHeader = req.headers.authorization;
const token = extractTokenFromHeader(authHeader);
if (!token) {
res.status(401).json({
success: false,
message: 'Access token is required'
});
return;
}
// Check if token is blacklisted
const isBlacklisted = await sessionService.isTokenBlacklisted(token);
if (isBlacklisted) {
res.status(401).json({
success: false,
message: 'Token has been revoked'
});
return;
}
// Verify the token
const decoded = verifyAccessToken(token);
// Check if user still exists and is active
const user = await UserModel.findById(decoded.userId);
if (!user || !user.is_active) {
res.status(401).json({
success: false,
message: 'User account is inactive or does not exist'
});
return;
}
// Check if session exists
const session = await sessionService.getSession(decoded.userId);
if (!session) {
res.status(401).json({
success: false,
message: 'Session expired, please login again'
});
return;
}
// Attach user info to request
req.user = {
id: decoded.userId,
email: decoded.email,
role: decoded.role
};
logger.info(`Authenticated request for user: ${decoded.email}`);
next();
} catch (error) {
logger.error('Authentication error:', error);
res.status(401).json({
success: false,
message: 'Invalid or expired token'
});
}
logger.warn('Legacy auth middleware is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy authentication is disabled. Use Firebase Auth instead.'
});
}
// Alias for backward compatibility
export const auth = authenticateToken;
/**
* Role-based authorization middleware
* DEPRECATED: Role-based authorization middleware
*/
export function requireRole(allowedRoles: string[]) {
return (req: AuthenticatedRequest, res: Response, next: NextFunction): void => {
if (!req.user) {
res.status(401).json({
success: false,
message: 'Authentication required'
});
return;
}
if (!allowedRoles.includes(req.user.role)) {
res.status(403).json({
success: false,
message: 'Insufficient permissions'
});
return;
}
logger.info(`Authorized request for user: ${req.user.email} with role: ${req.user.role}`);
next();
export function requireRole(_allowedRoles: string[]) {
return (_req: AuthenticatedRequest, res: Response, _next: NextFunction): void => {
logger.warn('Legacy role-based auth is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy role-based authentication is disabled. Use Firebase Auth instead.'
});
};
}
/**
* Admin-only middleware
* DEPRECATED: Admin-only middleware
*/
export function requireAdmin(
req: AuthenticatedRequest,
_req: AuthenticatedRequest,
res: Response,
next: NextFunction
_next: NextFunction
): void {
requireRole(['admin'])(req, res, next);
logger.warn('Legacy admin auth is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy admin authentication is disabled. Use Firebase Auth instead.'
});
}
/**
* User or admin middleware
* DEPRECATED: User or admin middleware
*/
export function requireUserOrAdmin(
req: AuthenticatedRequest,
_req: AuthenticatedRequest,
res: Response,
next: NextFunction
_next: NextFunction
): void {
requireRole(['user', 'admin'])(req, res, next);
logger.warn('Legacy user/admin auth is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy user/admin authentication is disabled. Use Firebase Auth instead.'
});
}
/**
 * DEPRECATED: Legacy optional-authentication middleware.
 *
 * Optional auth must never block a request, so this stub logs at debug
 * level and continues without attaching a user to the request.
 */
export async function optionalAuth(
  _req: AuthenticatedRequest,
  _res: Response,
  next: NextFunction
): Promise<void> {
  logger.debug('Legacy optional auth is deprecated. Use Firebase Auth instead.');
  // For optional auth, we just continue without authentication
  next();
}
/**
 * Rate limiting middleware for authentication endpoints.
 * DEPRECATED: currently a no-op pass-through; no limiting is applied.
 */
export function authRateLimit(
  _req: Request,
  _res: Response,
  next: NextFunction
): void {
  // This would typically integrate with a rate limiting library
  // For now, we'll just pass through
  // TODO: Implement proper rate limiting
  next();
}
/**
* Logout middleware to invalidate session
* DEPRECATED: Logout middleware
*/
export async function logout(
req: AuthenticatedRequest,
_req: AuthenticatedRequest,
res: Response,
next: NextFunction
_next: NextFunction
): Promise<void> {
try {
if (!req.user) {
res.status(401).json({
success: false,
message: 'Authentication required'
});
return;
}
// Remove session
await sessionService.removeSession(req.user.id);
// Update last login in database
await UserModel.updateLastLogin(req.user.id);
logger.info(`User logged out: ${req.user.email}`);
next();
} catch (error) {
logger.error('Logout error:', error);
res.status(500).json({
success: false,
message: 'Error during logout'
});
}
}
logger.warn('Legacy logout is deprecated. Use Firebase Auth instead.');
res.status(501).json({
success: false,
message: 'Legacy logout is disabled. Use Firebase Auth instead.'
});
}

View File

@@ -0,0 +1,116 @@
import { Request, Response, NextFunction } from 'express';
import admin from 'firebase-admin';
import { logger } from '../utils/logger';
// Initialize Firebase Admin once per process; initializeApp() with no
// arguments uses the default credentials of the runtime environment.
if (!admin.apps.length) {
  admin.initializeApp();
}

// Express Request carrying the decoded Firebase ID token once a request
// has been authenticated.
export interface FirebaseAuthenticatedRequest extends Request {
  user?: admin.auth.DecodedIdToken;
}
/**
 * Express middleware that authenticates requests with a Firebase ID token.
 *
 * Expects an `Authorization: Bearer <idToken>` header. On success the
 * decoded token is attached as `req.user` and the chain continues; on
 * failure a 401 is returned with a machine-readable error code where the
 * Firebase error code is known.
 *
 * If strict verification (with revocation check) fails, a second attempt
 * is made without the revocation check as a "session recovery" fallback.
 * NOTE(review): that fallback will accept tokens that have been revoked
 * but are otherwise valid — confirm this trade-off is intended.
 */
export const verifyFirebaseToken = async (
  req: FirebaseAuthenticatedRequest,
  res: Response,
  next: NextFunction
): Promise<void> => {
  try {
    const authHeader = req.headers.authorization;
    if (!authHeader || !authHeader.startsWith('Bearer ')) {
      res.status(401).json({ error: 'No valid authorization header' });
      return;
    }

    const idToken = authHeader.split('Bearer ')[1];
    if (!idToken) {
      res.status(401).json({ error: 'No token provided' });
      return;
    }

    // Verify the Firebase ID token (second argument = checkRevoked).
    const decodedToken = await admin.auth().verifyIdToken(idToken, true);

    // Defensive expiry check; verifyIdToken already rejects expired
    // tokens, so this is belt-and-braces only.
    const now = Math.floor(Date.now() / 1000);
    if (decodedToken.exp && decodedToken.exp < now) {
      logger.warn('Token expired for user:', decodedToken.uid);
      res.status(401).json({ error: 'Token expired' });
      return;
    }

    req.user = decodedToken;

    // Log successful authentication
    logger.info('Authenticated request for user:', decodedToken.email);
    next();
  } catch (error: any) {
    logger.error('Firebase token verification failed:', {
      error: error.message,
      code: error.code,
      ip: req.ip,
      userAgent: req.get('User-Agent')
    });

    // Session-recovery fallback: retry verification without the
    // revocation check (see NOTE in the doc comment above).
    try {
      const authHeader = req.headers.authorization;
      if (authHeader && authHeader.startsWith('Bearer ')) {
        const idToken = authHeader.split('Bearer ')[1];
        if (idToken) {
          const decodedToken = await admin.auth().verifyIdToken(idToken, false);
          req.user = decodedToken;
          logger.info('Recovered authentication from session for user:', decodedToken.email);
          next();
          return;
        }
      }
    } catch (recoveryError) {
      logger.debug('Session recovery failed:', recoveryError);
    }

    // Map well-known Firebase error codes to specific client responses.
    if (error.code === 'auth/id-token-expired') {
      res.status(401).json({ error: 'Token expired', code: 'TOKEN_EXPIRED' });
    } else if (error.code === 'auth/id-token-revoked') {
      res.status(401).json({ error: 'Token revoked', code: 'TOKEN_REVOKED' });
    } else if (error.code === 'auth/invalid-id-token') {
      res.status(401).json({ error: 'Invalid token', code: 'INVALID_TOKEN' });
    } else {
      res.status(401).json({ error: 'Invalid token' });
    }
  }
};
/**
 * Best-effort Firebase authentication: attaches the decoded ID token to
 * `req.user` when a valid bearer token is present, but never rejects the
 * request. Verification failures are logged at debug level and the chain
 * always continues.
 */
export const optionalFirebaseAuth = async (
  req: FirebaseAuthenticatedRequest,
  _res: Response,
  next: NextFunction
): Promise<void> => {
  const header = req.headers.authorization;
  const bearerToken =
    header && header.startsWith('Bearer ') ? header.split('Bearer ')[1] : undefined;

  if (bearerToken) {
    try {
      // checkRevoked = true: revoked tokens are treated as anonymous.
      req.user = await admin.auth().verifyIdToken(bearerToken, true);
    } catch (error) {
      // Silently ignore auth errors for optional auth
      logger.debug('Optional auth failed:', error);
    }
  }
  next();
};

View File

@@ -141,8 +141,30 @@ export const handleUploadError = (error: any, req: Request, res: Response, next:
next();
};
// Main upload middleware
export const uploadMiddleware = upload.single('document');
// Main upload middleware with timeout handling
export const uploadMiddleware = (req: Request, res: Response, next: NextFunction) => {
// Set a timeout for the upload
const uploadTimeout = setTimeout(() => {
logger.error('Upload timeout for request:', {
ip: req.ip,
userAgent: req.get('User-Agent'),
});
res.status(408).json({
success: false,
error: 'Upload timeout',
message: 'Upload took too long to complete',
});
}, 300000); // 5 minutes timeout
// Clear timeout on successful upload
const originalNext = next;
next = (err?: any) => {
clearTimeout(uploadTimeout);
originalNext(err);
};
upload.single('document')(req, res, next);
};
// Combined middleware for file uploads
export const handleFileUpload = [

View File

@@ -1,59 +0,0 @@
import { Router } from 'express';
import {
register,
login,
logout,
refreshToken,
getProfile,
updateProfile
} from '../controllers/authController';
import {
authenticateToken,
authRateLimit
} from '../middleware/auth';
const router = Router();
/**
* @route POST /api/auth/register
* @desc Register a new user
* @access Public
*/
router.post('/register', authRateLimit, register);
/**
* @route POST /api/auth/login
* @desc Login user
* @access Public
*/
router.post('/login', authRateLimit, login);
/**
* @route POST /api/auth/logout
* @desc Logout user
* @access Private
*/
router.post('/logout', authenticateToken, logout);
/**
* @route POST /api/auth/refresh
* @desc Refresh access token
* @access Public
*/
router.post('/refresh', authRateLimit, refreshToken);
/**
* @route GET /api/auth/profile
* @desc Get current user profile
* @access Private
*/
router.get('/profile', authenticateToken, getProfile);
/**
* @route PUT /api/auth/profile
* @desc Update current user profile
* @access Private
*/
router.put('/profile', authenticateToken, updateProfile);
export default router;

View File

@@ -1,5 +1,5 @@
import express from 'express';
import { authenticateToken } from '../middleware/auth';
import { verifyFirebaseToken } from '../middleware/firebaseAuth';
import { documentController } from '../controllers/documentController';
import { unifiedDocumentProcessor } from '../services/unifiedDocumentProcessor';
import { logger } from '../utils/logger';
@@ -11,11 +11,7 @@ import { DocumentModel } from '../models/DocumentModel';
declare global {
namespace Express {
interface Request {
user?: {
id: string;
email: string;
role: string;
};
user?: import('firebase-admin').auth.DecodedIdToken;
}
}
}
@@ -23,7 +19,7 @@ declare global {
const router = express.Router();
// Apply authentication to all routes
router.use(authenticateToken);
router.use(verifyFirebaseToken);
// Essential document management routes (keeping these)
router.post('/upload', handleFileUpload, documentController.uploadDocument);
@@ -36,7 +32,7 @@ router.delete('/:id', documentController.deleteDocument);
// Analytics endpoints (keeping these for monitoring)
router.get('/analytics', async (req, res) => {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });
}
@@ -67,7 +63,7 @@ router.get('/processing-stats', async (_req, res) => {
// Download endpoint (keeping this)
router.get('/:id/download', async (req, res) => {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });
}
@@ -106,7 +102,7 @@ router.get('/:id/download', async (req, res) => {
router.post('/:id/process-optimized-agentic-rag', async (req, res) => {
try {
const { id } = req.params;
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });
@@ -147,7 +143,7 @@ router.post('/:id/process-optimized-agentic-rag', async (req, res) => {
router.get('/:id/agentic-rag-sessions', async (req, res) => {
try {
const { id } = req.params;
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });
@@ -183,7 +179,7 @@ router.get('/:id/agentic-rag-sessions', async (req, res) => {
router.get('/agentic-rag-sessions/:sessionId', async (req, res) => {
try {
const { sessionId } = req.params;
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });
@@ -245,7 +241,7 @@ router.get('/agentic-rag-sessions/:sessionId', async (req, res) => {
router.get('/:id/analytics', async (req, res) => {
try {
const { id } = req.params;
const userId = req.user?.id;
const userId = req.user?.uid;
if (!userId) {
return res.status(401).json({ error: 'User not authenticated' });

View File

@@ -90,7 +90,7 @@ router.get('/document-chunks/:documentId', async (req, res) => {
*/
router.get('/analytics', async (req, res) => {
try {
const userId = req.user?.id;
const userId = req.user?.uid;
const { days = 30 } = req.query;
if (!userId) {

View File

@@ -1,23 +1,24 @@
import { config } from '../config/env';
import { logger } from '../utils/logger';
import { VectorDatabaseModel, DocumentChunk, VectorSearchResult } from '../models/VectorDatabaseModel';
import pool from '../config/database';
// Re-export types from the model
export { VectorSearchResult, DocumentChunk } from '../models/VectorDatabaseModel';
class VectorDatabaseService {
private provider: 'pinecone' | 'pgvector' | 'chroma';
private provider: 'pinecone' | 'pgvector' | 'chroma' | 'supabase';
private client: any;
private semanticCache: Map<string, { embedding: number[]; timestamp: number }> = new Map();
private readonly CACHE_TTL = 3600000; // 1 hour cache TTL
constructor() {
this.provider = config.vector.provider;
this.initializeClient();
// Don't initialize client immediately - do it lazily when needed
}
private async initializeClient() {
if (this.client) return; // Already initialized
switch (this.provider) {
case 'pinecone':
await this.initializePinecone();
@@ -28,11 +29,22 @@ class VectorDatabaseService {
case 'chroma':
await this.initializeChroma();
break;
case 'supabase':
await this.initializeSupabase();
break;
default:
throw new Error(`Unsupported vector database provider: ${this.provider}`);
logger.error(`Unsupported vector database provider: ${this.provider}`);
this.client = null;
}
}
// Lazily initializes the provider client on first use.
// Returns true when a usable client is available, false otherwise
// (callers then skip vector operations gracefully).
private async ensureInitialized() {
  if (!this.client) {
    await this.initializeClient();
  }
  return this.client !== null;
}
private async initializePinecone() {
// const { Pinecone } = await import('@pinecone-database/pinecone');
// this.client = new Pinecone({
@@ -42,42 +54,12 @@ class VectorDatabaseService {
}
// DEPRECATED: pgvector is not supported in the Firebase deployment.
// Leaves the client unset so callers fall through their
// "vector database not available" paths instead of crashing.
private async initializePgVector() {
  // Note: pgvector is deprecated in favor of Supabase
  // This method is kept for backward compatibility but will not work in Firebase
  logger.warn('pgvector provider is deprecated. Use Supabase instead for cloud deployment.');
  this.client = null;
}
// Creates the pgvector-backed document_chunks table and its indexes.
// NOTE(review): the embedding dimension here is 3072, while the Supabase
// setup SQL declares vector(1536) — confirm which embedding model is in
// use before relying on either schema.
private async createVectorTables() {
  const createTableQuery = `
    CREATE TABLE IF NOT EXISTS document_chunks (
      id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
      document_id VARCHAR(255) NOT NULL,
      chunk_index INTEGER NOT NULL,
      content TEXT NOT NULL,
      embedding vector(3072),
      metadata JSONB DEFAULT '{}',
      created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
      updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    );

    CREATE INDEX IF NOT EXISTS document_chunks_document_id_idx ON document_chunks(document_id);
    CREATE INDEX IF NOT EXISTS document_chunks_embedding_idx ON document_chunks USING ivfflat (embedding vector_cosine_ops);
  `;

  await this.client.query(createTableQuery);
}
private async initializeChroma() {
// const { ChromaClient } = await import('chromadb');
@@ -87,6 +69,40 @@ class VectorDatabaseService {
logger.info('Chroma vector database initialized');
}
// Wires up the Supabase service-role client as the vector store.
// Failures are swallowed deliberately: the service degrades to "no
// vector DB" (client = null) rather than crashing the process.
private async initializeSupabase() {
  try {
    const { getSupabaseServiceClient } = await import('../config/supabase');
    this.client = getSupabaseServiceClient();

    // Create the document_chunks table if it doesn't exist
    await this.createSupabaseVectorTables();

    logger.info('Supabase vector database initialized successfully');
  } catch (error) {
    logger.error('Failed to initialize Supabase vector database', error);
    // Don't throw error, just log it and continue without vector DB
    this.client = null;
  }
}
// Best-effort schema bootstrap via RPC helpers expected to exist in the
// database ('enable_pgvector', 'create_document_chunks_table'). If the
// RPCs are missing or fail, a warning asks for manual setup instead of
// aborting initialization.
private async createSupabaseVectorTables() {
  try {
    // Enable pgvector extension
    await this.client.rpc('enable_pgvector');

    // Create document_chunks table with vector support
    const { error } = await this.client.rpc('create_document_chunks_table');
    if (error && !error.message.includes('already exists')) {
      throw error;
    }

    logger.info('Supabase vector tables created successfully');
  } catch (error) {
    logger.warn('Could not create vector tables automatically. Please run the setup SQL manually:', error);
  }
}
/**
* Generate embeddings for text using OpenAI or Anthropic with caching
*/
@@ -225,6 +241,12 @@ class VectorDatabaseService {
* Store document chunks with embeddings
*/
async storeDocumentChunks(chunks: DocumentChunk[]): Promise<void> {
const initialized = await this.ensureInitialized();
if (!initialized) {
logger.warn('Vector database not available, skipping chunk storage');
return;
}
try {
switch (this.provider) {
case 'pinecone':
@@ -236,6 +258,9 @@ class VectorDatabaseService {
case 'chroma':
await this.storeInChroma(chunks);
break;
case 'supabase':
await this.storeInSupabase(chunks);
break;
}
logger.info(`Stored ${chunks.length} document chunks in vector database`);
} catch (error) {
@@ -257,6 +282,12 @@ class VectorDatabaseService {
enableQueryExpansion?: boolean;
} = {}
): Promise<VectorSearchResult[]> {
const initialized = await this.ensureInitialized();
if (!initialized) {
logger.warn('Vector database not available, returning empty search results');
return [];
}
try {
let queries = [query];
@@ -281,6 +312,9 @@ class VectorDatabaseService {
case 'chroma':
results = await this.searchChroma(embedding, options);
break;
case 'supabase':
results = await this.searchSupabase(embedding, options);
break;
default:
throw new Error(`Unsupported provider: ${this.provider}`);
}
@@ -401,55 +435,14 @@ class VectorDatabaseService {
}
// Private implementation methods for different providers
private async storeInPinecone(chunks: DocumentChunk[]): Promise<void> {
const index = this.client.index(config.vector.pineconeIndex!);
const vectors = chunks.map(chunk => ({
id: chunk.id,
values: chunk.embedding,
metadata: {
...chunk.metadata,
documentId: chunk.documentId,
content: chunk.content
}
}));
await index.upsert(vectors);
private async storeInPinecone(_chunks: DocumentChunk[]): Promise<void> {
logger.warn('Pinecone provider not fully implemented');
throw new Error('Pinecone provider not available');
}
// pgvector backend is disabled in the Firebase environment; Supabase is
// the supported vector store. Throws so misconfiguration is visible.
private async storeInPgVector(_chunks: DocumentChunk[]): Promise<void> {
  logger.warn('pgvector provider is deprecated. Use Supabase instead for cloud deployment.');
  throw new Error('pgvector provider not available in Firebase environment. Use Supabase instead.');
}
private async storeInChroma(chunks: DocumentChunk[]): Promise<void> {
@@ -472,73 +465,19 @@ class VectorDatabaseService {
}
// Pinecone search is stubbed out; see storeInPinecone for rationale.
private async searchPinecone(
  _embedding: number[],
  _options: any
): Promise<VectorSearchResult[]> {
  logger.warn('Pinecone provider not fully implemented');
  throw new Error('Pinecone provider not available');
}
// pgvector search is disabled in the Firebase environment; Supabase is
// the supported vector store. Throws so misconfiguration is visible.
private async searchPgVector(
  _embedding: number[],
  _options: any
): Promise<VectorSearchResult[]> {
  logger.warn('pgvector provider is deprecated. Use Supabase instead for cloud deployment.');
  throw new Error('pgvector provider not available in Firebase environment. Use Supabase instead.');
}
private async searchChroma(
@@ -563,6 +502,80 @@ class VectorDatabaseService {
}));
}
/**
 * Upserts document chunks (with their precomputed embeddings) into the
 * Supabase `document_chunks` table.
 *
 * The row mapping is purely synchronous, so a plain Array#map is used —
 * the original wrapped it in Promise.all over async callbacks with no
 * awaits, which only added needless promise allocation.
 *
 * @throws rethrows any Supabase error after logging it.
 */
private async storeInSupabase(chunks: DocumentChunk[]): Promise<void> {
  try {
    // Shape chunks into the table's column layout.
    const supabaseRows = chunks.map((chunk) => ({
      id: chunk.id,
      document_id: chunk.documentId,
      chunk_index: chunk.chunkIndex,
      content: chunk.content,
      embedding: chunk.embedding,
      metadata: chunk.metadata || {}
    }));

    const { error } = await this.client
      .from('document_chunks')
      .upsert(supabaseRows);

    if (error) {
      throw error;
    }

    logger.info(`Successfully stored ${chunks.length} chunks in Supabase`);
  } catch (error) {
    logger.error('Failed to store chunks in Supabase:', error);
    throw error;
  }
}
/**
 * Vector similarity search via the `match_documents` RPC defined in the
 * Supabase setup SQL.
 *
 * Fix: in supabase-js, `rpc()` is a method on the client, not something
 * chainable after `.from().select()` — the original chain would fail at
 * runtime. The RPC returns a set of rows, so PostgREST filters like
 * `.eq('document_id', …)` can be applied to its result builder directly.
 *
 * Returns an empty array on any failure (search is best-effort).
 */
private async searchSupabase(
  embedding: number[],
  options: {
    documentId?: string;
    limit?: number;
    similarity?: number;
    filters?: Record<string, any>;
  }
): Promise<VectorSearchResult[]> {
  try {
    let query = this.client.rpc('match_documents', {
      query_embedding: embedding,
      match_threshold: options.similarity || 0.7,
      match_count: options.limit || 10
    });

    // Add document filter if specified
    if (options.documentId) {
      query = query.eq('document_id', options.documentId);
    }

    const { data, error } = await query;

    if (error) {
      throw error;
    }

    // Guard against a null data payload from the client.
    return (data ?? []).map((row: any) => ({
      id: row.id,
      score: row.similarity,
      metadata: {
        ...row.metadata,
        documentId: row.document_id
      },
      content: row.content
    }));
  } catch (error) {
    logger.error('Failed to search in Supabase:', error);
    return [];
  }
}
// Delegates chunk retrieval to the model layer (VectorDatabaseModel).
private async getDocumentChunks(documentId: string): Promise<DocumentChunk[]> {
  return await VectorDatabaseModel.getDocumentChunks(documentId);
}

View File

@@ -0,0 +1,89 @@
-- Supabase setup for vector search over document chunks.
-- Run once against the project database (SQL editor or migration).

-- Enable the pgvector extension
CREATE EXTENSION IF NOT EXISTS vector;

-- Create document_chunks table with vector support
CREATE TABLE IF NOT EXISTS document_chunks (
  id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
  document_id VARCHAR(255) NOT NULL,
  chunk_index INTEGER NOT NULL,
  content TEXT NOT NULL,
  embedding vector(1536), -- OpenAI embeddings are 1536 dimensions
  metadata JSONB DEFAULT '{}',
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for better performance
CREATE INDEX IF NOT EXISTS document_chunks_document_id_idx ON document_chunks(document_id);
CREATE INDEX IF NOT EXISTS document_chunks_embedding_idx ON document_chunks USING ivfflat (embedding vector_cosine_ops);

-- Create function to enable pgvector (for RPC calls)
-- Invoked by the backend via supabase.rpc('enable_pgvector').
CREATE OR REPLACE FUNCTION enable_pgvector()
RETURNS VOID AS $$
BEGIN
  CREATE EXTENSION IF NOT EXISTS vector;
END;
$$ LANGUAGE plpgsql;

-- Create function to create document_chunks table (for RPC calls)
-- NOTE(review): duplicates the DDL above so the backend can bootstrap
-- the schema remotely; keep the two copies in sync.
CREATE OR REPLACE FUNCTION create_document_chunks_table()
RETURNS VOID AS $$
BEGIN
  CREATE TABLE IF NOT EXISTS document_chunks (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    document_id VARCHAR(255) NOT NULL,
    chunk_index INTEGER NOT NULL,
    content TEXT NOT NULL,
    embedding vector(1536),
    metadata JSONB DEFAULT '{}',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  );

  CREATE INDEX IF NOT EXISTS document_chunks_document_id_idx ON document_chunks(document_id);
  CREATE INDEX IF NOT EXISTS document_chunks_embedding_idx ON document_chunks USING ivfflat (embedding vector_cosine_ops);
END;
$$ LANGUAGE plpgsql;

-- Create function to match documents based on vector similarity
-- Returns up to match_count chunks, nearest first by cosine distance,
-- whose similarity (1 - distance) exceeds match_threshold.
CREATE OR REPLACE FUNCTION match_documents(
  query_embedding vector(1536),
  match_threshold float DEFAULT 0.7,
  match_count int DEFAULT 10
)
RETURNS TABLE(
  id UUID,
  content TEXT,
  metadata JSONB,
  document_id VARCHAR(255),
  similarity FLOAT
) AS $$
BEGIN
  RETURN QUERY
  SELECT
    document_chunks.id,
    document_chunks.content,
    document_chunks.metadata,
    document_chunks.document_id,
    1 - (document_chunks.embedding <=> query_embedding) AS similarity
  FROM document_chunks
  WHERE 1 - (document_chunks.embedding <=> query_embedding) > match_threshold
  ORDER BY document_chunks.embedding <=> query_embedding
  LIMIT match_count;
END;
$$ LANGUAGE plpgsql;

-- Enable Row Level Security (RLS) if needed
-- ALTER TABLE document_chunks ENABLE ROW LEVEL SECURITY;

-- Create policies for RLS (adjust as needed for your auth requirements)
-- CREATE POLICY "Users can view all document chunks" ON document_chunks FOR SELECT USING (true);
-- CREATE POLICY "Users can insert document chunks" ON document_chunks FOR INSERT WITH CHECK (true);
-- CREATE POLICY "Users can update document chunks" ON document_chunks FOR UPDATE USING (true);
-- CREATE POLICY "Users can delete document chunks" ON document_chunks FOR DELETE USING (true);

-- Grant necessary permissions
-- NOTE(review): granting ALL to `anon` lets unauthenticated clients read
-- and modify chunks — confirm this is intended before production use.
GRANT ALL ON document_chunks TO authenticated;
GRANT ALL ON document_chunks TO anon;
GRANT EXECUTE ON FUNCTION match_documents TO authenticated;
GRANT EXECUTE ON FUNCTION match_documents TO anon;

View File

@@ -1,4 +1,4 @@
VITE_API_BASE_URL=https://api-y56ccs6wva-uc.a.run.app/api
VITE_API_BASE_URL=https://api-y56ccs6wva-uc.a.run.app
VITE_FIREBASE_API_KEY=AIzaSyBoV04YHkbCSUIU6sXki57um4xNsvLV_jY
VITE_FIREBASE_AUTH_DOMAIN=cim-summarizer.firebaseapp.com
VITE_FIREBASE_PROJECT_ID=cim-summarizer

17
frontend/.gcloudignore Normal file
View File

@@ -0,0 +1,17 @@
# This file specifies files that are *not* uploaded to Google Cloud
# using gcloud. It follows the same syntax as .gitignore, with the addition of
# "#!include" directives (which insert the entries of the given .gitignore-style
# file at that point).
#
# For more information, run:
# $ gcloud topic gcloudignore
#
.gcloudignore
# If you would like to upload your .git directory, .gitignore file or files
# from your .gitignore file, remove the corresponding line
# below:
.git
.gitignore
node_modules
#!include:.gitignore

View File

@@ -9,6 +9,7 @@ import DocumentViewer from './components/DocumentViewer';
import Analytics from './components/Analytics';
import LogoutButton from './components/LogoutButton';
import { documentService } from './services/documentService';
import {
Home,
Upload,
@@ -22,9 +23,9 @@ import { cn } from './utils/cn';
// Dashboard component
const Dashboard: React.FC = () => {
const { user } = useAuth();
const { user, token } = useAuth();
const [documents, setDocuments] = useState<any[]>([]);
const [loading, setLoading] = useState(true);
const [loading, setLoading] = useState(false);
const [viewingDocument, setViewingDocument] = useState<string | null>(null);
const [searchTerm, setSearchTerm] = useState('');
const [activeTab, setActiveTab] = useState<'overview' | 'documents' | 'upload' | 'analytics'>('overview');
@@ -51,13 +52,24 @@ const Dashboard: React.FC = () => {
const fetchDocuments = useCallback(async () => {
try {
setLoading(true);
const response = await fetch('/api/documents', {
console.log('Fetching documents with token:', token ? 'Token available' : 'No token');
console.log('User state:', user);
console.log('Token preview:', token ? `${token.substring(0, 20)}...` : 'No token');
if (!token) {
console.error('No authentication token available');
return;
}
const response = await fetch('https://us-central1-cim-summarizer.cloudfunctions.net/api/api/documents', {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
console.log('API response status:', response.status);
if (response.ok) {
const result = await response.json();
// The API returns an array directly, not wrapped in success/data
@@ -78,13 +90,17 @@ const Dashboard: React.FC = () => {
}));
setDocuments(transformedDocs);
}
} else {
console.error('API request failed:', response.status, response.statusText);
const errorText = await response.text();
console.error('Error response body:', errorText);
}
} catch (error) {
console.error('Failed to fetch documents:', error);
} finally {
setLoading(false);
}
}, [user?.name, user?.email]);
}, [user?.name, user?.email, token]);
// Poll for status updates on documents that are being processed
const pollDocumentStatus = useCallback(async (documentId: string) => {
@@ -95,9 +111,14 @@ const Dashboard: React.FC = () => {
}
try {
const response = await fetch(`/api/documents/${documentId}/progress`, {
if (!token) {
console.error('No authentication token available');
return false;
}
const response = await fetch(`https://us-central1-cim-summarizer.cloudfunctions.net/api/api/documents/${documentId}/progress`, {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
@@ -143,7 +164,7 @@ const Dashboard: React.FC = () => {
}
return true; // Continue polling
}, []);
}, [token]);
// Set up polling for documents that are being processed or uploaded (might be processing)
useEffect(() => {

View File

@@ -1,8 +1,9 @@
import React, { useCallback, useState, useRef, useEffect } from 'react';
import React, { useState, useCallback, useRef, useEffect } from 'react';
import { useDropzone } from 'react-dropzone';
import { Upload, FileText, X, CheckCircle, AlertCircle } from 'lucide-react';
import { cn } from '../utils/cn';
import { documentService } from '../services/documentService';
import { useAuth } from '../contexts/AuthContext';
interface UploadedFile {
id: string;
@@ -24,6 +25,7 @@ const DocumentUpload: React.FC<DocumentUploadProps> = ({
onUploadComplete,
onUploadError,
}) => {
const { token } = useAuth();
const [uploadedFiles, setUploadedFiles] = useState<UploadedFile[]>([]);
const [isUploading, setIsUploading] = useState(false);
const abortControllers = useRef<Map<string, AbortController>>(new Map());
@@ -160,11 +162,18 @@ const DocumentUpload: React.FC<DocumentUploadProps> = ({
return;
}
// Validate UUID format
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
if (!uuidRegex.test(documentId)) {
console.warn('Attempted to monitor progress for document with invalid UUID format:', documentId);
return;
}
const checkProgress = async () => {
try {
const response = await fetch(`/api/documents/${documentId}/progress`, {
const response = await fetch(`https://us-central1-cim-summarizer.cloudfunctions.net/api/api/documents/${documentId}/progress`, {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
@@ -198,9 +207,18 @@ const DocumentUpload: React.FC<DocumentUploadProps> = ({
if (newStatus === 'completed' || newStatus === 'error') {
return;
}
} else if (response.status === 404) {
// Document not found, stop monitoring
console.warn(`Document ${documentId} not found, stopping progress monitoring`);
return;
} else if (response.status === 401) {
// Unauthorized, stop monitoring
console.warn('Unauthorized access to document progress, stopping monitoring');
return;
}
} catch (error) {
console.error('Failed to fetch processing progress:', error);
// Don't stop monitoring on network errors, just log and continue
}
// Continue monitoring
@@ -209,7 +227,7 @@ const DocumentUpload: React.FC<DocumentUploadProps> = ({
// Start monitoring
setTimeout(checkProgress, 1000);
}, []);
}, [token]);
const { getRootProps, getInputProps, isDragActive } = useDropzone({
onDrop,

View File

@@ -1,5 +1,6 @@
import React, { useState, useEffect } from 'react';
import { CheckCircle, AlertCircle, Clock, FileText, TrendingUp, Save } from 'lucide-react';
import { useAuth } from '../contexts/AuthContext';
interface ProcessingProgressProps {
documentId: string;
@@ -26,6 +27,7 @@ const ProcessingProgress: React.FC<ProcessingProgressProps> = ({
onComplete,
onError,
}) => {
const { token } = useAuth();
const [progress, setProgress] = useState<ProgressData | null>(null);
const [isPolling, setIsPolling] = useState(true);
@@ -62,33 +64,45 @@ const ProcessingProgress: React.FC<ProcessingProgressProps> = ({
const pollProgress = async () => {
try {
const response = await fetch(`/api/documents/${documentId}/progress`, {
const response = await fetch(`https://us-central1-cim-summarizer.cloudfunctions.net/api/api/documents/${documentId}/progress`, {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
if (response.ok) {
const result = await response.json();
if (result.success) {
setProgress(result.data);
// Handle completion
if (result.data.status === 'completed') {
setIsPolling(false);
const data = await response.json();
setProgress({
documentId,
jobId: data.jobId || '',
status: data.status,
step: data.step || 'validation',
progress: data.progress || 0,
message: data.message || '',
startTime: data.startTime || new Date().toISOString(),
estimatedTimeRemaining: data.estimatedTimeRemaining,
currentChunk: data.currentChunk,
totalChunks: data.totalChunks,
error: data.error,
});
if (data.status === 'completed' || data.status === 'failed') {
setIsPolling(false);
if (data.status === 'completed') {
onComplete?.();
}
// Handle error
if (result.data.status === 'error') {
setIsPolling(false);
onError?.(result.data.error || 'Processing failed');
} else {
onError?.(data.error || 'Processing failed');
}
}
}
} catch (error) {
console.error('Failed to fetch progress:', error);
console.error('Failed to check progress:', error);
setProgress(prev => prev ? {
...prev,
message: 'Failed to check progress',
error: 'Network error'
} : null);
}
};
@@ -103,7 +117,7 @@ const ProcessingProgress: React.FC<ProcessingProgressProps> = ({
pollProgress();
return () => clearInterval(interval);
}, [documentId, isPolling, onComplete, onError]);
}, [documentId, isPolling, onComplete, onError, token]);
if (!progress) {
return (

View File

@@ -1,5 +1,6 @@
import React, { useState, useEffect } from 'react';
import { Clock, CheckCircle, AlertCircle, PlayCircle } from 'lucide-react';
import { useAuth } from '../contexts/AuthContext';
interface QueueStatusProps {
refreshTrigger?: number;
@@ -27,25 +28,29 @@ interface ProcessingJob {
}
const QueueStatus: React.FC<QueueStatusProps> = ({ refreshTrigger }) => {
const { token } = useAuth();
const [stats, setStats] = useState<QueueStats | null>(null);
const [activeJobs, setActiveJobs] = useState<ProcessingJob[]>([]);
const [loading, setLoading] = useState(true);
const fetchQueueStatus = async () => {
try {
const response = await fetch('/api/documents/queue/status', {
if (!token) {
console.error('No authentication token available');
return;
}
const response = await fetch('https://us-central1-cim-summarizer.cloudfunctions.net/api/api/documents/queue/status', {
headers: {
'Authorization': `Bearer ${localStorage.getItem('auth_token')}`,
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
},
});
if (response.ok) {
const result = await response.json();
if (result.success) {
setStats(result.data.stats);
setActiveJobs(result.data.activeJobs || []);
}
const data = await response.json();
setStats(data.stats);
setActiveJobs(data.activeJobs || []);
}
} catch (error) {
console.error('Failed to fetch queue status:', error);

View File

@@ -13,6 +13,8 @@ import { LoginCredentials, AuthResult, User } from '../types/auth';
class AuthService {
private currentUser: FirebaseUser | null = null;
private authStateListeners: Array<(user: FirebaseUser | null) => void> = [];
private tokenRefreshInterval: NodeJS.Timeout | null = null;
private isRefreshing = false;
constructor() {
// Listen for auth state changes
@@ -21,13 +23,36 @@ class AuthService {
this.updateAxiosHeaders(user);
// Notify all listeners
this.authStateListeners.forEach(listener => listener(user));
// Clear existing interval and set up new one
if (this.tokenRefreshInterval) {
clearInterval(this.tokenRefreshInterval);
}
if (user) {
// Set up periodic token refresh (every 45 minutes to be safe)
this.tokenRefreshInterval = setInterval(async () => {
if (this.currentUser && !this.isRefreshing) {
try {
this.isRefreshing = true;
await this.updateAxiosHeaders(this.currentUser);
} catch (error) {
console.error('Periodic token refresh failed:', error);
// Don't logout on refresh failure, let the next request handle it
} finally {
this.isRefreshing = false;
}
}
}, 45 * 60 * 1000); // 45 minutes
}
});
}
private async updateAxiosHeaders(user: FirebaseUser | null) {
if (user) {
try {
const token = await getIdToken(user);
// Force token refresh to ensure we have a fresh token
const token = await getIdToken(user, true);
axios.defaults.headers.common['Authorization'] = `Bearer ${token}`;
} catch (error) {
console.error('Failed to get ID token:', error);
@@ -40,12 +65,10 @@ class AuthService {
onAuthStateChanged(callback: (user: FirebaseUser | null) => void) {
this.authStateListeners.push(callback);
// Immediately call with current state
callback(this.currentUser);
// Return unsubscribe function
return () => {
this.authStateListeners = this.authStateListeners.filter(listener => listener !== callback);
this.authStateListeners = this.authStateListeners.filter(
listener => listener !== callback
);
};
}
@@ -103,6 +126,10 @@ class AuthService {
async logout(): Promise<void> {
try {
if (this.tokenRefreshInterval) {
clearInterval(this.tokenRefreshInterval);
this.tokenRefreshInterval = null;
}
await signOut(auth);
} catch (error) {
console.error('Logout failed:', error);
@@ -146,7 +173,21 @@ class AuthService {
}
try {
return await getIdToken(this.currentUser);
// Only force refresh if we're not already refreshing
if (!this.isRefreshing) {
this.isRefreshing = true;
try {
const token = await getIdToken(this.currentUser, true);
this.isRefreshing = false;
return token;
} catch (error) {
this.isRefreshing = false;
throw error;
}
} else {
// If already refreshing, just get the current token
return await getIdToken(this.currentUser, false);
}
} catch (error) {
console.error('Failed to get ID token:', error);
return null;
@@ -157,10 +198,12 @@ class AuthService {
return !!this.currentUser;
}
getFirebaseUser(): FirebaseUser | null {
return this.currentUser;
// Cleanup method
destroy() {
if (this.tokenRefreshInterval) {
clearInterval(this.tokenRefreshInterval);
}
}
}
export const authService = new AuthService();
export default authService;
export const authService = new AuthService();

View File

@@ -11,22 +11,40 @@ const apiClient = axios.create({
});
// Add auth token to requests
apiClient.interceptors.request.use((config) => {
const token = authService.getToken();
apiClient.interceptors.request.use(async (config) => {
const token = await authService.getToken();
if (token) {
config.headers.Authorization = `Bearer ${token}`;
}
return config;
});
// Handle auth errors
// Handle auth errors with retry logic
apiClient.interceptors.response.use(
(response) => response,
(error) => {
if (error.response?.status === 401) {
async (error) => {
const originalRequest = error.config;
if (error.response?.status === 401 && !originalRequest._retry) {
originalRequest._retry = true;
try {
// Attempt to refresh the token
const newToken = await authService.getToken();
if (newToken) {
// Retry the original request with the new token
originalRequest.headers.Authorization = `Bearer ${newToken}`;
return apiClient(originalRequest);
}
} catch (refreshError) {
console.error('Token refresh failed:', refreshError);
}
// If token refresh fails, logout the user
authService.logout();
window.location.href = '/login';
}
return Promise.reject(error);
}
);
@@ -145,7 +163,7 @@ class DocumentService {
// Always use optimized agentic RAG processing - no strategy selection needed
formData.append('processingStrategy', 'optimized_agentic_rag');
const response = await apiClient.post('/documents', formData, {
const response = await apiClient.post('/api/documents', formData, {
headers: {
'Content-Type': 'multipart/form-data',
},
@@ -165,7 +183,7 @@ class DocumentService {
* Get all documents for the current user
*/
async getDocuments(): Promise<Document[]> {
const response = await apiClient.get('/documents');
const response = await apiClient.get('/api/documents');
return response.data;
}
@@ -173,7 +191,7 @@ class DocumentService {
* Get a specific document by ID
*/
async getDocument(documentId: string): Promise<Document> {
const response = await apiClient.get(`/documents/${documentId}`);
const response = await apiClient.get(`/api/documents/${documentId}`);
return response.data;
}
@@ -181,7 +199,7 @@ class DocumentService {
* Get document processing status
*/
async getDocumentStatus(documentId: string): Promise<{ status: string; progress: number; message?: string }> {
const response = await apiClient.get(`/documents/${documentId}/progress`);
const response = await apiClient.get(`/api/documents/${documentId}/progress`);
return response.data;
}
@@ -189,7 +207,7 @@ class DocumentService {
* Download a processed document
*/
async downloadDocument(documentId: string): Promise<Blob> {
const response = await apiClient.get(`/documents/${documentId}/download`, {
const response = await apiClient.get(`/api/documents/${documentId}/download`, {
responseType: 'blob',
});
return response.data;
@@ -199,14 +217,14 @@ class DocumentService {
* Delete a document
*/
async deleteDocument(documentId: string): Promise<void> {
await apiClient.delete(`/documents/${documentId}`);
await apiClient.delete(`/api/documents/${documentId}`);
}
/**
* Retry processing for a failed document
*/
async retryProcessing(documentId: string): Promise<Document> {
const response = await apiClient.post(`/documents/${documentId}/retry`);
const response = await apiClient.post(`/api/documents/${documentId}/retry`);
return response.data;
}
@@ -214,14 +232,14 @@ class DocumentService {
* Save CIM review data
*/
async saveCIMReview(documentId: string, reviewData: CIMReviewData): Promise<void> {
await apiClient.post(`/documents/${documentId}/review`, reviewData);
await apiClient.post(`/api/documents/${documentId}/review`, reviewData);
}
/**
* Get CIM review data for a document
*/
async getCIMReview(documentId: string): Promise<CIMReviewData> {
const response = await apiClient.get(`/documents/${documentId}/review`);
const response = await apiClient.get(`/api/documents/${documentId}/review`);
return response.data;
}
@@ -229,7 +247,7 @@ class DocumentService {
* Export CIM review as PDF
*/
async exportCIMReview(documentId: string): Promise<Blob> {
const response = await apiClient.get(`/documents/${documentId}/export`, {
const response = await apiClient.get(`/api/documents/${documentId}/export`, {
responseType: 'blob',
});
return response.data;
@@ -239,7 +257,7 @@ class DocumentService {
* Get document analytics and insights
*/
async getDocumentAnalytics(documentId: string): Promise<any> {
const response = await apiClient.get(`/documents/${documentId}/analytics`);
const response = await apiClient.get(`/api/documents/${documentId}/analytics`);
return response.data;
}
@@ -247,7 +265,7 @@ class DocumentService {
* Get global analytics data
*/
async getAnalytics(days: number = 30): Promise<any> {
const response = await apiClient.get('/documents/analytics', {
const response = await apiClient.get('/api/documents/analytics', {
params: { days }
});
return response.data;
@@ -257,7 +275,7 @@ class DocumentService {
* Get processing statistics
*/
async getProcessingStats(): Promise<any> {
const response = await apiClient.get('/documents/processing-stats');
const response = await apiClient.get('/api/documents/processing-stats');
return response.data;
}
@@ -265,7 +283,7 @@ class DocumentService {
* Get agentic RAG sessions for a document
*/
async getAgenticRAGSessions(documentId: string): Promise<any> {
const response = await apiClient.get(`/documents/${documentId}/agentic-rag-sessions`);
const response = await apiClient.get(`/api/documents/${documentId}/agentic-rag-sessions`);
return response.data;
}
@@ -273,7 +291,7 @@ class DocumentService {
* Get detailed agentic RAG session information
*/
async getAgenticRAGSessionDetails(sessionId: string): Promise<any> {
const response = await apiClient.get(`/documents/agentic-rag-sessions/${sessionId}`);
const response = await apiClient.get(`/api/documents/agentic-rag-sessions/${sessionId}`);
return response.data;
}
@@ -297,7 +315,7 @@ class DocumentService {
* Search documents
*/
async searchDocuments(query: string): Promise<Document[]> {
const response = await apiClient.get('/documents/search', {
const response = await apiClient.get('/api/documents/search', {
params: { q: query },
});
return response.data;
@@ -307,7 +325,7 @@ class DocumentService {
* Get processing queue status
*/
async getQueueStatus(): Promise<{ pending: number; processing: number; completed: number; failed: number }> {
const response = await apiClient.get('/documents/queue/status');
const response = await apiClient.get('/api/documents/queue/status');
return response.data;
}