Files
cim_summary/backend/src/utils/logger.ts
Jon 6057d1d7fd 🔧 Fix authentication and document upload issues
## What was done:
- Fixed Firebase Admin initialization to use default credentials for Firebase Functions
- Updated frontend to use correct Firebase Functions URL (was using Cloud Run URL)
- Added comprehensive debugging to authentication middleware
- Added debugging to file upload middleware and CORS handling
- Added debug buttons to frontend for troubleshooting authentication
- Enhanced error handling and logging throughout the stack

## Current issues:
- Document upload still returns 400 Bad Request despite authentication working
- GET requests work fine (200 OK) but POST upload requests fail
- Frontend authentication is working correctly (valid JWT tokens)
- Backend authentication middleware is working (rejects invalid tokens)
- CORS is configured correctly and allowing requests

## Root cause analysis:
- Authentication is NOT the issue (tokens are valid, GET requests work)
- The problem appears to be in the file upload handling or multer configuration
- Request reaches the server but fails during upload processing
- Need to identify exactly where in the upload pipeline the failure occurs

## TODO next steps:
1. 🔍 Check Firebase Functions logs after next upload attempt to see debugging output
2. 🔍 Verify if request reaches upload middleware (look for '🔍 Upload middleware called' logs)
3. 🔍 Check if file validation is triggered (look for '🔍 File filter called' logs)
4. 🔍 Identify specific error in upload pipeline (multer, file processing, etc.)
5. 🔍 Test with smaller file or different file type to isolate issue
6. 🔍 Check if issue is with Firebase Functions file size limits or timeout
7. 🔍 Verify multer configuration and file handling in Firebase Functions environment

## Technical details:
- Frontend: https://cim-summarizer.web.app
- Backend: https://us-central1-cim-summarizer.cloudfunctions.net/api
- Authentication: Firebase Auth with JWT tokens (working correctly)
- File upload: Multer with memory storage for immediate GCS upload
- Debug buttons available in production frontend for troubleshooting
2025-07-31 16:18:53 -04:00

232 lines
6.7 KiB
TypeScript

import winston from 'winston';
import { config } from '../config/env';
import path from 'path';
// Create logs directory if it doesn't exist
import fs from 'fs';
// Skip file logging entirely in test environment (NODE_ENV=test or running under Jest).
const isTestEnvironment =
  process.env['NODE_ENV'] === 'test' || process.env['JEST_WORKER_ID'] !== undefined;

// Resolve the directory that will hold log files; empty string means "no file logging".
const configuredLogFile = !isTestEnvironment ? config.logging.file : '';
let logsDir = '';
if (configuredLogFile) {
  logsDir = path.dirname(configuredLogFile);
  if (!fs.existsSync(logsDir)) {
    try {
      fs.mkdirSync(logsDir, { recursive: true });
    } catch (error) {
      // Directory may not be writable (e.g. read-only filesystem); warn and continue —
      // file transports are skipped later when logsDir is unusable.
      console.warn('Could not create logs directory:', error);
    }
  }
}
// Define log format with correlation ID support.
// correlationId is attached by callers (see StructuredLogger.addCorrelationId), so the
// custom format only needs to stamp service/environment metadata onto every entry.
const logFormat = winston.format.combine(
  winston.format.timestamp(),
  winston.format.errors({ stack: true }),
  winston.format((info: any) => {
    // NOTE: a previous no-op (`info['correlationId'] = info['correlationId']`) was
    // removed; the correlation ID already rides along in the meta object.
    // Add service name for better identification
    info['service'] = 'cim-summary-backend';
    // Add environment
    info['environment'] = config.env;
    return info;
  })(),
  winston.format.json()
);
// Transports for the logger instance; starts empty and gains file transports
// only when file logging is enabled and the target directory is writable.
const transports: winston.transport[] = [];

if (!isTestEnvironment && logsDir) {
  try {
    // Probe writability by creating and deleting a throwaway file.
    const probePath = path.join(logsDir, 'test.log');
    fs.writeFileSync(probePath, 'test');
    fs.unlinkSync(probePath);

    // error.log: only entries at level 'error'.
    const errorTransport = new winston.transports.File({
      filename: path.join(logsDir, 'error.log'),
      level: 'error',
    });

    // upload.log: 'info' and below, restricted to upload-related entries.
    const uploadOnly = winston.format((info: any) =>
      info['category'] === 'upload' || info['operation'] === 'upload' ? info : false
    );
    const uploadTransport = new winston.transports.File({
      filename: path.join(logsDir, 'upload.log'),
      level: 'info',
      format: winston.format.combine(
        winston.format.timestamp(),
        uploadOnly(),
        winston.format.json()
      ),
    });

    // combined.log: everything the logger emits, at the configured path.
    const combinedTransport = new winston.transports.File({
      filename: config.logging.file,
    });

    transports.push(errorTransport, uploadTransport, combinedTransport);
  } catch (error) {
    // Skip file transports if directory is not writable
    console.warn('Could not create file transports for logger:', error);
  }
}
// The shared winston logger; level and transports come from the environment config.
export const logger = winston.createLogger({
  level: config.logging.level,
  format: logFormat,
  transports,
});

// Outside production, mirror all log output to the console in a colorized,
// human-readable form.
if (config.env !== 'production') {
  const consoleFormat = winston.format.combine(
    winston.format.colorize(),
    winston.format.simple()
  );
  logger.add(new winston.transports.Console({ format: consoleFormat }));
}
// Enhanced logger with structured logging methods.
// Wraps the module-level winston logger and stamps an optional correlation ID
// onto every entry so related events can be traced across the upload/processing
// pipeline.
export class StructuredLogger {
  private correlationId: string | undefined;

  /** @param correlationId Optional ID attached to every log entry from this instance. */
  constructor(correlationId?: string) {
    this.correlationId = correlationId;
  }

  // Merge the correlation ID into the meta object when one was provided.
  private addCorrelationId(meta: any): any {
    if (this.correlationId) {
      return { ...meta, correlationId: this.correlationId };
    }
    return meta;
  }

  // ---- Upload pipeline specific logging methods ----

  /** Log the start of a file upload. */
  uploadStart(fileInfo: any, userId: string): void {
    logger.info('Upload started', this.addCorrelationId({
      category: 'upload',
      operation: 'upload_start',
      fileInfo,
      userId,
      timestamp: new Date().toISOString(),
    }));
  }

  /** Log a successfully completed upload, with elapsed processing time. */
  uploadSuccess(fileInfo: any, userId: string, processingTime: number): void {
    logger.info('Upload completed successfully', this.addCorrelationId({
      category: 'upload',
      operation: 'upload_success',
      fileInfo,
      userId,
      processingTime,
      timestamp: new Date().toISOString(),
    }));
  }

  /**
   * Log a failed upload.
   * @param stage Pipeline stage where the failure occurred (e.g. multer, GCS write).
   */
  uploadError(error: any, fileInfo: any, userId: string, stage: string): void {
    logger.error('Upload failed', this.addCorrelationId({
      category: 'upload',
      operation: 'upload_error',
      error: error.message || error,
      errorCode: error.code,
      errorStack: error.stack,
      fileInfo,
      userId,
      stage,
      timestamp: new Date().toISOString(),
    }));
  }

  /** Log the start of document processing for a given document. */
  processingStart(documentId: string, userId: string, options: any): void {
    logger.info('Document processing started', this.addCorrelationId({
      category: 'processing',
      operation: 'processing_start',
      documentId,
      userId,
      options,
      timestamp: new Date().toISOString(),
    }));
  }

  /** Log successful completion of document processing. */
  processingSuccess(documentId: string, userId: string, processingTime: number, steps: any[]): void {
    logger.info('Document processing completed', this.addCorrelationId({
      category: 'processing',
      operation: 'processing_success',
      documentId,
      userId,
      processingTime,
      stepsCount: steps.length,
      timestamp: new Date().toISOString(),
    }));
  }

  /** Log a document-processing failure with the stage that failed. */
  processingError(error: any, documentId: string, userId: string, stage: string): void {
    logger.error('Document processing failed', this.addCorrelationId({
      category: 'processing',
      operation: 'processing_error',
      error: error.message || error,
      errorCode: error.code,
      errorStack: error.stack,
      documentId,
      userId,
      stage,
      timestamp: new Date().toISOString(),
    }));
  }

  /** Log a storage (GCS/filesystem) operation at info or error level by outcome. */
  storageOperation(operation: string, filePath: string, success: boolean, error?: any): void {
    const payload = this.addCorrelationId({
      category: 'storage',
      operation,
      filePath,
      success,
      error: error?.message || error,
      timestamp: new Date().toISOString(),
    });
    // Fix: call through `logger` rather than detaching logger.info/logger.error into
    // a local — an unbound winston level method loses its `this` (Logger) at call time.
    if (success) {
      logger.info('Storage operation', payload);
    } else {
      logger.error('Storage operation', payload);
    }
  }

  /** Log a job-queue operation; an error argument escalates the level to 'error'. */
  jobQueueOperation(operation: string, jobId: string, status: string, error?: any): void {
    const payload = this.addCorrelationId({
      category: 'job_queue',
      operation,
      jobId,
      status,
      error: error?.message || error,
      timestamp: new Date().toISOString(),
    });
    // Same unbound-method fix as storageOperation.
    if (error) {
      logger.error('Job queue operation', payload);
    } else {
      logger.info('Job queue operation', payload);
    }
  }

  // ---- General structured logging methods ----

  info(message: string, meta: any = {}): void {
    logger.info(message, this.addCorrelationId(meta));
  }

  warn(message: string, meta: any = {}): void {
    logger.warn(message, this.addCorrelationId(meta));
  }

  error(message: string, meta: any = {}): void {
    logger.error(message, this.addCorrelationId(meta));
  }

  debug(message: string, meta: any = {}): void {
    logger.debug(message, this.addCorrelationId(meta));
  }
}
export default logger;