-- Minimal database setup - just what's needed for uploads to work.
-- WARNING: this script DROPS and recreates the documents and
-- processing_jobs tables; any existing rows in them are lost.
-- (Other tables in the database are untouched.)

-- 1. Trigger function that stamps updated_at on every UPDATE.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- 2. Drop in dependency order: processing_jobs references documents,
--    so it goes first (CASCADE is belt-and-braces for other dependents).
DROP TABLE IF EXISTS processing_jobs CASCADE;
DROP TABLE IF EXISTS documents CASCADE;

-- 3. Documents table (user_id is VARCHAR to match Firebase UID strings,
--    not a FK to a local users table).
CREATE TABLE documents (
    id                      UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_id                 VARCHAR(255) NOT NULL,   -- Firebase UID
    original_file_name      VARCHAR(500) NOT NULL,
    file_path               VARCHAR(1000) NOT NULL,
    file_size               BIGINT NOT NULL
        CONSTRAINT documents_file_size_check CHECK (file_size > 0),  -- bytes; zero-byte uploads rejected
    uploaded_at             TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    status                  VARCHAR(50) NOT NULL DEFAULT 'uploaded',
    extracted_text          TEXT,
    generated_summary       TEXT,
    summary_markdown_path   VARCHAR(1000),
    summary_pdf_path        VARCHAR(1000),
    processing_started_at   TIMESTAMP WITH TIME ZONE,
    processing_completed_at TIMESTAMP WITH TIME ZONE,
    error_message           TEXT,
    analysis_data           JSONB,
    created_at              TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at              TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX idx_documents_user_id ON documents(user_id);
CREATE INDEX idx_documents_status ON documents(status);
CREATE INDEX idx_documents_uploaded_at ON documents(uploaded_at);
-- Composite index for the common "my documents with status X" query.
CREATE INDEX idx_documents_user_status ON documents(user_id, status);

CREATE TRIGGER update_documents_updated_at
    BEFORE UPDATE ON documents
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- 4. 
-- Create processing_jobs table: one row per background processing job
-- for a document; jobs cascade-delete with their document.
CREATE TABLE processing_jobs (
    id            UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    document_id   UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
    user_id       VARCHAR(255) NOT NULL,  -- Firebase UID, denormalized for per-user job queries
    status        VARCHAR(50) NOT NULL DEFAULT 'pending',
    attempts      INTEGER NOT NULL DEFAULT 0,
    max_attempts  INTEGER NOT NULL DEFAULT 3,
    options       JSONB,
    created_at    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    started_at    TIMESTAMP WITH TIME ZONE,
    completed_at  TIMESTAMP WITH TIME ZONE,
    updated_at    TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    error         TEXT,
    last_error_at TIMESTAMP WITH TIME ZONE,
    result        JSONB
);

CREATE INDEX idx_processing_jobs_status ON processing_jobs(status);
CREATE INDEX idx_processing_jobs_created_at ON processing_jobs(created_at);
CREATE INDEX idx_processing_jobs_document_id ON processing_jobs(document_id);
CREATE INDEX idx_processing_jobs_user_id ON processing_jobs(user_id);
-- Partial index keeps the worker's "oldest pending job" scan small.
CREATE INDEX idx_processing_jobs_pending ON processing_jobs(status, created_at)
    WHERE status = 'pending';

CREATE TRIGGER update_processing_jobs_updated_at
    BEFORE UPDATE ON processing_jobs
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- 5. Verify both tables were created and report their column counts.
-- FIX: the column-count subquery previously matched on table_name alone,
-- so identically-named tables in other schemas inflated the count; it now
-- also filters on table_schema.
SELECT
    t.table_name,
    (
        SELECT COUNT(*)
        FROM information_schema.columns AS c
        WHERE c.table_schema = t.table_schema
          AND c.table_name = t.table_name
    ) AS column_count
FROM information_schema.tables AS t
WHERE t.table_schema = 'public'
  AND t.table_name IN ('documents', 'processing_jobs')
ORDER BY t.table_name;