Initial commit: Virtual Board Member AI System foundation

This commit is contained in:
Jonathan Pressnell
2025-08-07 16:11:14 -04:00
commit fbfe940a45
47 changed files with 7332 additions and 0 deletions

11
app/__init__.py Normal file
View File

@@ -0,0 +1,11 @@
"""
Virtual Board Member AI System
An enterprise-grade AI assistant that provides document analysis,
commitment tracking, strategic insights, and decision support for
board members and executives.
"""
# Package metadata consumed by tooling and diagnostics endpoints.
__version__ = "0.1.0"
__author__ = "Virtual Board Member Team"
__description__ = "Enterprise-grade AI assistant for board governance"

3
app/api/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
"""
API package for the Virtual Board Member AI System.
"""

3
app/api/v1/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
"""
API v1 package for the Virtual Board Member AI System.
"""

24
app/api/v1/api.py Normal file
View File

@@ -0,0 +1,24 @@
"""
Main API router for v1 endpoints.
"""
from fastapi import APIRouter
from app.api.v1.endpoints import (
auth,
documents,
queries,
commitments,
analytics,
health,
)
# Aggregate router mounted under /api/v1 by app.main.
api_router = APIRouter()

# Include all endpoint routers; each sub-router keeps its own URL
# prefix and OpenAPI tag.
api_router.include_router(auth.router, prefix="/auth", tags=["Authentication"])
api_router.include_router(documents.router, prefix="/documents", tags=["Documents"])
api_router.include_router(queries.router, prefix="/queries", tags=["Queries"])
api_router.include_router(commitments.router, prefix="/commitments", tags=["Commitments"])
api_router.include_router(analytics.router, prefix="/analytics", tags=["Analytics"])
api_router.include_router(health.router, prefix="/health", tags=["Health"])

View File

@@ -0,0 +1,3 @@
"""
API endpoints for the Virtual Board Member AI System.
"""

View File

@@ -0,0 +1,14 @@
"""
Analytics and reporting endpoints for the Virtual Board Member AI System.
"""
from fastapi import APIRouter
# Shared router for analytics endpoints (mounted under /analytics in
# app/api/v1/api.py).
router = APIRouter()
# TODO: Implement analytics endpoints
# - Executive dashboard
# - Custom report generation
# - Strategic insights and recommendations
# - Risk analysis and identification
# - Performance metrics and KPIs

View File

@@ -0,0 +1,13 @@
"""
Authentication endpoints for the Virtual Board Member AI System.
"""
from fastapi import APIRouter
# Shared router for authentication endpoints (mounted under /auth in
# app/api/v1/api.py).
router = APIRouter()
# TODO: Implement authentication endpoints
# - OAuth 2.0/OIDC integration
# - JWT token management
# - User registration and management
# - Role-based access control

View File

@@ -0,0 +1,14 @@
"""
Commitment tracking endpoints for the Virtual Board Member AI System.
"""
from fastapi import APIRouter
# Shared router for commitment endpoints (mounted under /commitments in
# app/api/v1/api.py).
router = APIRouter()
# TODO: Implement commitment endpoints
# - Commitment extraction and tracking
# - Commitment dashboard and filtering
# - Follow-up automation
# - Progress tracking and milestones
# - Notification management

View File

@@ -0,0 +1,14 @@
"""
Document management endpoints for the Virtual Board Member AI System.
"""
from fastapi import APIRouter
# Shared router for document endpoints (mounted under /documents in
# app/api/v1/api.py).
router = APIRouter()
# TODO: Implement document endpoints
# - Document upload and processing
# - Document organization and metadata
# - Document search and retrieval
# - Document version control
# - Batch document operations

View File

@@ -0,0 +1,76 @@
"""
Health check endpoints for monitoring system status.
"""
from typing import Dict, Any
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db, check_db_health
from app.core.config import settings
import structlog
logger = structlog.get_logger()
router = APIRouter()
@router.get("/")
async def health_check() -> Dict[str, Any]:
    """Basic health check endpoint."""
    payload: Dict[str, Any] = {
        "status": "healthy",
        "version": settings.APP_VERSION,
        "environment": settings.ENVIRONMENT,
    }
    return payload
@router.get("/detailed")
async def detailed_health_check(
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Detailed health check with database connectivity."""
    # Probe the database; the overall status mirrors the DB status
    # until the remaining service probes are implemented.
    db_status = "healthy" if await check_db_health() else "unhealthy"
    # TODO: Add checks for other services (Redis, Qdrant, etc.)
    return {
        "status": db_status,
        "version": settings.APP_VERSION,
        "environment": settings.ENVIRONMENT,
        "services": {
            "database": db_status,
            "redis": "unknown",  # TODO: Implement Redis health check
            "qdrant": "unknown",  # TODO: Implement Qdrant health check
            "llm": "unknown",  # TODO: Implement LLM health check
        },
        "features": {
            "commitment_tracking": settings.FEATURE_COMMITMENT_TRACKING,
            "risk_analysis": settings.FEATURE_RISK_ANALYSIS,
            "meeting_support": settings.FEATURE_MEETING_SUPPORT,
            "real_time_queries": settings.FEATURE_REAL_TIME_QUERIES,
            "batch_processing": settings.FEATURE_BATCH_PROCESSING,
        },
    }
@router.get("/ready")
async def readiness_check() -> Dict[str, Any]:
    """Readiness check for Kubernetes.

    Returns a static "ready" status plus the current UTC time.
    TODO: Implement comprehensive readiness check (DB, Redis, Qdrant)
    before reporting ready.
    """
    from datetime import datetime, timezone

    # Fix: report the actual current time instead of the hard-coded
    # placeholder "2025-01-01T00:00:00Z".
    return {
        "status": "ready",
        "timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    }
@router.get("/live")
async def liveness_check() -> Dict[str, Any]:
    """Liveness check for Kubernetes.

    Always reports "alive" with the current UTC time; the process being
    able to answer is itself the signal.
    """
    from datetime import datetime, timezone

    # Fix: report the actual current time instead of the hard-coded
    # placeholder "2025-01-01T00:00:00Z".
    return {
        "status": "alive",
        "timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    }

View File

@@ -0,0 +1,14 @@
"""
Natural language query endpoints for the Virtual Board Member AI System.
"""
from fastapi import APIRouter
# Shared router for query endpoints (mounted under /queries in
# app/api/v1/api.py).
router = APIRouter()
# TODO: Implement query endpoints
# - Natural language query processing
# - RAG pipeline integration
# - Query history and context
# - Multi-document analysis
# - Query result caching

3
app/core/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
"""
Core configuration and utilities for the Virtual Board Member AI System.
"""

140
app/core/celery.py Normal file
View File

@@ -0,0 +1,140 @@
"""
Celery configuration for background task processing.
"""
from celery import Celery
from celery.schedules import crontab
import structlog
from app.core.config import settings
logger = structlog.get_logger()
# Create Celery app
celery_app = Celery(
    "virtual_board_member",
    broker=settings.CELERY_BROKER_URL,
    backend=settings.CELERY_RESULT_BACKEND,
    include=[
        "app.tasks.document_processing",
        "app.tasks.commitment_extraction",
        "app.tasks.notifications",
        "app.tasks.analytics",
        # Fix: the beat schedule and task_routes below reference
        # app.tasks.system.* tasks, so the module must be listed here
        # or workers never register those tasks.
        "app.tasks.system",
    ]
)
# Configure Celery
celery_app.conf.update(
    task_serializer=settings.CELERY_TASK_SERIALIZER,
    result_serializer=settings.CELERY_RESULT_SERIALIZER,
    # Fix: accept_content must be a list of content types. The raw
    # setting is a plain string ("json"), which Celery would iterate
    # character by character; Settings exposes the list form.
    accept_content=settings.celery_accept_content_list,
    timezone=settings.CELERY_TIMEZONE,
    enable_utc=settings.CELERY_ENABLE_UTC,
    task_track_started=True,
    task_time_limit=30 * 60,  # 30 minutes
    task_soft_time_limit=25 * 60,  # 25 minutes
    worker_prefetch_multiplier=1,
    worker_max_tasks_per_child=1000,
    result_expires=3600,  # 1 hour
    task_always_eager=settings.TESTING,  # Run tasks synchronously in tests
)
# Configure periodic tasks (Celery beat). Each task name must resolve
# to a module the workers import.
celery_app.conf.beat_schedule = {
    # Daily commitment reminders
    "daily-commitment-reminders": {
        "task": "app.tasks.notifications.send_commitment_reminders",
        "schedule": crontab(hour=9, minute=0),  # 9 AM daily
    },
    # Weekly analytics report
    "weekly-analytics-report": {
        "task": "app.tasks.analytics.generate_weekly_report",
        "schedule": crontab(day_of_week=1, hour=8, minute=0),  # Monday 8 AM
    },
    # Daily document processing cleanup
    "daily-document-cleanup": {
        "task": "app.tasks.document_processing.cleanup_old_documents",
        "schedule": crontab(hour=2, minute=0),  # 2 AM daily
    },
    # Hourly health check
    "hourly-health-check": {
        "task": "app.tasks.system.health_check",
        "schedule": crontab(minute=0),  # Every hour
    },
    # Daily audit log cleanup
    "daily-audit-cleanup": {
        "task": "app.tasks.system.cleanup_audit_logs",
        "schedule": crontab(hour=3, minute=0),  # 3 AM daily
    },
}
# Task routing: each task family gets a dedicated queue so workers can
# be scaled independently per workload.
celery_app.conf.task_routes = {
    "app.tasks.document_processing.*": {"queue": "document_processing"},
    "app.tasks.commitment_extraction.*": {"queue": "commitment_extraction"},
    "app.tasks.notifications.*": {"queue": "notifications"},
    "app.tasks.analytics.*": {"queue": "analytics"},
    "app.tasks.system.*": {"queue": "system"},
}
# Task annotations: per-task rate limits and time limits that override
# the global limits configured above.
celery_app.conf.task_annotations = {
    "app.tasks.document_processing.process_large_document": {
        "rate_limit": "10/m",  # 10 per minute
        "time_limit": 1800,  # 30 minutes
    },
    "app.tasks.commitment_extraction.extract_commitments": {
        "rate_limit": "50/m",  # 50 per minute
        "time_limit": 300,  # 5 minutes
    },
    "app.tasks.analytics.generate_weekly_report": {
        "rate_limit": "1/h",  # 1 per hour
        "time_limit": 600,  # 10 minutes
    },
}
# Error handling
@celery_app.task(bind=True)
def debug_task(self):
    """Debug task for testing."""
    request_repr = repr(self.request)
    logger.info(f"Request: {request_repr}")
# Task failure handling
@celery_app.task(bind=True, autoretry_for=(Exception,), retry_kwargs={'max_retries': 3})
def retry_task(self, *args, **kwargs):
    """Base task with retry logic.

    Template task: logs a structured failure record and retries with
    exponential backoff (60s, 120s, 240s).

    NOTE(review): autoretry_for already retries on Exception, so the
    explicit self.retry() call becomes redundant once real task logic
    is added — confirm which retry mechanism should win.
    """
    try:
        # Task logic here
        pass
    except Exception as exc:
        logger.error(
            "Task failed",
            task_name=self.name,
            task_id=self.request.id,
            error=str(exc),
            retry_count=self.request.retries
        )
        # Exponential backoff: 60s doubled on every retry attempt.
        raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
# Health check task
@celery_app.task
def health_check():
    """Health check task for monitoring.

    Returns:
        dict: "healthy" status plus the current UTC timestamp.
    """
    from datetime import datetime, timezone

    logger.info("Celery health check completed")
    # Fix: report the actual time instead of the hard-coded placeholder
    # "2025-01-01T00:00:00Z".
    return {
        "status": "healthy",
        "timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    }
# Cleanup task
@celery_app.task
def cleanup_old_data():
    """Cleanup old data and temporary files."""
    logger.info("Starting data cleanup")
    # TODO: Implement cleanup logic
    cleaned = 0
    logger.info("Data cleanup completed")
    return {"status": "completed", "cleaned_items": cleaned}

210
app/core/config.py Normal file
View File

@@ -0,0 +1,210 @@
"""
Configuration settings for the Virtual Board Member AI System.
"""
import os
from typing import List, Optional
from pydantic import Field, validator
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application settings.

    Values are loaded from environment variables and a local ``.env``
    file (see the nested ``Config``). Fields declared with
    ``Field(...)`` have no default and MUST be supplied by the
    environment; missing values make ``Settings()`` raise a validation
    error.
    """

    # Application Configuration
    APP_NAME: str = "Virtual Board Member AI"
    APP_VERSION: str = "0.1.0"
    ENVIRONMENT: str = "development"
    DEBUG: bool = True
    LOG_LEVEL: str = "INFO"

    # Server Configuration
    HOST: str = "0.0.0.0"
    PORT: int = 8000
    WORKERS: int = 4
    RELOAD: bool = True

    # Security Configuration
    SECRET_KEY: str = Field(..., description="Secret key for JWT tokens")
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7

    # Database Configuration
    DATABASE_URL: str = Field(..., description="PostgreSQL database URL")
    DATABASE_POOL_SIZE: int = 20
    DATABASE_MAX_OVERFLOW: int = 30
    DATABASE_POOL_TIMEOUT: int = 30

    # Redis Configuration
    REDIS_URL: str = Field(..., description="Redis connection URL")
    REDIS_PASSWORD: Optional[str] = None
    REDIS_DB: int = 0
    REDIS_POOL_SIZE: int = 10

    # Qdrant Vector Database
    QDRANT_HOST: str = "localhost"
    QDRANT_PORT: int = 6333
    QDRANT_API_KEY: Optional[str] = None
    QDRANT_COLLECTION_NAME: str = "board_documents"
    QDRANT_VECTOR_SIZE: int = 1024

    # LLM Configuration (OpenRouter)
    OPENROUTER_API_KEY: str = Field(..., description="OpenRouter API key")
    OPENROUTER_BASE_URL: str = "https://openrouter.ai/api/v1"
    OPENROUTER_MODEL: str = "gpt-4o-mini"
    OPENROUTER_FALLBACK_MODEL: str = "gpt-3.5-turbo"
    OPENROUTER_MAX_TOKENS: int = 4000
    OPENROUTER_TEMPERATURE: float = 0.1

    # Document Processing
    MAX_FILE_SIZE: int = 104857600  # 100MB in bytes
    # Comma-separated; parsed via supported_formats_list below.
    SUPPORTED_FORMATS: str = "pdf,xlsx,csv,pptx,txt"
    BATCH_UPLOAD_LIMIT: int = 50
    OCR_ENABLED: bool = True
    TESSERACT_CMD: str = "/usr/bin/tesseract"

    # Storage Configuration (S3/MinIO)
    STORAGE_TYPE: str = "minio"  # minio or s3
    MINIO_ENDPOINT: str = "localhost:9000"
    MINIO_ACCESS_KEY: str = "minioadmin"
    MINIO_SECRET_KEY: str = "minioadmin"
    MINIO_BUCKET: str = "vbm-documents"
    MINIO_SECURE: bool = False

    # AWS S3 Configuration (if using S3)
    AWS_ACCESS_KEY_ID: Optional[str] = None
    AWS_SECRET_ACCESS_KEY: Optional[str] = None
    AWS_REGION: str = "us-east-1"
    S3_BUCKET: str = "vbm-documents"

    # Authentication (OAuth 2.0/OIDC)
    AUTH_PROVIDER: str = "auth0"  # auth0, cognito, or custom
    AUTH0_DOMAIN: Optional[str] = None
    AUTH0_CLIENT_ID: Optional[str] = None
    AUTH0_CLIENT_SECRET: Optional[str] = None
    AUTH0_AUDIENCE: Optional[str] = None

    # AWS Cognito Configuration (if using Cognito)
    COGNITO_USER_POOL_ID: Optional[str] = None
    COGNITO_CLIENT_ID: Optional[str] = None
    COGNITO_REGION: str = "us-east-1"

    # Email Configuration
    SMTP_HOST: Optional[str] = None
    SMTP_PORT: int = 587
    SMTP_USERNAME: Optional[str] = None
    SMTP_PASSWORD: Optional[str] = None
    SMTP_TLS: bool = True
    EMAIL_FROM: str = "noreply@yourcompany.com"

    # Monitoring and Logging
    PROMETHEUS_ENABLED: bool = True
    PROMETHEUS_PORT: int = 9090
    GRAFANA_PORT: int = 3000
    LOG_FORMAT: str = "json"
    LOG_FILE: str = "logs/app.log"

    # Message Queue (Kafka)
    KAFKA_BOOTSTRAP_SERVERS: str = "localhost:9092"
    KAFKA_TOPIC_DOCUMENT_PROCESSING: str = "vbm-document-processing"
    KAFKA_TOPIC_COMMITMENT_EXTRACTION: str = "vbm-commitment-extraction"
    KAFKA_TOPIC_NOTIFICATIONS: str = "vbm-notifications"

    # Celery Configuration
    CELERY_BROKER_URL: str = "redis://localhost:6379/1"
    CELERY_RESULT_BACKEND: str = "redis://localhost:6379/2"
    CELERY_TASK_SERIALIZER: str = "json"
    CELERY_RESULT_SERIALIZER: str = "json"
    # Single content type as a string; Celery expects a list — use the
    # celery_accept_content_list property below when configuring Celery.
    CELERY_ACCEPT_CONTENT: str = "json"
    CELERY_TIMEZONE: str = "UTC"
    CELERY_ENABLE_UTC: bool = True

    @property
    def celery_accept_content_list(self) -> List[str]:
        """Get CELERY_ACCEPT_CONTENT as a list."""
        return [self.CELERY_ACCEPT_CONTENT]

    # External Integrations
    SHAREPOINT_CLIENT_ID: Optional[str] = None
    SHAREPOINT_CLIENT_SECRET: Optional[str] = None
    SHAREPOINT_TENANT_ID: Optional[str] = None
    SHAREPOINT_SITE_URL: Optional[str] = None
    GOOGLE_DRIVE_CLIENT_ID: Optional[str] = None
    GOOGLE_DRIVE_CLIENT_SECRET: Optional[str] = None
    GOOGLE_DRIVE_REDIRECT_URI: str = "http://localhost:8000/auth/google/callback"
    SLACK_BOT_TOKEN: Optional[str] = None
    SLACK_SIGNING_SECRET: Optional[str] = None
    SLACK_WEBHOOK_URL: Optional[str] = None

    # Microsoft Graph API
    MICROSOFT_CLIENT_ID: Optional[str] = None
    MICROSOFT_CLIENT_SECRET: Optional[str] = None
    MICROSOFT_TENANT_ID: Optional[str] = None

    # Performance Configuration
    CACHE_TTL: int = 3600  # 1 hour
    RATE_LIMIT_REQUESTS: int = 100
    RATE_LIMIT_WINDOW: int = 60  # seconds
    MAX_CONCURRENT_REQUESTS: int = 50

    # Feature Flags
    FEATURE_COMMITMENT_TRACKING: bool = True
    FEATURE_RISK_ANALYSIS: bool = True
    FEATURE_MEETING_SUPPORT: bool = True
    FEATURE_REAL_TIME_QUERIES: bool = True
    FEATURE_BATCH_PROCESSING: bool = True

    # Compliance and Security
    ENABLE_AUDIT_LOGGING: bool = True
    ENABLE_PII_DETECTION: bool = True
    DATA_RETENTION_DAYS: int = 2555  # 7 years
    ENCRYPTION_ENABLED: bool = True
    BACKUP_ENABLED: bool = True

    # Development and Testing
    TESTING: bool = False
    MOCK_LLM_RESPONSES: bool = False
    SYNTHETIC_DATA_ENABLED: bool = True
    SEED_DATA_ENABLED: bool = True

    # CORS and Security
    # NOTE(review): "*" allows every host — tighten for production.
    ALLOWED_HOSTS: List[str] = ["*"]

    # NOTE(review): @validator is the pydantic v1 API; it still works
    # under pydantic v2 (as implied by pydantic_settings) but is
    # deprecated in favour of field_validator — confirm pinned version.
    @validator("SUPPORTED_FORMATS", pre=True)
    def parse_supported_formats(cls, v: str) -> str:
        """Parse supported formats string."""
        if isinstance(v, str):
            return v.lower()
        return v

    @property
    def supported_formats_list(self) -> List[str]:
        """Get list of supported file formats."""
        return [fmt.strip() for fmt in self.SUPPORTED_FORMATS.split(",")]

    @property
    def is_production(self) -> bool:
        """Check if running in production environment."""
        return self.ENVIRONMENT.lower() == "production"

    @property
    def is_development(self) -> bool:
        """Check if running in development environment."""
        return self.ENVIRONMENT.lower() == "development"

    @property
    def is_testing(self) -> bool:
        """Check if running in testing environment."""
        return self.ENVIRONMENT.lower() == "testing"

    class Config:
        # Environment values are read from .env with case-sensitive keys.
        env_file = ".env"
        env_file_encoding = "utf-8"
        case_sensitive = True


# Create settings instance
# NOTE: instantiated at import time — importing this module fails fast
# if required env vars (SECRET_KEY, DATABASE_URL, REDIS_URL,
# OPENROUTER_API_KEY) are missing.
settings = Settings()

97
app/core/database.py Normal file
View File

@@ -0,0 +1,97 @@
"""
Database configuration and connection setup for the Virtual Board Member AI System.
"""
import asyncio
from typing import AsyncGenerator
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
import structlog
from app.core.config import settings
logger = structlog.get_logger()
# Create async engine
# NOTE(review): assumes DATABASE_URL uses the plain "postgresql://"
# scheme; the replace() swaps in the asyncpg driver. A URL that already
# names a driver passes through unchanged — confirm the config format.
async_engine = create_async_engine(
    settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://"),
    echo=settings.DEBUG,  # SQL statement logging in debug mode
    pool_size=settings.DATABASE_POOL_SIZE,
    max_overflow=settings.DATABASE_MAX_OVERFLOW,
    pool_timeout=settings.DATABASE_POOL_TIMEOUT,
    pool_pre_ping=True,  # validate connections before handing them out
)

# Create sync engine for migrations
sync_engine = create_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,
    # StaticPool keeps a single shared connection when TESTING.
    poolclass=StaticPool if settings.TESTING else None,
)

# Create session factory
AsyncSessionLocal = async_sessionmaker(
    async_engine,
    class_=AsyncSession,
    # Keep ORM objects usable after commit (no implicit refresh).
    expire_on_commit=False,
)

# Create base class for models
Base = declarative_base()

# Metadata for migrations
metadata = MetaData()
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency to get database session.

    Yields one AsyncSession per request; rolls back, logs and re-raises
    on error. The explicit close() in ``finally`` is redundant with the
    ``async with`` block but harmless.
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
        except Exception as e:
            await session.rollback()
            logger.error("Database session error", error=str(e))
            raise
        finally:
            await session.close()
async def init_db() -> None:
    """Initialize database tables.

    Imports every model module (so their tables register on
    Base.metadata) and then issues CREATE TABLE for anything missing.
    Raises on failure after logging.
    """
    try:
        async with async_engine.begin() as conn:
            # Import all models to ensure they are registered
            from app.models import user, document, commitment, audit_log  # noqa
            # Create all tables
            await conn.run_sync(Base.metadata.create_all)
        logger.info("Database tables created successfully")
    except Exception as e:
        logger.error("Failed to initialize database", error=str(e))
        raise
async def close_db() -> None:
    """Close database connections.

    Disposes the async engine's connection pool; call at shutdown.
    """
    await async_engine.dispose()
    logger.info("Database connections closed")
def get_sync_db():
    """Return the synchronous engine, intended for migration tooling.

    NOTE(review): despite the name, this returns the Engine itself,
    not a session — confirm callers expect an Engine.
    """
    return sync_engine
# Database health check
async def check_db_health() -> bool:
    """Check database connectivity.

    Returns:
        bool: True if a trivial query succeeds, False otherwise
        (the failure is logged, never raised).
    """
    # Fix: SQLAlchemy 2.x requires textual SQL to be wrapped in
    # text(); passing a raw string to execute() raises ArgumentError,
    # which made this check report "unhealthy" even with a live DB.
    from sqlalchemy import text

    try:
        async with AsyncSessionLocal() as session:
            await session.execute(text("SELECT 1"))
            return True
    except Exception as e:
        logger.error("Database health check failed", error=str(e))
        return False

157
app/core/logging.py Normal file
View File

@@ -0,0 +1,157 @@
"""
Structured logging configuration for the Virtual Board Member AI System.
"""
import logging
import sys
from typing import Any, Dict
import structlog
from structlog.stdlib import LoggerFactory
from structlog.processors import (
TimeStamper,
JSONRenderer,
format_exc_info,
add_log_level,
StackInfoRenderer,
)
from structlog.types import Processor
from app.core.config import settings
def setup_logging() -> None:
    """Setup structured logging configuration.

    Configures the stdlib root logger and structlog so all records flow
    through one ProcessorFormatter: console rendering when DEBUG is on,
    JSON otherwise.
    """
    # Configure standard library logging
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stdout,
        level=getattr(logging, settings.LOG_LEVEL.upper()),
    )
    # Configure structlog
    structlog.configure(
        processors=[
            # Add timestamp
            TimeStamper(fmt="iso"),
            # Add log level
            add_log_level,
            # Add stack info
            StackInfoRenderer(),
            # Add exception info
            format_exc_info,
            # Add caller info (source file, function name, line number)
            structlog.processors.CallsiteParameterAdder(
                parameters={
                    structlog.processors.CallsiteParameter.FILENAME,
                    structlog.processors.CallsiteParameter.FUNC_NAME,
                    structlog.processors.CallsiteParameter.LINENO,
                }
            ),
            # Add numeric log level
            structlog.stdlib.add_log_level_number,
            # Hand the event dict to the stdlib formatter configured below
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    # Configure formatter: human-readable console output in DEBUG,
    # machine-readable JSON otherwise.
    formatter = structlog.stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer() if settings.DEBUG else JSONRenderer(),
        # Pre-chain applied to records from non-structlog (stdlib) loggers.
        foreign_pre_chain=[
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
        ],
    )
    # Configure root logger: replace any handlers basicConfig installed
    # with a single stdout handler using the formatter above.
    root_logger = logging.getLogger()
    root_logger.handlers.clear()
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
    root_logger.setLevel(getattr(logging, settings.LOG_LEVEL.upper()))
def get_logger(name: "str | None" = None) -> structlog.BoundLogger:
    """Get a structured logger instance, optionally bound to *name*."""
    return structlog.get_logger(name)
class AuditLogger:
    """Audit logging for compliance and security events.

    Thin wrapper around a structlog logger named "audit"; every method
    emits one structured record with a fixed ``event_type`` field.
    Extra keyword arguments are passed through to the log record.
    """

    def __init__(self):
        # Dedicated "audit" logger so audit records can be routed/filtered.
        self.logger = get_logger("audit")

    def log_user_login(self, user_id: str, ip_address: str, success: bool, **kwargs) -> None:
        """Log user login attempt (successful or failed)."""
        self.logger.info(
            "User login attempt",
            event_type="user_login",
            user_id=user_id,
            ip_address=ip_address,
            success=success,
            **kwargs
        )

    def log_document_access(self, user_id: str, document_id: str, action: str, **kwargs) -> None:
        """Log document access; *action* names the operation performed."""
        self.logger.info(
            "Document access",
            event_type="document_access",
            user_id=user_id,
            document_id=document_id,
            action=action,
            **kwargs
        )

    def log_query_execution(self, user_id: str, query: str, response_time: float, **kwargs) -> None:
        """Log query execution (response_time presumably seconds — confirm with callers)."""
        self.logger.info(
            "Query execution",
            event_type="query_execution",
            user_id=user_id,
            query=query,
            response_time=response_time,
            **kwargs
        )

    def log_commitment_extraction(self, document_id: str, commitments_count: int, **kwargs) -> None:
        """Log commitment extraction results for a document."""
        self.logger.info(
            "Commitment extraction",
            event_type="commitment_extraction",
            document_id=document_id,
            commitments_count=commitments_count,
            **kwargs
        )

    def log_security_event(self, event_type: str, severity: str, details: Dict[str, Any]) -> None:
        """Log security events (emitted at WARNING level)."""
        self.logger.warning(
            "Security event",
            event_type="security_event",
            security_event_type=event_type,
            severity=severity,
            details=details
        )


# Create audit logger instance (module-level singleton).
audit_logger = AuditLogger()

204
app/core/middleware.py Normal file
View File

@@ -0,0 +1,204 @@
"""
Middleware components for the Virtual Board Member AI System.
"""
import time
from typing import Callable
from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from prometheus_client import Counter, Histogram
import structlog
from app.core.config import settings
logger = structlog.get_logger()
# Prometheus metrics
# Request counter, labelled by method, URL path and response status.
REQUEST_COUNT = Counter(
    "http_requests_total",
    "Total HTTP requests",
    ["method", "endpoint", "status"]
)
# Request latency histogram, labelled by method and URL path.
REQUEST_LATENCY = Histogram(
    "http_request_duration_seconds",
    "HTTP request latency",
    ["method", "endpoint"]
)
class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Logs the start and completion of every HTTP request."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        started_at = time.time()
        client_host = request.client.host if request.client else None
        # Entry record: who is calling what.
        logger.info(
            "HTTP request started",
            method=request.method,
            url=str(request.url),
            client_ip=client_host,
            user_agent=request.headers.get("user-agent"),
        )
        response = await call_next(request)
        elapsed = time.time() - started_at
        # Exit record: outcome and wall-clock duration.
        logger.info(
            "HTTP request completed",
            method=request.method,
            url=str(request.url),
            status_code=response.status_code,
            duration=elapsed,
        )
        return response
class PrometheusMiddleware(BaseHTTPMiddleware):
    """Records Prometheus request count and latency for every request."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        started_at = time.time()
        response = await call_next(request)
        elapsed = time.time() - started_at
        # Metrics are labelled by method and the raw URL path.
        path = request.url.path
        REQUEST_COUNT.labels(
            method=request.method,
            endpoint=path,
            status=response.status_code,
        ).inc()
        REQUEST_LATENCY.labels(method=request.method, endpoint=path).observe(elapsed)
        return response
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Attaches standard security headers to every response."""

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        response = await call_next(request)
        # Baseline hardening headers, applied in every environment.
        baseline = {
            "X-Content-Type-Options": "nosniff",
            "X-Frame-Options": "DENY",
            "X-XSS-Protection": "1; mode=block",
            "Referrer-Policy": "strict-origin-when-cross-origin",
            "Permissions-Policy": "geolocation=(), microphone=(), camera=()",
        }
        for header_name, header_value in baseline.items():
            response.headers[header_name] = header_value
        # Content-Security-Policy is only enforced in production.
        if settings.is_production:
            response.headers["Content-Security-Policy"] = (
                "default-src 'self'; "
                "script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
                "style-src 'self' 'unsafe-inline'; "
                "img-src 'self' data: https:; "
                "font-src 'self' data:; "
                "connect-src 'self' https:; "
                "frame-ancestors 'none';"
            )
        return response
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Simple in-memory, per-client-IP rate limiter (60-second window).

    NOTE(review): state is per-process only — with multiple workers
    each process enforces its own limit; use a shared store (Redis)
    for a cluster-wide limit.
    """

    def __init__(self, app, requests_per_minute: int = 100):
        super().__init__(app)
        self.requests_per_minute = requests_per_minute
        # Maps client IP -> list of request timestamps inside the window.
        self.request_counts = {}

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        client_ip = request.client.host if request.client else "unknown"
        current_time = time.time()
        # Prune expired timestamps before checking the limit.
        self._clean_old_entries(current_time)
        if not self._check_rate_limit(client_ip, current_time):
            logger.warning(
                "Rate limit exceeded",
                client_ip=client_ip,
                requests_per_minute=self.requests_per_minute
            )
            return Response(
                content="Rate limit exceeded",
                status_code=429,
                headers={"Retry-After": "60"}
            )
        # Fix: record the request on admission (before processing).
        # The original recorded only after the response completed, so a
        # burst of slow in-flight requests never counted toward the
        # limit and could bypass it entirely.
        self._record_request(client_ip, current_time)
        return await call_next(request)

    def _clean_old_entries(self, current_time: float) -> None:
        """Remove timestamps older than the 60-second window."""
        cutoff_time = current_time - 60
        for client_ip in list(self.request_counts.keys()):
            self.request_counts[client_ip] = [
                ts for ts in self.request_counts[client_ip] if ts > cutoff_time
            ]
            if not self.request_counts[client_ip]:
                del self.request_counts[client_ip]

    def _check_rate_limit(self, client_ip: str, current_time: float) -> bool:
        """Return True if the client is still under its per-minute limit."""
        # Entries were just pruned to the window, so the list length is
        # the request count for the last 60 seconds.
        return len(self.request_counts.get(client_ip, ())) < self.requests_per_minute

    def _record_request(self, client_ip: str, current_time: float) -> None:
        """Record a request timestamp for the client."""
        self.request_counts.setdefault(client_ip, []).append(current_time)
class CORSMiddleware(BaseHTTPMiddleware):
    """Custom CORS middleware.

    Origins listed in settings.ALLOWED_HOSTS are granted credentialed
    access; any other origin gets a wildcard WITHOUT credentials.
    """

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        response = await call_next(request)
        origin = request.headers.get("origin")
        if origin and origin in settings.ALLOWED_HOSTS:
            # Echo the specific allowed origin so credentials may be used.
            response.headers["Access-Control-Allow-Origin"] = origin
            response.headers["Access-Control-Allow-Credentials"] = "true"
        else:
            # Fix: the original always sent Allow-Credentials: true,
            # even with the "*" fallback — the CORS spec forbids
            # combining a wildcard origin with credentials and browsers
            # reject that response outright.
            response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Methods"] = "GET, POST, PUT, DELETE, OPTIONS"
        response.headers["Access-Control-Allow-Headers"] = "Content-Type, Authorization"
        return response

137
app/main.py Normal file
View File

@@ -0,0 +1,137 @@
"""
Main FastAPI application entry point for the Virtual Board Member AI System.
"""
import logging
from contextlib import asynccontextmanager
from typing import Any
from fastapi import FastAPI, Request, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from fastapi.responses import JSONResponse
from prometheus_client import Counter, Histogram
import structlog
from app.core.config import settings
from app.core.database import init_db
from app.core.logging import setup_logging
from app.api.v1.api import api_router
from app.core.middleware import (
RequestLoggingMiddleware,
PrometheusMiddleware,
SecurityHeadersMiddleware,
)
# Setup structured logging
setup_logging()
logger = structlog.get_logger()

# Prometheus metrics are defined in middleware.py


@asynccontextmanager
async def lifespan(app: FastAPI) -> Any:
    """Application lifespan manager.

    Code before ``yield`` runs once at startup (before serving);
    code after ``yield`` runs at shutdown.
    """
    # Startup
    logger.info("Starting Virtual Board Member AI System", version=settings.APP_VERSION)
    # Initialize database (creates tables via init_db)
    await init_db()
    logger.info("Database initialized successfully")
    # Initialize other services (Redis, Qdrant, etc.)
    # TODO: Add service initialization
    yield
    # Shutdown
    logger.info("Shutting down Virtual Board Member AI System")
def create_application() -> FastAPI:
    """Create and configure the FastAPI application.

    Wires middleware, the v1 API router, root/health endpoints and a
    global exception handler. Interactive API docs are exposed only
    when DEBUG is on.
    """
    app = FastAPI(
        title=settings.APP_NAME,
        description="Enterprise-grade AI assistant for board members and executives",
        version=settings.APP_VERSION,
        # Hide interactive docs outside of debug/development.
        docs_url="/docs" if settings.DEBUG else None,
        redoc_url="/redoc" if settings.DEBUG else None,
        openapi_url="/openapi.json" if settings.DEBUG else None,
        lifespan=lifespan,
    )
    # Add middleware
    # NOTE(review): ALLOWED_HOSTS defaults to ["*"] while
    # allow_credentials=True; the CORS spec forbids a wildcard origin
    # with credentials — confirm explicit origins are configured in
    # production.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=settings.ALLOWED_HOSTS,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    app.add_middleware(TrustedHostMiddleware, allowed_hosts=settings.ALLOWED_HOSTS)
    app.add_middleware(RequestLoggingMiddleware)
    app.add_middleware(PrometheusMiddleware)
    app.add_middleware(SecurityHeadersMiddleware)
    # Include API routes
    app.include_router(api_router, prefix="/api/v1")

    # Health check endpoint
    # NOTE: unversioned probe endpoint; a richer health router also
    # exists under /api/v1/health.
    @app.get("/health", tags=["Health"])
    async def health_check() -> dict[str, Any]:
        """Health check endpoint."""
        return {
            "status": "healthy",
            "version": settings.APP_VERSION,
            "environment": settings.ENVIRONMENT,
        }

    # Root endpoint
    @app.get("/", tags=["Root"])
    async def root() -> dict[str, Any]:
        """Root endpoint with API information."""
        return {
            "message": "Virtual Board Member AI System",
            "version": settings.APP_VERSION,
            "docs": "/docs" if settings.DEBUG else None,
            "health": "/health",
        }

    # Exception handlers
    @app.exception_handler(Exception)
    async def global_exception_handler(request: Request, exc: Exception) -> JSONResponse:
        """Global exception handler: log the error, return a generic 500."""
        logger.error(
            "Unhandled exception",
            exc_info=exc,
            path=request.url.path,
            method=request.method,
        )
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={
                "detail": "Internal server error",
                "type": "internal_error",
            },
        )

    return app
# Create the application instance (imported by ASGI servers as app.main:app).
app = create_application()

if __name__ == "__main__":
    # Development entry point; production deployments should launch
    # uvicorn/gunicorn externally instead.
    import uvicorn
    uvicorn.run(
        "app.main:app",
        host=settings.HOST,
        port=settings.PORT,
        reload=settings.RELOAD,
        log_level=settings.LOG_LEVEL.lower(),
    )

18
app/models/__init__.py Normal file
View File

@@ -0,0 +1,18 @@
"""
Data models for the Virtual Board Member AI System.
"""
from .user import User
from .document import Document, DocumentVersion, DocumentTag
from .commitment import Commitment, CommitmentStatus
from .audit_log import AuditLog
__all__ = [
"User",
"Document",
"DocumentVersion",
"DocumentTag",
"Commitment",
"CommitmentStatus",
"AuditLog",
]

161
app/models/audit_log.py Normal file
View File

@@ -0,0 +1,161 @@
"""
Audit log models for the Virtual Board Member AI System.
"""
from datetime import datetime
from typing import Optional, Dict, Any
from sqlalchemy import Column, String, DateTime, Text, Integer, ForeignKey, Index
from sqlalchemy.dialects.postgresql import UUID, JSONB
import uuid
import enum
from app.core.database import Base
class AuditEventType(str, enum.Enum):
    """Audit event types.

    str-mixin enum: members serialize as their literal string values,
    matching the String storage of AuditLog.event_type.
    """
    # Account / session events
    USER_LOGIN = "user_login"
    USER_LOGOUT = "user_logout"
    USER_CREATED = "user_created"
    USER_UPDATED = "user_updated"
    USER_DELETED = "user_deleted"
    # Document lifecycle events
    DOCUMENT_UPLOADED = "document_uploaded"
    DOCUMENT_ACCESSED = "document_accessed"
    DOCUMENT_DOWNLOADED = "document_downloaded"
    DOCUMENT_DELETED = "document_deleted"
    DOCUMENT_PROCESSED = "document_processed"
    # Commitment lifecycle events
    COMMITMENT_CREATED = "commitment_created"
    COMMITMENT_UPDATED = "commitment_updated"
    COMMITMENT_COMPLETED = "commitment_completed"
    COMMITMENT_DELETED = "commitment_deleted"
    # Query / reporting events
    QUERY_EXECUTED = "query_executed"
    REPORT_GENERATED = "report_generated"
    # System / security / compliance events
    SYSTEM_CONFIGURATION_CHANGED = "system_configuration_changed"
    SECURITY_EVENT = "security_event"
    COMPLIANCE_EVENT = "compliance_event"
class AuditLog(Base):
    """Audit log model for compliance and security tracking.

    One row per auditable event (login, document access, query execution,
    security incident, ...). User email/role and resource name are stored
    denormalized so audit reports do not need joins against mutable tables.
    The ``log_*`` classmethods build unsaved instances; persisting them is
    the caller's responsibility.
    """

    __tablename__ = "audit_logs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)

    # Event information
    event_type = Column(String(100), nullable=False, index=True)
    event_description = Column(Text, nullable=True)

    # User information
    user_id = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    user_email = Column(String(255), nullable=True)  # Denormalized for performance
    user_role = Column(String(50), nullable=True)  # Denormalized for performance

    # Resource information
    resource_type = Column(String(50), nullable=True)  # document, commitment, user, etc.
    resource_id = Column(UUID(as_uuid=True), nullable=True)
    resource_name = Column(String(500), nullable=True)

    # Request information
    ip_address = Column(String(45), nullable=True)  # IPv4 or IPv6
    user_agent = Column(Text, nullable=True)
    request_method = Column(String(10), nullable=True)
    request_url = Column(Text, nullable=True)
    request_headers = Column(JSONB, nullable=True)

    # Response information
    response_status_code = Column(Integer, nullable=True)
    response_time_ms = Column(Integer, nullable=True)

    # Additional data
    event_metadata = Column(JSONB, nullable=True)  # Additional event-specific data
    severity = Column(String(20), default="info")  # info, warning, error, critical

    # Compliance fields
    compliance_category = Column(String(100), nullable=True)  # SOX, GDPR, etc.
    data_classification = Column(String(50), nullable=True)  # public, internal, confidential, restricted

    # Timestamps (naive UTC, consistent with the other models)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)

    # Composite indexes covering the common audit-report query patterns.
    __table_args__ = (
        Index('idx_audit_logs_user_event', 'user_id', 'event_type'),
        Index('idx_audit_logs_resource', 'resource_type', 'resource_id'),
        Index('idx_audit_logs_created_at', 'created_at'),
        Index('idx_audit_logs_compliance', 'compliance_category', 'created_at'),
    )

    def __repr__(self) -> str:
        return f"<AuditLog(id={self.id}, event_type='{self.event_type}', user_id={self.user_id})>"

    @classmethod
    def log_user_login(cls, user_id: str, user_email: str, user_role: str,
                       ip_address: str, user_agent: str, success: bool,
                       **kwargs) -> "AuditLog":
        """Build an (unsaved) audit entry for a login attempt.

        Failed attempts are recorded at ``warning`` severity so they stand
        out in security reviews. Extra keyword arguments are folded into
        ``event_metadata`` alongside the ``success`` flag.
        """
        return cls(
            event_type=AuditEventType.USER_LOGIN,
            event_description=f"User login attempt - {'successful' if success else 'failed'}",
            user_id=user_id,
            user_email=user_email,
            user_role=user_role,
            ip_address=ip_address,
            user_agent=user_agent,
            severity="info" if success else "warning",
            event_metadata={"success": success, **kwargs},
        )

    @classmethod
    def log_document_access(cls, user_id: str, user_email: str, document_id: str,
                            document_name: str, action: str, ip_address: str,
                            **kwargs) -> "AuditLog":
        """Build an (unsaved) audit entry for a document access event.

        ``action`` is a free-form verb (e.g. "viewed", "downloaded") kept
        both in the description and in ``event_metadata``.
        """
        return cls(
            event_type=AuditEventType.DOCUMENT_ACCESSED,
            event_description=f"Document {action}: {document_name}",
            user_id=user_id,
            user_email=user_email,
            resource_type="document",
            resource_id=document_id,
            resource_name=document_name,
            ip_address=ip_address,
            event_metadata={"action": action, **kwargs},
        )

    @classmethod
    def log_query_execution(cls, user_id: str, user_email: str, query: str,
                            response_time_ms: int, result_count: int,
                            **kwargs) -> "AuditLog":
        """Build an (unsaved) audit entry for an executed query.

        The description holds a 100-character preview; the full query text
        and result count are preserved in ``event_metadata``.
        """
        # Fix: only append an ellipsis when the query was actually truncated
        # (the original unconditionally added "..." even to short queries).
        preview = query if len(query) <= 100 else f"{query[:100]}..."
        return cls(
            event_type=AuditEventType.QUERY_EXECUTED,
            event_description=f"Query executed: {preview}",
            user_id=user_id,
            user_email=user_email,
            ip_address=kwargs.get("ip_address"),
            response_time_ms=response_time_ms,
            event_metadata={
                "query": query,
                "result_count": result_count,
                **kwargs,
            },
        )

    @classmethod
    def log_security_event(cls, event_type: str, severity: str,
                           description: str, user_id: Optional[str] = None,
                           ip_address: Optional[str] = None, **kwargs) -> "AuditLog":
        """Build an (unsaved) audit entry for a generic security event.

        All extra keyword arguments become the ``event_metadata`` payload.
        """
        return cls(
            event_type=event_type,
            event_description=description,
            user_id=user_id,
            ip_address=ip_address,
            severity=severity,
            event_metadata=kwargs,
        )

101
app/models/commitment.py Normal file
View File

@@ -0,0 +1,101 @@
"""
Commitment models for the Virtual Board Member AI System.
"""
from datetime import datetime
from typing import Optional
from sqlalchemy import Column, String, DateTime, Boolean, Text, Integer, ForeignKey, Date
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
import uuid
import enum
from app.core.database import Base
class CommitmentStatus(str, enum.Enum):
    """Commitment status enumeration.

    ``str``-mixin enum so members compare equal to the raw strings stored
    in the ``commitments.status`` column.
    """

    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    OVERDUE = "overdue"
    CANCELLED = "cancelled"
    DEFERRED = "deferred"
class CommitmentPriority(str, enum.Enum):
    """Commitment priority levels, lowest to highest."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"
class Commitment(Base):
    """Commitment model for tracking board and executive commitments.

    A commitment is extracted (by AI or manually) from a source document
    and tracked through its lifecycle: status/priority, due dates,
    assignment, extraction provenance, and progress.
    """

    __tablename__ = "commitments"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)

    # Commitment details
    title = Column(String(500), nullable=False, index=True)
    description = Column(Text, nullable=True)
    commitment_text = Column(Text, nullable=False)  # Original text from document

    # Status and priority (stored as plain strings; enum members coerce via str mixin)
    status = Column(String(50), default=CommitmentStatus.PENDING, nullable=False)
    priority = Column(String(20), default=CommitmentPriority.MEDIUM, nullable=False)

    # Dates
    due_date = Column(Date, nullable=True)
    completion_date = Column(Date, nullable=True)

    # Assignment
    assigned_to = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    assigned_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)

    # Source information
    document_id = Column(UUID(as_uuid=True), ForeignKey("documents.id"), nullable=False)
    document_page = Column(Integer, nullable=True)  # Page number in document
    document_section = Column(String(200), nullable=True)  # Section/context

    # AI extraction metadata
    confidence_score = Column(Integer, nullable=True)  # 0-100 confidence in extraction
    extraction_method = Column(String(50), nullable=True)  # LLM, rule-based, etc.
    extraction_metadata = Column(JSONB, nullable=True)  # Additional extraction info

    # Progress tracking
    progress_notes = Column(Text, nullable=True)
    progress_percentage = Column(Integer, default=0)  # 0-100

    # Notifications
    reminder_enabled = Column(Boolean, default=True)
    reminder_frequency = Column(String(50), default="weekly")  # daily, weekly, monthly

    # Timestamps (naive UTC via datetime.utcnow, consistent with other models)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Relationships
    document = relationship("Document", back_populates="commitments")
    assigned_user = relationship("User", foreign_keys=[assigned_to])
    assigned_by_user = relationship("User", foreign_keys=[assigned_by])

    def __repr__(self) -> str:
        return f"<Commitment(id={self.id}, title='{self.title}', status='{self.status}')>"

    @property
    def is_overdue(self) -> bool:
        """Whether the due date has passed for a still-open commitment.

        Completed and cancelled commitments are never overdue. Fix: the
        original compared against the server's *local* date, while every
        timestamp column uses ``datetime.utcnow``; near midnight the two
        disagree, so we now use the UTC date for consistency.
        """
        if self.due_date and self.status not in (CommitmentStatus.COMPLETED, CommitmentStatus.CANCELLED):
            return datetime.utcnow().date() > self.due_date
        return False

    @property
    def days_until_due(self) -> Optional[int]:
        """Days remaining until ``due_date`` (negative once past), or ``None``.

        Uses the UTC date for consistency with ``is_overdue`` and the
        UTC-based timestamp columns.
        """
        if self.due_date:
            return (self.due_date - datetime.utcnow().date()).days
        return None

136
app/models/document.py Normal file
View File

@@ -0,0 +1,136 @@
"""
Document models for the Virtual Board Member AI System.
"""
from datetime import datetime
from typing import Optional
from sqlalchemy import Column, String, DateTime, Boolean, Text, Integer, ForeignKey, Table
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import relationship
import uuid
import enum
from app.core.database import Base
class DocumentType(str, enum.Enum):
    """Document types.

    ``str``-mixin enum so members can be stored directly in the
    ``documents.document_type`` String column; ``OTHER`` is the catch-all
    default.
    """

    BOARD_PACK = "board_pack"
    MINUTES = "minutes"
    STRATEGIC_PLAN = "strategic_plan"
    FINANCIAL_REPORT = "financial_report"
    COMPLIANCE_REPORT = "compliance_report"
    POLICY_DOCUMENT = "policy_document"
    CONTRACT = "contract"
    PRESENTATION = "presentation"
    SPREADSHEET = "spreadsheet"
    OTHER = "other"
# Association (join) table for the many-to-many Document <-> DocumentTag
# relationship; the composite primary key prevents duplicate pairings.
document_tag_association = Table(
    "document_tag_association",
    Base.metadata,
    Column("document_id", UUID(as_uuid=True), ForeignKey("documents.id"), primary_key=True),
    Column("tag_id", UUID(as_uuid=True), ForeignKey("document_tags.id"), primary_key=True),
)
class Document(Base):
    """Document model.

    Stores an uploaded document's file metadata, its processing and text
    extraction state, and links to its versions, tags, and the commitments
    extracted from it.
    """

    __tablename__ = "documents"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    title = Column(String(500), nullable=False, index=True)
    description = Column(Text, nullable=True)
    document_type = Column(String(50), nullable=False, default=DocumentType.OTHER)

    # File information
    filename = Column(String(255), nullable=False)
    file_path = Column(String(500), nullable=False)
    file_size = Column(Integer, nullable=False)  # presumably bytes — TODO confirm at upload site
    mime_type = Column(String(100), nullable=False)

    # Processing status
    processing_status = Column(String(50), default="pending")  # pending, processing, completed, failed
    processing_error = Column(Text, nullable=True)

    # Content extraction
    extracted_text = Column(Text, nullable=True)
    text_embedding = Column(JSONB, nullable=True)  # Vector embedding

    # Metadata
    document_metadata = Column(JSONB, nullable=True)  # Additional metadata
    source_system = Column(String(100), nullable=True)  # SharePoint, email, upload, etc.
    external_id = Column(String(255), nullable=True)  # ID from external system

    # Ownership / tenancy
    uploaded_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    organization_id = Column(UUID(as_uuid=True), nullable=True)  # For multi-tenant support

    # Timestamps (naive UTC via datetime.utcnow)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Relationships (versions are deleted along with the document)
    versions = relationship("DocumentVersion", back_populates="document", cascade="all, delete-orphan")
    tags = relationship("DocumentTag", secondary=document_tag_association, back_populates="documents")
    commitments = relationship("Commitment", back_populates="document")

    def __repr__(self):
        return f"<Document(id={self.id}, title='{self.title}', type='{self.document_type}')>"
class DocumentVersion(Base):
    """Document version model.

    Immutable snapshot of one revision of a Document: the file at that
    point in time, its checksum for integrity/deduplication, and the
    extracted content for that revision.
    """

    __tablename__ = "document_versions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    document_id = Column(UUID(as_uuid=True), ForeignKey("documents.id"), nullable=False)
    version_number = Column(Integer, nullable=False)

    # File information
    filename = Column(String(255), nullable=False)
    file_path = Column(String(500), nullable=False)
    file_size = Column(Integer, nullable=False)
    checksum = Column(String(64), nullable=False)  # SHA-256 hash (64 hex chars)

    # Content
    extracted_text = Column(Text, nullable=True)
    text_embedding = Column(JSONB, nullable=True)

    # Metadata
    change_description = Column(Text, nullable=True)
    created_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)

    # Timestamps (no updated_at: versions are write-once)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)

    # Relationships
    document = relationship("Document", back_populates="versions")

    def __repr__(self):
        return f"<DocumentVersion(id={self.id}, document_id={self.document_id}, version={self.version_number})>"
class DocumentTag(Base):
    """Document tag model.

    A globally unique label that can be attached to any number of
    documents via the ``document_tag_association`` join table.
    """

    __tablename__ = "document_tags"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(100), nullable=False, unique=True, index=True)
    description = Column(Text, nullable=True)
    color = Column(String(7), nullable=True)  # Hex color code, e.g. "#ff8800"

    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Relationships
    documents = relationship("Document", secondary=document_tag_association, back_populates="tags")

    def __repr__(self):
        return f"<DocumentTag(id={self.id}, name='{self.name}')>"

94
app/models/user.py Normal file
View File

@@ -0,0 +1,94 @@
"""
User model for authentication and user management.
"""
from datetime import datetime
from typing import Optional
from sqlalchemy import Column, String, DateTime, Boolean, Text, Enum
from sqlalchemy.dialects.postgresql import UUID
import uuid
import enum
from app.core.database import Base
class UserRole(str, enum.Enum):
    """User roles for access control.

    ``str``-mixin enum: members compare equal to their raw string values,
    which the ``User.is_*`` helpers rely on.
    """

    BOARD_MEMBER = "board_member"
    EXECUTIVE = "executive"
    EXECUTIVE_ASSISTANT = "executive_assistant"
    ANALYST = "analyst"
    AUDITOR = "auditor"
    ADMIN = "admin"
class User(Base):
"""User model for authentication and user management."""
__tablename__ = "users"
# Primary key
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
# User identification
email = Column(String(255), unique=True, nullable=False, index=True)
username = Column(String(100), unique=True, nullable=True, index=True)
# Authentication
hashed_password = Column(String(255), nullable=True) # Null for OAuth users
is_active = Column(Boolean, default=True)
is_verified = Column(Boolean, default=False)
# User information
first_name = Column(String(100), nullable=False)
last_name = Column(String(100), nullable=False)
full_name = Column(String(200), nullable=False)
# Role and permissions
role = Column(Enum(UserRole), nullable=False, default=UserRole.EXECUTIVE)
department = Column(String(100), nullable=True)
permissions = Column(Text, nullable=True) # JSON string of permissions
# Contact information
phone = Column(String(20), nullable=True)
company = Column(String(200), nullable=True)
job_title = Column(String(100), nullable=True)
# OAuth information
oauth_provider = Column(String(50), nullable=True) # auth0, cognito, etc.
oauth_id = Column(String(255), nullable=True)
# Timestamps
created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
last_login_at = Column(DateTime, nullable=True)
# Preferences
timezone = Column(String(50), default="UTC")
language = Column(String(10), default="en")
notification_preferences = Column(Text, nullable=True) # JSON string
def __repr__(self) -> str:
return f"<User(id={self.id}, email='{self.email}', role='{self.role}')>"
@property
def display_name(self) -> str:
"""Get user's display name."""
return self.full_name or f"{self.first_name} {self.last_name}"
    def has_permission(self, permission: str) -> bool:
        """Check if user has specific permission.

        NOTE(review): this is a stub that always returns ``True`` and
        never consults the ``permissions`` JSON column — do not rely on
        it for authorization until the TODO below is implemented.
        """
        # TODO: Implement permission checking logic
        return True
    def is_board_member(self) -> bool:
        """Check if user is a board member.

        Uses ``==`` (not ``is``) so plain-string role values also match,
        since UserRole is a str-mixin enum.
        """
        return self.role == UserRole.BOARD_MEMBER
    def is_executive(self) -> bool:
        """Check if user is an executive (board members count as executives too)."""
        return self.role in [UserRole.BOARD_MEMBER, UserRole.EXECUTIVE]
    def is_admin(self) -> bool:
        """Check if user is an admin (the only role with full system access implied here)."""
        return self.role == UserRole.ADMIN