Initial commit: sales analysis template

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
Jonathan Pressnell
2026-02-06 09:16:34 -05:00
commit cf0b596449
38 changed files with 8001 additions and 0 deletions

197
logger_config.py Normal file
View File

@@ -0,0 +1,197 @@
"""
Logging configuration for analysis scripts
Provides structured logging with file and console output
Usage:
from logger_config import get_logger
logger = get_logger('my_analysis')
logger.info("Analysis started")
logger.warning("Low data quality detected")
logger.error("Failed to load data")
"""
import logging
import sys
from pathlib import Path
from datetime import datetime
from config import COMPANY_NAME, OUTPUT_DIR
# Module-level cache: holds the logger created by the first
# setup_logging()/get_logger() call so subsequent get_logger()
# calls reuse the same configured instance.
_logger = None
def setup_logging(log_level=logging.INFO, log_file=None, analysis_name=None):
    """
    Setup logging configuration.

    Creates a logger with a detailed file handler and a simpler console
    handler, and caches it in the module-level ``_logger``.

    Args:
        log_level: Logging level (DEBUG, INFO, WARNING, ERROR)
        log_file: Path to log file (defaults to logs/analysis_YYYYMMDD_HHMMSS.log)
        analysis_name: Name of analysis for log file naming

    Returns:
        logging.Logger: Configured logger instance
    """
    global _logger
    # Create logs directory
    logs_dir = Path('logs')
    logs_dir.mkdir(exist_ok=True)
    # Default log file name
    if log_file is None:
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        if analysis_name:
            # Sanitize the analysis name so it is safe as a filename
            safe_name = analysis_name.lower().replace(' ', '_').replace('/', '_')
            log_file = logs_dir / f"{safe_name}_{timestamp}.log"
        else:
            log_file = logs_dir / f"analysis_{timestamp}.log"
    else:
        log_file = Path(log_file)
        log_file.parent.mkdir(parents=True, exist_ok=True)
    # Create logger
    logger = logging.getLogger(analysis_name or 'analysis')
    logger.setLevel(log_level)
    # BUG FIX: the previous `logger.handlers = []` dropped old handlers
    # without closing them, leaking an open log-file descriptor every
    # time setup_logging() was called again. Close before removing.
    for old_handler in list(logger.handlers):
        old_handler.close()
        logger.removeHandler(old_handler)
    # Don't also bubble records up to the root logger's handlers —
    # that would print every message twice when root is configured.
    logger.propagate = False
    # Create formatters
    detailed_formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    console_formatter = logging.Formatter(
        '%(levelname)s - %(message)s'
    )
    # File handler (detailed)
    file_handler = logging.FileHandler(log_file, encoding='utf-8')
    file_handler.setLevel(log_level)
    file_handler.setFormatter(detailed_formatter)
    logger.addHandler(file_handler)
    # Console handler (simpler)
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(log_level)
    console_handler.setFormatter(console_formatter)
    logger.addHandler(console_handler)
    # Log startup banner (plain string multiply; the f-string prefix
    # on the original "="*60 lines was accidental)
    banner = "=" * 60
    logger.info(banner)
    logger.info(f"Analysis: {analysis_name or 'Unknown'}")
    logger.info(f"Company: {COMPANY_NAME}")
    logger.info(f"Log File: {log_file}")
    logger.info(banner)
    _logger = logger
    return logger
def get_logger(analysis_name=None, log_level=logging.INFO):
    """
    Return the shared logger, creating it on first use.

    Args:
        analysis_name: Name of analysis
        log_level: Logging level (default: INFO)

    Returns:
        logging.Logger: Logger instance
    """
    global _logger
    # Reuse the cached logger when one has already been configured.
    if _logger is not None:
        return _logger
    _logger = setup_logging(log_level=log_level, analysis_name=analysis_name)
    return _logger
def log_analysis_start(analysis_name, logger=None):
    """
    Record the start of an analysis run.

    Args:
        analysis_name: Name of analysis
        logger: Logger instance (creates one if None)
    """
    log = get_logger(analysis_name) if logger is None else logger
    log.info(f"Starting analysis: {analysis_name}")
    log.info(f"Timestamp: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
def log_analysis_end(analysis_name, success=True, logger=None):
    """
    Record the completion (or failure) of an analysis run.

    Args:
        analysis_name: Name of analysis
        success: Whether analysis completed successfully
        logger: Logger instance (creates one if None)
    """
    log = get_logger(analysis_name) if logger is None else logger
    # Success goes to INFO; failure is escalated to ERROR.
    if not success:
        log.error(f"Analysis failed: {analysis_name}")
    else:
        log.info(f"Analysis completed successfully: {analysis_name}")
    log.info(f"Timestamp: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    log.info("=" * 60)
def log_data_loading(df, logger=None):
    """
    Summarize a freshly loaded DataFrame into the log.

    Args:
        df: Loaded DataFrame
        logger: Logger instance (creates one if None)
    """
    log = get_logger() if logger is None else logger
    row_count, col_count = len(df), len(df.columns)
    log.info(f"Data loaded: {row_count:,} rows, {col_count} columns")
    # Imported here so the module can load even if config changes.
    from config import REVENUE_COLUMN, DATE_COLUMN
    if REVENUE_COLUMN in df.columns:
        total_revenue = df[REVENUE_COLUMN].sum()
        log.info(f"Total revenue: ${total_revenue / 1e6:.2f}m")
    if DATE_COLUMN in df.columns:
        coverage_pct = df[DATE_COLUMN].notna().sum() / len(df) * 100
        log.info(f"Date coverage: {coverage_pct:.1f}%")
def log_error(error, logger=None, context=None):
    """
    Log an error (with traceback when available), optionally prefixed
    with a context string.

    Args:
        error: Exception or error message
        logger: Logger instance (creates one if None)
        context: Additional context string
    """
    log = get_logger() if logger is None else logger
    message = f"{context}: {error}" if context else str(error)
    log.error(message, exc_info=True)
# ============================================================================
# EXAMPLE USAGE
# ============================================================================
if __name__ == "__main__":
    # Smoke test: exercise every log level plus the start/end helpers.
    logger = setup_logging(log_level=logging.DEBUG, analysis_name="Example Analysis")
    logger.debug("This is a debug message")
    logger.info("This is an info message")
    logger.warning("This is a warning message")
    logger.error("This is an error message")
    log_analysis_start("Example Analysis", logger)
    # BUG FIX: the original passed `logger` positionally after the
    # `success=True` keyword argument, which is a SyntaxError
    # (positional argument follows keyword argument).
    log_analysis_end("Example Analysis", success=True, logger=logger)