Tech Stack Advisor - Code Viewer

← Back to File Tree

logging.py

Language: python | Path: backend/src/core/logging.py | Lines: 107
"""Structured logging configuration using structlog."""
import logging
import sys
from typing import Any
import structlog
from prometheus_client import Counter, Gauge
from .config import settings


def setup_logging() -> None:
    """Configure structlog for the whole application.

    Emits machine-readable JSON in production and human-friendly console
    output everywhere else. Level filtering is driven by
    ``settings.log_level``.

    Raises:
        ValueError: If ``settings.log_level`` is not a recognized
            logging level name (e.g. "DEBUG", "INFO", "WARNING").
    """
    # logging.getLevelName() doubles as a reverse name->number lookup when
    # given a string, but for an unknown (or lowercase) name it silently
    # returns the string "Level <name>" instead of raising. That string
    # would be passed to make_filtering_bound_logger and break filtering.
    # Normalize case and fail loudly so a config typo can't go unnoticed.
    level = logging.getLevelName(str(settings.log_level).upper())
    if not isinstance(level, int):
        raise ValueError(f"Invalid log level: {settings.log_level!r}")

    structlog.configure(
        processors=[
            # Merge request-scoped context bound via structlog.contextvars.
            structlog.contextvars.merge_contextvars,
            structlog.processors.add_log_level,
            structlog.processors.StackInfoRenderer(),
            structlog.dev.set_exc_info,
            structlog.processors.TimeStamper(fmt="iso"),
            # JSON for log aggregation in production, pretty console in dev.
            structlog.processors.JSONRenderer() if settings.environment == "production"
            else structlog.dev.ConsoleRenderer(),
        ],
        wrapper_class=structlog.make_filtering_bound_logger(level),
        context_class=dict,
        logger_factory=structlog.PrintLoggerFactory(file=sys.stdout),
        cache_logger_on_first_use=True,
    )


def get_logger(name: str) -> structlog.stdlib.BoundLogger:
    """Return a structlog-backed bound logger for *name*.

    Args:
        name: Dotted logger name; callers conventionally pass ``__name__``.

    Returns:
        A bound logger that picks up the configuration applied by
        :func:`setup_logging`.
    """
    logger: structlog.stdlib.BoundLogger = structlog.get_logger(name)
    return logger


# Prometheus gauges for LLM usage, created at module level so every importer
# shares the same metric instances. UsageTracker.log_request overwrites them
# with .set() after each request.
# NOTE(review): the names say "daily" but nothing in this file resets the
# gauges (or the tracker counters) at day boundaries — confirm a scheduled
# reset exists elsewhere.
llm_daily_tokens = Gauge('llm_daily_tokens', 'Daily LLM tokens used')
llm_daily_cost = Gauge('llm_daily_cost_usd', 'Daily LLM cost in USD')
llm_daily_queries = Gauge('llm_daily_queries', 'Daily LLM queries')

# Usage tracking for cost monitoring
class UsageTracker:
    """Track LLM API usage and accumulated cost for the current day.

    Counters accumulate in memory from instantiation onward; call
    :meth:`reset_daily` (e.g. from a midnight scheduler) to start a new
    accounting window. Each logged request also refreshes the module-level
    Prometheus gauges and emits a structured log event.

    NOTE: counters are plain ints/floats with no locking, so concurrent
    writers may lose updates — acceptable for approximate cost monitoring.
    """

    # USD rates per 1M (input, output) tokens, keyed by model name.
    # Unknown models fall back to the Claude 3 Haiku rates, which matches
    # the previously hard-coded behavior for every input.
    _PRICING: dict[str, tuple[float, float]] = {
        "claude-3-haiku": (0.25, 1.25),
    }
    _DEFAULT_PRICING: tuple[float, float] = (0.25, 1.25)

    def __init__(self) -> None:
        self.daily_queries = 0   # requests recorded since the last reset
        self.daily_tokens = 0    # input + output tokens since the last reset
        self.daily_cost = 0.0    # accumulated USD cost since the last reset
        self.logger = get_logger(__name__)

    def reset_daily(self) -> None:
        """Zero the daily counters; call at the start of each accounting day."""
        self.daily_queries = 0
        self.daily_tokens = 0
        self.daily_cost = 0.0

    def _request_cost(self, input_tokens: int, output_tokens: int, model: str) -> float:
        """Return the USD cost of a single request under *model*'s pricing."""
        input_rate, output_rate = self._PRICING.get(model, self._DEFAULT_PRICING)
        return (input_tokens * input_rate + output_tokens * output_rate) / 1_000_000

    def log_request(
        self,
        input_tokens: int,
        output_tokens: int,
        model: str,
    ) -> None:
        """Record one LLM request: update counters, gauges, and logs.

        Args:
            input_tokens: Number of input (prompt) tokens.
            output_tokens: Number of output (completion) tokens.
            model: Model name used; selects the pricing row (unknown models
                are billed at the default Haiku rates).
        """
        self.daily_queries += 1
        self.daily_tokens += input_tokens + output_tokens

        request_cost = self._request_cost(input_tokens, output_tokens, model)
        self.daily_cost += request_cost

        # Keep the shared Prometheus gauges in sync with in-memory totals.
        llm_daily_tokens.set(self.daily_tokens)
        llm_daily_cost.set(self.daily_cost)
        llm_daily_queries.set(self.daily_queries)

        self.logger.info(
            "api_request",
            model=model,
            input_tokens=input_tokens,
            output_tokens=output_tokens,
            cost_usd=round(request_cost, 4),
            daily_total_cost=round(self.daily_cost, 4),
            daily_queries=self.daily_queries,
        )

        # Warn once spend reaches 80% of the configured daily budget. This
        # fires on every subsequent request, which is intentional: repeated
        # warnings are more visible to log-based alerting.
        if self.daily_cost >= settings.daily_budget_usd * 0.8:
            self.logger.warning(
                "approaching_daily_budget",
                current_cost=round(self.daily_cost, 4),
                budget=settings.daily_budget_usd,
            )


# Module-level singleton so every importer shares one set of daily totals.
# Created at import time; construction only calls get_logger(), no I/O.
usage_tracker = UsageTracker()