tyndale-ai-service/app/llm/exceptions.py

"""Custom exceptions for LLM adapters."""
from fastapi import HTTPException
class LLMError(Exception):
"""Base exception for all LLM-related errors."""
def __init__(self, message: str, status_code: int = 500):
self.message = message
self.status_code = status_code
super().__init__(message)
class LLMAuthenticationError(LLMError):
"""Raised when API authentication fails."""
def __init__(self, message: str = "LLM authentication failed"):
super().__init__(message, status_code=401)
class LLMRateLimitError(LLMError):
"""Raised when rate limit is exceeded."""
def __init__(self, message: str = "LLM rate limit exceeded"):
super().__init__(message, status_code=429)
class LLMConnectionError(LLMError):
"""Raised when connection to LLM service fails."""
def __init__(self, message: str = "Could not connect to LLM service"):
super().__init__(message, status_code=503)
class LLMConfigurationError(LLMError):
"""Raised when LLM configuration is invalid."""
def __init__(self, message: str = "Invalid LLM configuration"):
super().__init__(message, status_code=500)
class LLMResponseError(LLMError):
"""Raised when LLM returns an invalid or unexpected response."""
def __init__(self, message: str = "Invalid response from LLM"):
super().__init__(message, status_code=502)
def llm_exception_to_http(exc: LLMError) -> HTTPException:
    """Convert an LLMError to a FastAPI HTTPException.

    Args:
        exc: The LLMError to convert.

    Returns:
        An HTTPException with appropriate status code and detail.
    """
    return HTTPException(status_code=exc.status_code, detail=exc.message)
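
# Usage sketch (assumption: a route module imports this helper; the route
# path, `ChatRequest`, and `get_adapter` names are hypothetical):
#
#     @router.post("/chat")
#     async def chat(request: ChatRequest):
#         try:
#             return await get_adapter().complete(request.prompt)
#         except LLMError as exc:
#             raise llm_exception_to_http(exc) from exc
#
# Alternatively, an application-wide FastAPI exception handler can perform
# the same conversion once instead of per-route:
#
#     @app.exception_handler(LLMError)
#     async def handle_llm_error(request: Request, exc: LLMError):
#         return JSONResponse(status_code=exc.status_code,
#                             content={"detail": exc.message})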