import os
from functools import lru_cache
from typing import Optional

from pydantic_settings import BaseSettings

os.environ["WEAVE_CAPTURE_CODE"] = "false"

if os.getenv("HUGGINGFACE_DEMO"):
    # Hugging Face demo: read keys directly from the environment, no Redis auth/SSL
    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
    GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
    ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
    REDIS_PASSWORD = ""
    REDIS_USE_SSL = False
else:
    from app.aws.secrets import get_secret

    ENV = os.getenv("ENV", "LOCAL")
    secrets = get_secret()
    if ENV != "PROD":
        print("not WB")
        # os.environ["WANDB_API_KEY"] = secrets["WANDB_API_KEY"]
    OPENAI_API_KEY = secrets["OPENAI_API_KEY"]
    GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
    ANTHROPIC_API_KEY = secrets["ANTHROPIC_API_KEY"]
    REDIS_PASSWORD = secrets["REDIS_PASSWORD"] if ENV == "PROD" else ""
    REDIS_USE_SSL = ENV in ("PROD", "UAT")

os.environ["WANDB_BASE_URL"] = "https://api.wandb.ai"


class Settings(BaseSettings):
    # Supported OpenAI models
    OPENAI_MODELS: list = [
        "gpt-4o",  # first model is the default of the vendor
        "gpt-4o-2024-11-20",
        "gpt-4o-mini",
        "gpt-4.1-2025-04-14",
        "chatgpt-4o-latest",
        "ft:gpt-4o-2024-08-06:dreem:selfridges-dresses-test:BVYmIA6e",
        "ft:gpt-4o-2024-08-06:dreem:500dresses-01:BWPLL6Qj",
    ]

    # Supported Anthropic models
    ANTHROPIC_MODELS: list = [
        "claude-3-5-sonnet-latest",  # first model is the default of the vendor
    ]

    # Supported Gemini models
    GEMINI_MODELS: list = [
        "gemini-2.5-flash-preview-04-17",  # first model is the default of the vendor
        "gemini-1.5-flash",
    ]

    # Supported AI services
    SUPPORTED_MODELS: list = OPENAI_MODELS + ANTHROPIC_MODELS + GEMINI_MODELS

    # API keys (defaults come from the module-level values resolved above)
    OPENAI_API_KEY: str = OPENAI_API_KEY
    ANTHROPIC_API_KEY: str = ANTHROPIC_API_KEY
    GEMINI_API_KEY: str = GEMINI_API_KEY

    DEFAULT_MAX_ATTEMPTS: int = 1

    # AI service configuration
    DEFAULT_MODEL: str = OPENAI_MODELS[0]
    MAX_TOKENS: int = 2000
    TEMPERATURE: float = 0.0

    # CORS configuration
    CORS_ALLOW_ORIGINS: bool = True

    # API configuration
    API_V1_PREFIX: str = "/api/v1"
    PROJECT_NAME: str = "Dreem Attribution"
    DEBUG: bool = False

    # Rate limiting
    RATE_LIMIT_CALLS: int = 100
    RATE_LIMIT_PERIOD: int = 60

    # Cache configuration
    REDIS_PASSWORD: Optional[str] = REDIS_PASSWORD
    REDIS_USE_SSL: Optional[bool] = REDIS_USE_SSL

    # Logging
    LOG_LEVEL: str = "INFO"
    LOG_FORMAT: str = "json"

    # MAX_DOWNLOAD_RETRY: int = 10  # times

    def validate_api_keys(self):
        """Validate that required API keys are present."""
        if not self.OPENAI_API_KEY:
            raise ValueError("OPENAI_API_KEY is required")
        if not self.ANTHROPIC_API_KEY:
            raise ValueError("ANTHROPIC_API_KEY is required")
        if not self.GEMINI_API_KEY:
            raise ValueError("GEMINI_API_KEY is required")


# Create a cached instance of settings
@lru_cache
def get_settings() -> Settings:
    """
    Create and cache a Settings instance.
    Returns the same instance for subsequent calls.
    """
    settings = Settings()
    settings.validate_api_keys()
    return settings
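
# Usage sketch (an assumption, not part of this module): the cached settings can be
# injected into route handlers via a dependency; the import path and app wiring below
# are hypothetical.
#
#     from fastapi import Depends, FastAPI
#     from app.core.config import Settings, get_settings  # hypothetical import path
#
#     app = FastAPI()
#
#     @app.get("/health")
#     def health(settings: Settings = Depends(get_settings)):
#         return {"project": settings.PROJECT_NAME, "default_model": settings.DEFAULT_MODEL}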