import logging
from typing import List, Dict, Optional

from core.llm_factory import llm_factory, ProviderNotAvailableError

logger = logging.getLogger(__name__)


class LLMClient:
    """Simple LLM client using the factory pattern."""

    def __init__(self):
        try:
            self.provider = llm_factory.get_provider()
        except ProviderNotAvailableError:
            self.provider = None
            logger.error("No LLM providers available")

    def generate(self, prompt: str, conversation_history: List[Dict], stream: bool = False) -> Optional[str]:
        """Generate a response."""
        if not self.provider:
            raise ProviderNotAvailableError("No LLM provider available")

        try:
            if stream:
                result = self.provider.stream_generate(prompt, conversation_history)
                if isinstance(result, list):
                    return "".join(result)
                return result
            else:
                return self.provider.generate(prompt, conversation_history)
        except Exception as e:
            logger.error(f"LLM generation failed: {e}")
            raise
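

# Minimal usage sketch, assuming llm_factory has at least one provider
# configured. The prompt and conversation history below are hypothetical
# placeholders, not values taken from the rest of the codebase.
if __name__ == "__main__":
    client = LLMClient()
    history: List[Dict] = [{"role": "user", "content": "Hello"}]
    reply = client.generate("Summarize our conversation so far.", history, stream=False)
    print(reply)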