File size: 1,155 Bytes
5b5f50c 86b116d 5b5f50c 7878c29 5b5f50c 75f72a7 5b5f50c 2cb4727 737aa03 5b5f50c 737aa03 5b5f50c 2cb4727 737aa03 2cb4727 5b5f50c 2cb4727 86b116d 5b5f50c 737aa03 5b5f50c 86b116d 5b5f50c 2cb4727 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 |
import logging
from typing import List, Dict, Optional
from core.llm_factory import llm_factory, ProviderNotAvailableError
logger = logging.getLogger(__name__)
class LLMClient:
    """Simple LLM client that delegates to a provider obtained from the factory."""

    def __init__(self):
        # Resolve a provider eagerly. A missing provider is tolerated here so
        # the client can still be constructed; generate() raises on first use.
        try:
            self.provider = llm_factory.get_provider()
        except ProviderNotAvailableError:
            self.provider = None
            logger.error("No LLM providers available")

    def generate(self, prompt: str, conversation_history: List[Dict], stream: bool = False) -> Optional[str]:
        """Generate a response for *prompt* given the conversation history.

        Args:
            prompt: The user prompt to send to the provider.
            conversation_history: Prior messages as provider-shaped dicts.
            stream: If True, call the provider's streaming API; a list of
                chunks is joined into a single string before returning.

        Returns:
            The generated text (streamed chunks joined when the provider
            returns a list; otherwise the provider's result as-is).

        Raises:
            ProviderNotAvailableError: If no provider was available at init.
            Exception: Any provider failure is logged with traceback and
                re-raised unchanged.
        """
        if not self.provider:
            raise ProviderNotAvailableError("No LLM provider available")
        try:
            if stream:
                result = self.provider.stream_generate(prompt, conversation_history)
                # Some providers return an already-collected list of chunks.
                if isinstance(result, list):
                    return "".join(result)
                return result
            else:
                return self.provider.generate(prompt, conversation_history)
        except Exception as e:
            # logger.exception records the full traceback (logger.error with an
            # f-string did not); lazy %-args avoid building the message when
            # the log level is disabled.
            logger.exception("LLM generation failed: %s", e)
            raise
|