"""
LLM Adapters for Answer Generator.
This module provides adapters for various LLM providers, converting
between the unified interface and provider-specific formats.
Available adapters:
- OllamaAdapter: For local Ollama models
- OpenAIAdapter: For OpenAI API (GPT models)
- HuggingFaceAdapter: For HuggingFace models and Inference API
"""
from .base_adapter import (
    BaseLLMAdapter,
    RateLimitError,
    AuthenticationError,
    ModelNotFoundError
)
from .ollama_adapter import OllamaAdapter
from .huggingface_adapter import HuggingFaceAdapter
from .mock_adapter import MockLLMAdapter
# Future adapters will be imported here
# from .openai_adapter import OpenAIAdapter
__all__ = [
    'BaseLLMAdapter',
    'OllamaAdapter',
    'HuggingFaceAdapter',
    'MockLLMAdapter',
    # 'OpenAIAdapter',
    'RateLimitError',
    'AuthenticationError',
    'ModelNotFoundError'
]
# Adapter registry for easy lookup
ADAPTER_REGISTRY = {
    'ollama': OllamaAdapter,
    'huggingface': HuggingFaceAdapter,
    'mock': MockLLMAdapter,
    # 'openai': OpenAIAdapter,
}
def get_adapter_class(provider: str):
    """
    Get adapter class by provider name.

    Args:
        provider: Provider name (e.g., 'ollama', 'huggingface', 'mock')

    Returns:
        Adapter class

    Raises:
        ValueError: If the provider is not found
    """
    if provider not in ADAPTER_REGISTRY:
        raise ValueError(
            f"Unknown LLM provider: {provider}. "
            f"Available: {list(ADAPTER_REGISTRY.keys())}"
        )
    return ADAPTER_REGISTRY[provider]
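
# Example usage (illustrative sketch, kept as comments so importing this
# package has no side effects; the constructor arguments and generation
# method shown here are hypothetical and depend on each adapter's actual
# implementation):
#
#     adapter_cls = get_adapter_class('ollama')
#     adapter = adapter_cls(model_name='llama3')        # hypothetical constructor args
#     answer = adapter.generate("What is an adapter?")  # hypothetical method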