Advocate_Life_Style / ai_client.py
Updated the Google Gemini AI integration, migrating to the new google-genai library. Added new AI models, updated the provider configuration, and improved query processing. Fixed bugs in response generation and logging. Changed the default models in the configuration. Tested successfully.
38ec34c
#!/usr/bin/env python3
"""
Universal AI Client for Lifestyle Journey Application
This module provides a unified interface for different AI providers (Google Gemini, Anthropic Claude)
with automatic fallback and provider-specific optimizations.
"""
import os
import json
import logging
from datetime import datetime
from typing import Optional, Dict, Any
from abc import ABC, abstractmethod
# Import configurations
from ai_providers_config import (
    AIProvider, AIModel, get_agent_config, get_provider_config,
    is_provider_available, get_available_providers
)
# Import provider-specific clients
try:
    import google.genai as genai
    from google.genai import types
    GEMINI_AVAILABLE = True
except ImportError:
    GEMINI_AVAILABLE = False

try:
    import anthropic
    ANTHROPIC_AVAILABLE = True
except ImportError:
    ANTHROPIC_AVAILABLE = False
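
# API keys are read from the environment when a client is constructed:
#   GEMINI_API_KEY    - required by GeminiClient
#   ANTHROPIC_API_KEY - required by AnthropicClient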


class BaseAIClient(ABC):
    """Abstract base class for AI clients"""

    def __init__(self, provider: AIProvider, model: AIModel, temperature: float = 0.3):
        self.provider = provider
        self.model = model
        self.temperature = temperature
        self.call_counter = 0

    @abstractmethod
    def generate_response(self, system_prompt: str, user_prompt: str, temperature: Optional[float] = None) -> str:
        """Generate a response from the AI model"""
        pass

    def _log_interaction(self, system_prompt: str, user_prompt: str, response: str, call_type: str = ""):
        """Log the AI interaction if prompt logging is enabled"""
        log_prompts_enabled = os.getenv("LOG_PROMPTS", "false").lower() == "true"
        if not log_prompts_enabled:
            return
        logger = logging.getLogger(f"{__name__}.{self.provider.value}")
        if not logger.handlers:
            logger.setLevel(logging.INFO)
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
            logger.addHandler(console_handler)
            file_handler = logging.FileHandler('ai_interactions.log', encoding='utf-8')
            file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
            logger.addHandler(file_handler)
        self.call_counter += 1
        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        log_message = f"""
{'='*80}
πŸ€– {self.provider.value.upper()} API CALL #{self.call_counter} [{call_type}] - {timestamp}
{'='*80}
πŸ“€ SYSTEM PROMPT:
{'-'*40}
{system_prompt}
πŸ“€ USER PROMPT:
{'-'*40}
{user_prompt}
πŸ“₯ AI RESPONSE:
{'-'*40}
{response}
πŸ”§ MODEL: {self.model.value}
🌑️ TEMPERATURE: {self.temperature}
{'='*80}
"""
        logger.info(log_message)
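
# Prompt/response logging is opt-in: set LOG_PROMPTS=true in the environment to
# write the interaction blocks above to both the console and ai_interactions.log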


class GeminiClient(BaseAIClient):
    """Google Gemini AI client using the new google-genai library"""

    def __init__(self, model: AIModel, temperature: float = 0.3):
        super().__init__(AIProvider.GEMINI, model, temperature)
        if not GEMINI_AVAILABLE:
            raise ImportError("Google GenAI library not available. Install with: pip install google-genai")
        api_key = os.getenv("GEMINI_API_KEY")
        if not api_key:
            raise ValueError("GEMINI_API_KEY environment variable not set")
        self.client = genai.Client(api_key=api_key)
        self.model_name = model.value

    def generate_response(self, system_prompt: str, user_prompt: str, temperature: Optional[float] = None) -> str:
        """Generate response from Gemini using the new API"""
        if temperature is None:
            temperature = self.temperature
        try:
            # Prepare the content parts
            contents = [
                types.Content(
                    role="user",
                    parts=[types.Part.from_text(text=user_prompt)],
                )
            ]
            # Configure generation settings
            config = types.GenerateContentConfig(
                temperature=temperature,
                thinking_config=types.ThinkingConfig(thinking_budget=0),
            )
            # Add system prompt if provided
            if system_prompt:
                config.system_instruction = [
                    types.Part.from_text(text=system_prompt)
                ]
            # Stream the response and accumulate the text chunks
            response_text = ""
            for chunk in self.client.models.generate_content_stream(
                model=self.model_name,
                contents=contents,
                config=config,
            ):
                if chunk.text:
                    response_text += chunk.text
            # Logging is handled centrally by UniversalAIClient.generate_response,
            # so the interaction is not logged again here (avoids duplicate entries)
            return response_text
        except Exception as e:
            error_msg = f"Gemini API error: {str(e)}"
            logging.error(error_msg)
            raise RuntimeError(error_msg) from e


class AnthropicClient(BaseAIClient):
    """Anthropic Claude AI client"""

    def __init__(self, model: AIModel, temperature: float = 0.3):
        super().__init__(AIProvider.ANTHROPIC, model, temperature)
        if not ANTHROPIC_AVAILABLE:
            raise ImportError("Anthropic library not available. Install with: pip install anthropic")
        api_key = os.getenv("ANTHROPIC_API_KEY")
        if not api_key:
            raise ValueError("ANTHROPIC_API_KEY environment variable not set")
        self.client = anthropic.Anthropic(api_key=api_key)

    def generate_response(self, system_prompt: str, user_prompt: str, temperature: Optional[float] = None) -> str:
        """Generate response from Claude"""
        temp = temperature if temperature is not None else self.temperature
        try:
            message = self.client.messages.create(
                model=self.model.value,
                max_tokens=20000,
                temperature=temp,
                system=system_prompt,
                messages=[
                    {
                        "role": "user",
                        "content": [
                            {
                                "type": "text",
                                "text": user_prompt
                            }
                        ]
                    }
                ]
            )
            # Extract text content from the response blocks
            response = ""
            for content_block in message.content:
                if hasattr(content_block, 'text'):
                    response += content_block.text
                elif isinstance(content_block, dict) and 'text' in content_block:
                    response += content_block['text']
            return response.strip()
        except Exception as e:
            raise RuntimeError(f"Anthropic API error: {str(e)}") from e


class UniversalAIClient:
    """
    Universal AI client that automatically selects the appropriate provider
    based on agent configuration and availability
    """

    def __init__(self, agent_name: str):
        self.agent_name = agent_name
        self.config = get_agent_config(agent_name)
        self.client = None
        self.fallback_client = None
        self._initialize_clients()

    def _initialize_clients(self):
        """Initialize the primary client and a fallback from another provider"""
        primary_provider = self.config["provider"]
        primary_model = self.config["model"]
        temperature = self.config.get("temperature", 0.3)
        # Try to initialize the primary client
        try:
            if primary_provider == AIProvider.GEMINI and is_provider_available(AIProvider.GEMINI):
                self.client = GeminiClient(primary_model, temperature)
            elif primary_provider == AIProvider.ANTHROPIC and is_provider_available(AIProvider.ANTHROPIC):
                self.client = AnthropicClient(primary_model, temperature)
        except Exception as e:
            print(f"⚠️ Failed to initialize primary client for {self.agent_name}: {e}")
        # Initialize a fallback client so that both init-time and runtime
        # failures of the primary client can be recovered
        for provider in get_available_providers():
            # A fallback only makes sense on a different provider than a
            # successfully initialized primary client
            if self.client is not None and provider == primary_provider:
                continue
            try:
                provider_config = get_provider_config(provider)
                fallback_model = provider_config["default_model"]
                if provider == AIProvider.GEMINI:
                    self.fallback_client = GeminiClient(fallback_model, temperature)
                elif provider == AIProvider.ANTHROPIC:
                    self.fallback_client = AnthropicClient(fallback_model, temperature)
                if self.fallback_client is not None:
                    if self.client is None:
                        print(f"πŸ”„ Using {provider.value} fallback for {self.agent_name}")
                    break
            except Exception as e:
                print(f"⚠️ Failed to initialize fallback {provider.value}: {e}")
                continue
        # Final check
        if self.client is None and self.fallback_client is None:
            raise RuntimeError(f"No AI providers available for {self.agent_name}")

    def generate_response(self, system_prompt: str, user_prompt: str, temperature: Optional[float] = None, call_type: str = "") -> str:
        """
        Generate a response using the primary client, falling back on failure

        Args:
            system_prompt: System instruction for the AI
            user_prompt: User message/prompt
            temperature: Optional temperature override
            call_type: Type of call for logging purposes

        Returns:
            AI-generated response text
        """
        active_client = self.client or self.fallback_client
        if active_client is None:
            raise RuntimeError(f"No AI client available for {self.agent_name}")
        try:
            response = active_client.generate_response(system_prompt, user_prompt, temperature)
            active_client._log_interaction(system_prompt, user_prompt, response, call_type)
            return response
        except Exception as e:
            # If the primary client fails at runtime, retry once on the fallback
            if active_client is self.client and self.fallback_client is not None:
                print(f"⚠️ Primary client failed for {self.agent_name}, trying fallback: {e}")
                try:
                    response = self.fallback_client.generate_response(system_prompt, user_prompt, temperature)
                    self.fallback_client._log_interaction(system_prompt, user_prompt, response, f"{call_type}_FALLBACK")
                    return response
                except Exception as fallback_error:
                    raise RuntimeError(f"Both primary and fallback clients failed: {e}, {fallback_error}") from fallback_error
            raise RuntimeError(f"AI client error for {self.agent_name}: {e}") from e

    def get_client_info(self) -> Dict[str, Any]:
        """Get information about the active client configuration"""
        active_client = self.client or self.fallback_client
        return {
            "agent_name": self.agent_name,
            "configured_provider": self.config["provider"].value,
            "configured_model": self.config["model"].value,
            "active_provider": active_client.provider.value if active_client else None,
            "active_model": active_client.model.value if active_client else None,
            "using_fallback": self.client is None and self.fallback_client is not None,
            "reasoning": self.config.get("reasoning", "No reasoning provided")
        }
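
    # For example, callers can detect degraded operation via:
    #     info = client.get_client_info()
    #     if info["using_fallback"]:
    #         ...  # primary provider unavailable; responses come from fallback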


# Factory function for easy client creation
def create_ai_client(agent_name: str) -> UniversalAIClient:
    """
    Create an AI client for a specific agent

    Args:
        agent_name: Name of the agent (e.g., "MainLifestyleAssistant")

    Returns:
        Configured UniversalAIClient instance
    """
    return UniversalAIClient(agent_name)


if __name__ == "__main__":
    print("πŸ€– AI Client Test")
    print("=" * 50)
    # Test different agents
    test_agents = ["MainLifestyleAssistant", "EntryClassifier", "MedicalAssistant"]
    for agent_name in test_agents:
        print(f"\n🎯 Testing {agent_name}:")
        try:
            client = create_ai_client(agent_name)
            info = client.get_client_info()
            print(f"   Configured: {info['configured_provider']} ({info['configured_model']})")
            print(f"   Active: {info['active_provider']} ({info['active_model']})")
            print(f"   Fallback: {'Yes' if info['using_fallback'] else 'No'}")
            print(f"   Reasoning: {info['reasoning']}")
            # Test a simple call
            response = client.generate_response(
                "You are a helpful assistant.",
                "Say hello in one sentence.",
                call_type="TEST"
            )
            print(f"   Test response: {response[:100]}...")
        except Exception as e:
            print(f"   ❌ Error: {e}")