import streamlit as st
import time
import logging
from typing import Optional

from src.llm.coordinated_provider import coordinated_provider
from core.session import session_manager

logger = logging.getLogger(__name__)


class ChatHandler:
    """Handles chat interactions with coordinated AI approach"""

    def __init__(self):
        self.is_processing = False

    def process_user_message(self, user_input: str, selected_model: str):
        """Process user message with coordinated AI approach"""
        if not user_input or not user_input.strip():
            st.warning("Please enter a message")
            return

        # Prevent duplicate processing
        if st.session_state.get('last_processed_message') == user_input:
            logger.info("Preventing duplicate message processing")
            return

        st.session_state.is_processing = True
        st.session_state.last_processed_message = user_input

        try:
            # Show user message immediately
            timestamp = time.strftime("%H:%M:%S")
            with st.chat_message("user"):
                st.markdown(user_input)
                st.caption(f"🕒 {timestamp}")

            # Add to session state history
            if "messages" not in st.session_state:
                st.session_state.messages = []
            st.session_state.messages.append({
                "role": "user",
                "content": user_input,
                "timestamp": timestamp
            })

            # Force UI update
            st.experimental_rerun()

        except Exception as e:
            logger.error(f"Error in initial message display: {e}", exc_info=True)
            st.session_state.is_processing = False
            st.session_state.last_processed_message = ""

    def process_ai_response(self, user_input: str, selected_model: str):
        """Process AI response with coordinated approach"""
        if not user_input or not user_input.strip():
            return

        try:
            # Show processing status
            with st.chat_message("assistant"):
                status_placeholder = st.empty()
                response_placeholder = st.empty()

                try:
                    # Show coordination status
                    status_placeholder.info("🚀 Coordinating AI responses...")

                    # Generate coordinated response
                    response = None
                    try:
                        # Get session and conversation history
                        user_session = session_manager.get_session("default_user")
                        conversation_history = user_session.get("conversation", []).copy()
                        conversation_history.append({"role": "user", "content": user_input})

                        response = coordinated_provider.generate(user_input, conversation_history)
                    except Exception as e:
                        logger.error(f"Coordinated response error: {e}")
                        raise

                    if response and response.strip():
                        status_placeholder.success("✅ Coordinated response received!")
                        response_placeholder.markdown(response)

                        # Add to session history
                        timestamp = time.strftime("%H:%M:%S")
                        st.session_state.messages.append({
                            "role": "assistant",
                            "content": response,
                            "timestamp": timestamp,
                            "provider": "coordinated"
                        })
                    else:
                        status_placeholder.warning("⚠️ Empty coordinated response received")
                        response_placeholder.markdown("*No coordinated response generated. Please try again.*")

                        timestamp = time.strftime("%H:%M:%S")
                        st.session_state.messages.append({
                            "role": "assistant",
                            "content": "*No coordinated response generated. Please try again.*",
                            "timestamp": timestamp,
                            "provider": "coordinated"
                        })

                except Exception as e:
                    status_placeholder.error("❌ Coordination failed")

                    # User-friendly error messages
                    error_message = f"Sorry, I encountered an error: {str(e)[:100]}..."
                    response_placeholder.markdown(error_message)

                    timestamp = time.strftime("%H:%M:%S")
                    st.session_state.messages.append({
                        "role": "assistant",
                        "content": error_message,
                        "timestamp": timestamp,
                        "provider": "coordinated"
                    })

                    logger.error(f"Chat processing error: {e}", exc_info=True)

        except Exception as e:
            logger.error(f"Unexpected error in process_ai_response: {e}", exc_info=True)
            st.error("An unexpected error occurred. Please try again.")

        finally:
            st.session_state.is_processing = False
            st.session_state.last_processed_message = ""
            time.sleep(0.1)


# Global instance
chat_handler = ChatHandler()