import streamlit as st
import time
import logging
from typing import Optional
from src.llm.coordinated_provider import coordinated_provider
from core.session import session_manager
logger = logging.getLogger(__name__)

class ChatHandler:
    """Handles chat interactions with coordinated AI approach"""

    def __init__(self):
        self.is_processing = False

    def process_user_message(self, user_input: str, selected_model: str):
        """Process user message with coordinated AI approach"""
        if not user_input or not user_input.strip():
            st.warning("Please enter a message")
            return

        # Prevent duplicate processing
        if st.session_state.get('last_processed_message') == user_input:
            logger.info("Preventing duplicate message processing")
            return

        st.session_state.is_processing = True
        st.session_state.last_processed_message = user_input

        try:
            # Show user message immediately
            timestamp = time.strftime("%H:%M:%S")
            with st.chat_message("user"):
                st.markdown(user_input)
                st.caption(f"🕒 {timestamp}")

            # Add to session state history
            if "messages" not in st.session_state:
                st.session_state.messages = []
            st.session_state.messages.append({
                "role": "user",
                "content": user_input,
                "timestamp": timestamp
            })
            # Force UI update
            st.rerun()
        except Exception as e:
            logger.error(f"Error in initial message display: {e}", exc_info=True)
            st.session_state.is_processing = False
            st.session_state.last_processed_message = ""

    def process_ai_response(self, user_input: str, selected_model: str):
        """Process AI response with coordinated approach"""
        if not user_input or not user_input.strip():
            return

        # Ensure the message history exists before appending to it below
        if "messages" not in st.session_state:
            st.session_state.messages = []

        try:
            # Show processing status
            with st.chat_message("assistant"):
                status_placeholder = st.empty()
                response_placeholder = st.empty()

                try:
                    # Show coordination status
                    status_placeholder.info("🚀 Coordinating AI responses...")

                    # Generate coordinated response
                    response = None
                    try:
                        # Get session and conversation history
                        user_session = session_manager.get_session("default_user")
                        conversation_history = user_session.get("conversation", []).copy()
                        conversation_history.append({"role": "user", "content": user_input})
                        response = coordinated_provider.generate(user_input, conversation_history)
                    except Exception as e:
                        logger.error(f"Coordinated response error: {e}")
                        raise

                    if response and response.strip():
                        status_placeholder.success("✅ Coordinated response received!")
                        response_placeholder.markdown(response)

                        # Add to session history
                        timestamp = time.strftime("%H:%M:%S")
                        st.session_state.messages.append({
                            "role": "assistant",
                            "content": response,
                            "timestamp": timestamp,
                            "provider": "coordinated"
                        })
                    else:
                        status_placeholder.warning("⚠️ Empty coordinated response received")
                        response_placeholder.markdown("*No coordinated response generated. Please try again.*")
                        timestamp = time.strftime("%H:%M:%S")
                        st.session_state.messages.append({
                            "role": "assistant",
                            "content": "*No coordinated response generated. Please try again.*",
                            "timestamp": timestamp,
                            "provider": "coordinated"
                        })
                except Exception as e:
                    status_placeholder.error("❌ Coordination failed")
                    # User-friendly error messages
                    error_message = f"Sorry, I encountered an error: {str(e)[:100]}..."
                    response_placeholder.markdown(error_message)
                    timestamp = time.strftime("%H:%M:%S")
                    st.session_state.messages.append({
                        "role": "assistant",
                        "content": error_message,
                        "timestamp": timestamp,
                        "provider": "coordinated"
                    })
                    logger.error(f"Chat processing error: {e}", exc_info=True)
        except Exception as e:
            logger.error(f"Unexpected error in process_ai_response: {e}", exc_info=True)
            st.error("An unexpected error occurred. Please try again.")
        finally:
            st.session_state.is_processing = False
            st.session_state.last_processed_message = ""
            time.sleep(0.1)


# Global instance
chat_handler = ChatHandler()
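

# --- Usage sketch (illustrative only; not called anywhere in this module) ---
# A minimal example of how a Streamlit page might drive this handler, replaying
# stored history and then invoking the two methods in order. The function name,
# the "default-model" placeholder, and the rerun-then-respond flow are assumptions
# about the calling page, not something this module defines or enforces.
def _example_chat_page(model: str = "default-model"):
    # Replay stored history on each rerun
    for msg in st.session_state.get("messages", []):
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])
            st.caption(f"🕒 {msg.get('timestamp', '')}")

    # New input: render it, store it, and trigger a rerun via process_user_message
    if user_input := st.chat_input("Type a message"):
        chat_handler.process_user_message(user_input, model)

    # After the rerun, generate the assistant reply for the pending message
    if st.session_state.get("is_processing"):
        chat_handler.process_ai_response(
            st.session_state.get("last_processed_message", ""), model
        )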