import os

import gradio as gr
from dotenv import load_dotenv
from langchain_core.messages import AIMessage, HumanMessage

# Load environment variables from the root .env file (located alongside this
# script) before importing the RAG pipeline, which needs them at import time.
dotenv_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".env")
load_dotenv(dotenv_path=dotenv_path)

from Rag_conversation import rag_chain  # noqa: E402  (must follow load_dotenv)


def chatbot(user_message, history):
    # Rebuild a LangChain-style chat history from Gradio's (user, bot) pairs.
    chat_history = []
    for user_msg, bot_msg in history:
        chat_history.append(HumanMessage(content=user_msg))  # user turn
        chat_history.append(AIMessage(content=bot_msg))      # AI response
    # The current question is passed separately as "input", so it is not
    # appended to chat_history (that would duplicate it in the prompt).
    result = rag_chain.invoke({"input": user_message, "chat_history": chat_history})
    return result["answer"]


# Create Gradio UI
demo = gr.ChatInterface(
    chatbot,
    title="Binghamton RAG Chatbot",
    description="Ask questions about Binghamton University.",
)

if __name__ == "__main__":
    demo.launch()
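
# ---------------------------------------------------------------------------
# For reference, a minimal sketch of what `Rag_conversation` is assumed to
# expose: a LangChain retrieval chain whose .invoke() accepts {"input",
# "chat_history"} and returns a dict containing an "answer" key. The LLM,
# embeddings, vector store, and prompts below are illustrative assumptions,
# not the project's actual configuration.
#
#   from langchain.chains import create_history_aware_retriever, create_retrieval_chain
#   from langchain.chains.combine_documents import create_stuff_documents_chain
#   from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
#   from langchain_openai import ChatOpenAI, OpenAIEmbeddings
#   from langchain_chroma import Chroma
#
#   llm = ChatOpenAI(model="gpt-4o-mini")
#   retriever = Chroma(
#       persist_directory="chroma_db",            # hypothetical index location
#       embedding_function=OpenAIEmbeddings(),
#   ).as_retriever()
#
#   rephrase_prompt = ChatPromptTemplate.from_messages([
#       ("system", "Rewrite the user's question as a standalone search query."),
#       MessagesPlaceholder("chat_history"),
#       ("human", "{input}"),
#   ])
#   answer_prompt = ChatPromptTemplate.from_messages([
#       ("system", "Answer using the retrieved context:\n\n{context}"),
#       MessagesPlaceholder("chat_history"),
#       ("human", "{input}"),
#   ])
#
#   history_aware_retriever = create_history_aware_retriever(llm, retriever, rephrase_prompt)
#   rag_chain = create_retrieval_chain(
#       history_aware_retriever,
#       create_stuff_documents_chain(llm, answer_prompt),
#   )
# ---------------------------------------------------------------------------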