import os

import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, SystemMessage

# Set environment variables: the Hugging Face token is read from the
# 'Data_science' environment variable and exposed under the names the
# langchain_huggingface integration expects.
hf = os.getenv('Data_science')
if not hf:
    st.error("Hugging Face token not found. Set the 'Data_science' environment variable.")
    st.stop()
os.environ['HUGGINGFACEHUB_API_TOKEN'] = hf
os.environ['HF_TOKEN'] = hf

# Page setup
st.set_page_config(page_title="Statistics Mentor Chat", layout="centered")

# Custom style (placeholder for any CSS overrides)
st.markdown("""
""", unsafe_allow_html=True)

# Title
st.title("📊 Statistics Mentor Chat")

# Sidebar: the selected experience level is injected into the system prompt below
st.sidebar.title("Mentor Preferences")
exp = st.sidebar.selectbox(
    "Select your experience level:",
    ["Beginner", "Intermediate", "Expert"]
)

# Model setup: the endpoint carries the generation settings,
# and ChatHuggingFace wraps it with a chat-message interface.
stats_model_skeleton = HuggingFaceEndpoint(
    repo_id='THUDM/GLM-4-32B-0414',
    provider='novita',
    temperature=0.7,
    max_new_tokens=110,
    task='conversational'
)

# ChatHuggingFace only needs the wrapped endpoint; repeating repo_id,
# provider, temperature, etc. here is unnecessary and can fail validation.
stats_mentor = ChatHuggingFace(llm=stats_model_skeleton)

# Session key for this page's chat history
PAGE_KEY = "chat_history_stats"
if PAGE_KEY not in st.session_state:
    st.session_state[PAGE_KEY] = []

# Chat form
with st.form(key="chat_form"):
    user_input = st.text_input("Ask your question:")
    submit = st.form_submit_button("Send")

# Chat logic: build a system prompt from the sidebar preference,
# send it with the user's question, and store the exchange.
if submit and user_input:
    system_prompt = (
        f"Act as a statistics mentor with {exp.lower()} expertise. "
        f"Answer in a friendly tone and within 150 words. "
        f"If the question is not statistics-related, politely say it's out of scope."
    )
    messages = [SystemMessage(content=system_prompt), HumanMessage(content=user_input)]
    result = stats_mentor.invoke(messages)
    st.session_state[PAGE_KEY].append((user_input, result.content))

# Display chat history
st.subheader("🗨️ Chat History")
for user, bot in st.session_state[PAGE_KEY]:
    st.markdown(f"**You:** {user}")
    st.markdown(f"**Mentor:** {bot}")
    st.markdown("---")