import os

import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage

# --- Hugging Face token ---
# The token is expected in the 'Data_science' environment variable; fail early
# if it is missing instead of crashing later with a TypeError.
hf = os.getenv('Data_science')
if not hf:
    raise EnvironmentError("Hugging Face token not found in the 'Data_science' environment variable.")
os.environ['HUGGINGFACEHUB_API_TOKEN'] = hf
os.environ['HF_TOKEN'] = hf

# --- Config ---
st.set_page_config(page_title="ML Mentor Chat", layout="centered")
st.title("🤖 Machine Learning Mentor Chat")

# --- Sidebar for selections ---
st.sidebar.title("Mentor Preferences")
exp_options = ['Beginner', 'Intermediate', 'Experienced']
exp = st.sidebar.selectbox("Select your experience level:", exp_options)

# --- Initialize Chat Model ---
# The endpoint carries all generation settings; the chat wrapper only needs the llm.
ml_model_skeleton = HuggingFaceEndpoint(
    repo_id='Qwen/Qwen3-14B',
    provider='nebius',
    temperature=0.7,
    max_new_tokens=150,
    task='conversational',
)
ml_mentor = ChatHuggingFace(llm=ml_model_skeleton)

# --- Session State ---
# Streamlit reruns the script on every interaction; session state keeps the
# chat history across reruns.
if "chat_history_ml" not in st.session_state:
    st.session_state.chat_history_ml = []

# --- Chat Form ---
with st.form(key="chat_form"):
    user_input = st.text_input("Ask your question:")
    submit = st.form_submit_button("Send")

# --- Chat Logic ---
if submit and user_input:
    # System context: pitch answers at the selected experience level.
    system_prompt = (
        f"You are a machine learning mentor. The user is at a {exp.lower()} experience level, "
        f"so adapt your explanations accordingly. Answer only machine learning-related questions "
        f"in a friendly tone and under 150 words. If a question is off topic, politely say it's out of scope."
    )
    messages = [SystemMessage(content=system_prompt), HumanMessage(content=user_input)]
    result = ml_mentor.invoke(messages)
    st.session_state.chat_history_ml.append((user_input, result.content))

# --- Display Chat History ---
st.subheader("🗨️ Chat History")
for user, bot in st.session_state.chat_history_ml:
    st.markdown(f"**You:** {user}")
    st.markdown(f"**Mentor:** {bot}")
    st.markdown("---")
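
# --- Optional: multi-turn context (a sketch, not wired into the logic above) ---
# The chat logic sends only the system prompt and the latest question, so the
# mentor has no memory of earlier turns. One way to give it memory is to replay
# the stored history as HumanMessage/AIMessage pairs before the new question.
# `build_messages` is a hypothetical helper added here for illustration only;
# it is not part of LangChain or Streamlit.
def build_messages(system_prompt, history, new_question):
    """Rebuild the whole conversation as a list of LangChain messages."""
    messages = [SystemMessage(content=system_prompt)]
    for user_turn, bot_turn in history:
        messages.append(HumanMessage(content=user_turn))
        messages.append(AIMessage(content=bot_turn))
    messages.append(HumanMessage(content=new_question))
    return messages

# Usage inside the chat logic above would look like:
#     messages = build_messages(system_prompt, st.session_state.chat_history_ml, user_input)
#     result = ml_mentor.invoke(messages)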