import os

import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage

# Load the Hugging Face API token from the environment
hf_token = os.getenv('Data_science')
if not hf_token:
    st.error("Hugging Face API token not found. Set the 'Data_science' environment variable.")
    st.stop()
os.environ['HUGGINGFACEHUB_API_TOKEN'] = hf_token
os.environ['HF_TOKEN'] = hf_token

# --- Page config ---
st.set_page_config(page_title="Python Mentor Chat", layout="centered")
st.title("🐍 Python Mentor Chat")

# --- Sidebar for mentor preferences ---
st.sidebar.title("Mentor Preferences")
exp_options = ['Beginner', 'Intermediate', 'Experienced']
exp = st.sidebar.selectbox("Select your experience level:", exp_options)

# --- Initialize the chat model ---
# The endpoint carries the model and generation settings; ChatHuggingFace only
# wraps it, so those arguments do not need to be repeated on the wrapper.
python_model_skeleton = HuggingFaceEndpoint(
    repo_id='meta-llama/Llama-3.2-3B-Instruct',
    provider='sambanova',
    temperature=0.7,
    max_new_tokens=150,
    task='conversational'
)
python_mentor = ChatHuggingFace(llm=python_model_skeleton)

# --- Session state for chat history ---
if "chat_history_python" not in st.session_state:
    st.session_state.chat_history_python = []

# --- Chat input form ---
with st.form(key="chat_form"):
    user_input = st.text_input("Ask your question:")
    submit = st.form_submit_button("Send")

# --- Chat logic ---
if submit and user_input:
    system_prompt = (
        f"You are a Python mentor teaching a learner at the {exp.lower()} experience level. "
        f"Answer only Python-related questions in a very friendly tone and under 150 words. "
        f"If the question is not about Python, politely say it's out of scope."
    )
    messages = [SystemMessage(content=system_prompt), HumanMessage(content=user_input)]
    result = python_mentor.invoke(messages)
    st.session_state.chat_history_python.append((user_input, result.content))

# --- Display chat history ---
st.subheader("🗨️ Chat History")
for user, bot in st.session_state.chat_history_python:
    st.markdown(f"**You:** {user}")
    st.markdown(f"**Mentor:** {bot}")
    st.markdown("---")