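# Data Science Mentor – a Streamlit chat app that answers subject-specific
# data-science questions at a chosen experience level, backed by a Hugging Face
# inference endpoint through LangChain.
# Run locally (filename assumed): streamlit run app.py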
import streamlit as st
import os
# ChatHuggingFace / HuggingFaceEndpoint now live in the langchain-huggingface
# package; the langchain_community copies are deprecated.
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langchain_core.messages import HumanMessage, SystemMessage, AIMessage
# --- Load API Token ---
hf_token = os.getenv("Data_science")
if not hf_token:
    st.error("❌ Hugging Face token not found. Please set the 'Data_science' environment variable.")
    st.stop()
os.environ["HUGGINGFACEHUB_API_KEY"] = hf_token | |
os.environ["HF_TOKEN"] = hf_token | |
# --- Load Hugging Face model (Qwen) ---
model = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen3-32B",
    provider="nebius",
    temperature=0.6,
    max_new_tokens=500,
    task="text-generation"
)
chat_model = ChatHuggingFace(llm=model)
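# ChatHuggingFace wraps the raw text-generation endpoint so it accepts LangChain
# chat messages (SystemMessage / HumanMessage / AIMessage) and formats them with
# the model's chat template, e.g.:
#   chat_model.invoke([HumanMessage(content="What is a p-value?")])  # -> AIMessage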
# --- Streamlit styles ---
st.markdown("""
<style>
.subject-btn {
    display: inline-block;
    margin: 0.3em;
}
.output-box {
    background-color: #f9f9f9;
    border-radius: 10px;
    padding: 20px;
    margin-top: 20px;
    box-shadow: 0 2px 6px rgba(0, 0, 0, 0.05);
}
</style>
""", unsafe_allow_html=True)
# --- Session state ---
if "message_history" not in st.session_state:
    st.session_state.message_history = []
if "selected_subject" not in st.session_state:
    st.session_state.selected_subject = "Python"
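# st.session_state persists across Streamlit reruns, so the conversation and the
# chosen subject survive each widget interaction instead of being rebuilt from
# scratch on every script run.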
# --- UI Header ---
st.title("📊 Data Science Mentor")
st.markdown("Ask subject-specific questions and get guidance based on your experience level.")
# --- Experience level ---
experience = st.selectbox("👤 Select your experience level:", ["Beginner", "Intermediate", "Expert"])
# --- Subject buttons ---
st.markdown("### 📚 Choose a Subject:")
cols = st.columns(4)
subjects = ["Python", "SQL", "Power BI", "Statistics", "Machine Learning", "Deep Learning", "Generative AI"]
for i, subject in enumerate(subjects):
    if cols[i % 4].button(subject):
        st.session_state.selected_subject = subject
        st.session_state.message_history = []  # Reset chat on subject change
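# st.button returns True only on the rerun triggered by that click, so the
# history reset above happens exactly once per subject switch.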
# --- Set system message based on subject & experience ---
if not st.session_state.message_history:
    system_prompt = f"""
You are a highly knowledgeable data science mentor specialized in {st.session_state.selected_subject}.
Your job is to guide a {experience.lower()} learner with clear, concise, and actionable advice.
Explain concepts, best practices, and answer questions with patience and professionalism.
If relevant, include example code, use-cases, or tips.
"""
    st.session_state.message_history.append(SystemMessage(content=system_prompt.strip()))
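# At this point message_history holds a single SystemMessage; the HumanMessage /
# AIMessage turns appended below build up the running conversation that is sent
# to the model on every request.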
# --- Chat Input ---
user_question = st.text_input(f"💬 Ask your {st.session_state.selected_subject} question:")
if st.button("Ask Mentor"):
    if user_question.strip():
        with st.spinner("Thinking..."):
            st.session_state.message_history.append(HumanMessage(content=user_question))
            try:
                response = chat_model.invoke(st.session_state.message_history)
                st.session_state.message_history.append(AIMessage(content=response.content))
                st.markdown('<div class="output-box">', unsafe_allow_html=True)
                st.markdown("### 🧠 Mentor's Response:")
                st.markdown(response.content)
                st.markdown("</div>", unsafe_allow_html=True)
            except Exception as e:
                st.error(f"❌ Error: {e}")
    else:
        st.warning("⚠️ Please enter a question before submitting.")