67Ayush87 committed on
Commit
aee139e
·
verified ·
1 Parent(s): 0f681f2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -106
app.py CHANGED
@@ -1,106 +1,6 @@
1
- import streamlit as st
2
- from langchain.chat_models import ChatHuggingFace
3
- from langchain.schema import SystemMessage, HumanMessage
4
- import torch
5
- from fpdf import FPDF
6
- import io
7
-
8
- st.set_page_config(page_title="Data Science Mentor", layout="wide")
9
-
10
# Cache one LangChain chat model per topic so switching topics in the UI
# does not rebuild the model on every Streamlit rerun.
@st.cache_resource
def load_langchain_model(topic):
    """Return a ChatHuggingFace model configured for *topic*.

    Parameters
    ----------
    topic : str
        One of the sidebar topics. Unrecognized topics (Power BI,
        Machine Learning, Deep Learning) fall back to the general-purpose
        Falcon instruct model, matching the original else branch.

    Returns
    -------
    ChatHuggingFace
        A chat model with shared generation settings.
    """
    # GPU if available, else CPU — same device choice for every topic.
    device = 0 if torch.cuda.is_available() else -1
    # Generation settings are identical across topics; only the repo differs,
    # so a dispatch table replaces the original if/elif chain.
    repo_by_topic = {
        "Python": "tiiuae/falcon-7b-instruct",
        "GenAI": "google/flan-t5-large",
        "Statistics": "databricks/dolly-v2-3b",
        "SQL": "google/flan-t5-base",
    }
    repo_id = repo_by_topic.get(topic, "tiiuae/falcon-7b-instruct")
    return ChatHuggingFace(
        repo_id=repo_id,
        temperature=0.6,
        max_new_tokens=256,
        task="conversational",
        device=device,
    )
25
-
26
def generate_answer(model, topic, level, question):
    """Return the mentor model's reply to *question*.

    The system message frames the model as a mentor of the requested
    *level* in the requested *topic*; *question* is sent as the human turn.
    """
    mentor_role = f"You are a {level} level mentor in {topic}. Answer the user's question accordingly."
    conversation = [
        SystemMessage(content=mentor_role),
        HumanMessage(content=question),
    ]
    # .invoke returns a message object; callers want only its text content.
    return model.invoke(conversation).content
34
-
35
def create_pdf(chat_history):
    """Render *chat_history* into a PDF and return it as a BytesIO buffer.

    NOTE(review): assumes entries strictly alternate ("You", msg) then
    ("Mentor", msg) — confirm against how the caller appends turns.
    """
    doc = FPDF()
    doc.add_page()
    doc.set_auto_page_break(auto=True, margin=15)
    doc.set_font("Arial", size=12)

    # Centered title line, then some breathing room.
    doc.cell(0, 10, "Data Science Mentor Chat History", ln=True, align='C')
    doc.ln(10)

    # Walk the history two entries at a time: user question, mentor answer.
    for idx in range(0, len(chat_history), 2):
        question = chat_history[idx][1]
        # Tolerate a trailing unanswered question at the end of the list.
        answer = chat_history[idx + 1][1] if idx + 1 < len(chat_history) else ""

        doc.set_font("Arial", 'B', 12)
        doc.multi_cell(0, 10, f"You: {question}")
        doc.set_font("Arial", '', 12)
        doc.multi_cell(0, 10, f"Mentor: {answer}")
        doc.ln(5)

    # Serialize into an in-memory buffer, rewound so Streamlit can read it.
    buffer = io.BytesIO()
    doc.output(buffer)
    buffer.seek(0)
    return buffer
58
-
59
# --- Streamlit UI ---

st.title("🤖 Data Science Mentor")

with st.sidebar:
    st.header("Configure Your Mentor")
    topic = st.radio("Select Topic:", ["Python", "GenAI", "Statistics", "Power BI", "SQL", "Machine Learning", "Deep Learning"])
    level = st.radio("Select Experience Level:", ["Beginner", "Intermediate", "Advanced"])

# Load the (cached) LangChain model for the selected topic.
model = load_langchain_model(topic)

# Chat history lives in session state so it survives Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

st.subheader(f"Ask your {topic} question:")
user_input = st.text_area("Type your question here:", height=100)

if st.button("Get Answer"):
    if user_input.strip() == "":
        st.warning("Please enter a question.")
    else:
        with st.spinner("Mentor is thinking..."):
            answer = generate_answer(model, topic, level, user_input)
            st.session_state.chat_history.append(("You", user_input))
            st.session_state.chat_history.append(("Mentor", answer))

# Display chat history as You/Mentor pairs.
if st.session_state.chat_history:
    for i in range(0, len(st.session_state.chat_history), 2):
        user_msg = st.session_state.chat_history[i][1]
        mentor_msg = st.session_state.chat_history[i + 1][1] if i + 1 < len(st.session_state.chat_history) else ""
        st.markdown(f"**You:** {user_msg}")
        st.markdown(f"**Mentor:** {mentor_msg}")
        st.markdown("---")

    # BUG FIX: the original nested st.download_button inside
    # `if st.button("Download Chat as PDF"):`. Clicking the inner download
    # button triggers a rerun in which the outer button evaluates False, so
    # the download link vanished before it could be used. Render the
    # download_button directly — and only when there is history to export,
    # so we never build a PDF from an empty chat.
    st.download_button(
        label="Download Chat as PDF",
        data=create_pdf(st.session_state.chat_history),
        file_name="chat_history.pdf",
        mime="application/pdf"
    )

if st.button("Clear Chat"):
    st.session_state.chat_history = []
 
1
+ import os
2
+ import langchain
3
+ import langchain_huggingface
4
+ from langchain_huggingface import HuggingFaceEndpoint,HuggingFacePipeline, ChatHuggingFace
5
+ from langchain_google_genai import GoogleGenerativeAI, ChatGoogleGenerativeAI
6
+ from langchain_core.messages import HumanMessage, SystemMessage, AIMessage