import os
from io import BytesIO

import streamlit as st
from langchain.prompts import PromptTemplate
from langchain.chains.llm import LLMChain
from langchain_google_genai import ChatGoogleGenerativeAI
from fpdf import FPDF
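# Third-party dependencies implied by the imports above: streamlit, langchain,
# langchain-google-genai, and fpdf2 (or the legacy PyFPDF package).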

# 1️⃣ Configure with your API key (read from the G_API environment variable)
api_key = os.getenv("G_API")
if not api_key:
    st.error("Set the G_API environment variable to your Google API key.")
    st.stop()
os.environ["GOOGLE_API_KEY"] = api_key


# 2️⃣ Initialize Gemini via LangChain
model = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.0)

# 3️⃣ Prompt template
prompt = PromptTemplate.from_template(
    "You are an expert and motivational AI Mentor. Provide detailed, thoughtful, "
    "and practical guidance in response to the following query. "
    "Avoid unnecessary fluff or emojis.\n\n{input}"
)

chain = LLMChain(llm=model, prompt=prompt)
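# Note: the classic LLMChain/.run() interface is flagged as deprecated in newer
# LangChain releases but still works; the LCEL equivalent would be `prompt | model`.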

def ai_mentor(prompt_input: str) -> str:
    return chain.run(input=prompt_input)

def create_pdf_buffer(messages) -> BytesIO:
    """Render the chat history to an in-memory PDF."""
    buffer = BytesIO()
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Helvetica", size=14)
    pdf.cell(200, 20, "Chat History with AI Mentor", ln=1, align="C")
    pdf.ln(10)
    pdf.set_font("Helvetica", size=12)
    for msg in messages:
        role = msg["role"].capitalize()
        # The built-in Helvetica font only supports latin-1, so replace unsupported
        # characters (e.g. emojis) instead of crashing the export.
        text = f"{role}: {msg['content']}".encode("latin-1", "replace").decode("latin-1")
        pdf.multi_cell(0, 8, text)
        pdf.ln(2)
    # Write the PDF to memory (PyFPDF returns a str here, fpdf2 returns a bytearray)
    out = pdf.output(dest="S")
    pdf_bytes = out.encode("latin-1") if isinstance(out, str) else bytes(out)
    buffer.write(pdf_bytes)
    buffer.seek(0)
    return buffer

# ── Streamlit UI ──
st.title("AI Mentor (Gemini with LangChain)")
st.sidebar.write("Chat with your AI Mentor. Type questions or worries below 😊")

if "messages" not in st.session_state:
    st.session_state.messages = []

for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])

prompt_input = st.chat_input("Write your message here...")

if prompt_input:
    st.session_state.messages.append({"role": "user", "content": prompt_input})
    # Echo the user's message immediately; the history loop above only runs on the next rerun.
    st.chat_message("user").write(prompt_input)
    with st.spinner("AI Mentor is thinking..."):
        response = ai_mentor(prompt_input)
    st.session_state.messages.append({"role": "assistant", "content": response})
    st.chat_message("assistant").write(response)


if st.session_state.messages:
    pdf_buffer = create_pdf_buffer(st.session_state.messages)
    st.download_button(
        label="Download chat history as PDF",
        data=pdf_buffer,
        file_name="chat_history.pdf",
        mime="application/pdf",
    )
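
# To launch the app locally:
#   streamlit run <path-to-this-file>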