# pythonchatbot / app.py
# Source: musawar32ali's Hugging Face Space ("Create app.py", commit 96dbdda, verified)
# app.py
import os
import gradio as gr
from google import genai
# Read API key from env (set this as a Space secret on HF: GEMINI_API_KEY)
api_key = os.environ.get("GEMINI_API_KEY")
# Shared module-level client, reused by call_gemini below.
# NOTE(review): if the env var is unset, api_key is None — confirm that
# genai.Client tolerates a None key at construction time.
client = genai.Client(api_key=api_key)
# Model name is overridable via GEMINI_MODEL; defaults to gemini-2.5-flash.
MODEL = os.environ.get("GEMINI_MODEL", "gemini-2.5-flash")
def call_gemini(prompt: str) -> str:
    """Call Gemini synchronously and return the text reply.

    Never raises: API/network failures are folded into a readable
    bracketed error string so the chat UI keeps working.

    Args:
        prompt: The text prompt to send to the model.

    Returns:
        The model's textual reply, or a fallback string representation
        of the response, or a bracketed error message on failure.
    """
    try:
        response = client.models.generate_content(
            model=MODEL,
            contents=prompt,
        )
        # The SDK commonly exposes textual output as `.text`, but that
        # attribute can be None (e.g. a blocked/empty response). The old
        # getattr(..., str(response)) default returned None in that case,
        # breaking the -> str contract; fall back to str() explicitly.
        text = getattr(response, "text", None)
        return text if text is not None else str(response)
    except Exception as e:  # deliberate best-effort boundary for the UI
        return f"[Error calling Gemini API: {e}]"
def generate_reply(message: str, history: list) -> list:
    """Append a user turn and the Gemini reply to the chat history.

    Args:
        message: Raw user input; surrounding whitespace is stripped.
            None is treated as an empty message.
        history: Conversation so far, a list of {"role", "content"}
            dicts in Gradio 'messages' format, or None for a fresh chat.

    Returns:
        A new history list with the user message and assistant reply
        appended. Blank input returns the history content unchanged.
    """
    # Copy so we never mutate the caller's list in place (the original
    # appended directly to `history`, which also mutated the object held
    # in Gradio state).
    updated = list(history) if history else []
    # Guard against None as well as whitespace-only input; the original
    # crashed on message=None (.strip() on None).
    user_message = (message or "").strip()
    if not user_message:
        return updated
    updated.append({"role": "user", "content": user_message})
    # Only the current message is sent as the prompt. For multi-turn
    # context, join the whole `updated` history into one prompt instead:
    #   "\n".join(f"{m['role']}: {m['content']}" for m in updated)
    reply_text = call_gemini(user_message)
    updated.append({"role": "assistant", "content": reply_text})
    return updated
with gr.Blocks(title="Gemini Chatbot") as demo:
    gr.Markdown("# Gemini Chatbot (Gradio — messages format)")
    # 'messages' type avoids Gradio's tuples->messages deprecation warning
    # and matches the {"role","content"} dicts produced by generate_reply.
    chatbot = gr.Chatbot(label="Gemini", type="messages")
    state = gr.State([])  # holds the list of {"role","content"} dicts
    with gr.Row():
        txt = gr.Textbox(
            show_label=False,
            placeholder="Type your message and press Enter...",
            lines=1,
        )

    def user_submit(message, history):
        """Handle a textbox submit: run the model, update chat and state.

        Returns (textbox_value, chatbot_messages, state). The textbox is
        cleared ("") so the user can type the next message immediately —
        the original handler left the submitted text sitting in the box.
        """
        if history is None:
            history = []
        updated = generate_reply(message, history)
        return "", updated, updated

    txt.submit(fn=user_submit, inputs=[txt, state], outputs=[txt, chatbot, state])
    # Clear resets the textbox too, not just the conversation/state.
    gr.Button("Clear").click(lambda: ("", [], []), None, [txt, chatbot, state], queue=False)
if __name__ == "__main__":
    # Bind on all interfaces; honor a platform-provided PORT (e.g. on a
    # hosting service), falling back to Gradio's usual 7860.
    port = int(os.environ.get("PORT", 7860))
    demo.launch(server_name="0.0.0.0", server_port=port)