# Hugging Face Space "Somnath3570" — app.py (commit a2f89f6, verified)
import gradio as gr
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
import os
# Initialize the model using the secret token.
# HuggingFaceEndpoint points at the hosted inference API for the TinyLlama
# chat checkpoint; ChatHuggingFace wraps it with a chat-message interface.
# NOTE(review): if HF_TOKEN is unset, os.environ.get returns None and the
# call presumably falls back to anonymous access — confirm the Space always
# defines the HF_TOKEN secret.
llm = HuggingFaceEndpoint(
repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
task="text-generation",
huggingfacehub_api_token=os.environ.get('HF_TOKEN') # Access token from environment secret
)
# Chat-style wrapper used by process_query below.
model = ChatHuggingFace(llm=llm)
def process_query(query):
    """
    Process the user query and return the model's response.

    Args:
        query: Free-form question text entered by the user.

    Returns:
        The model's reply text, a prompt asking for input when the query
        is empty/whitespace-only, or an error description string if the
        model call fails.
    """
    # Guard against blank input so we don't spend a model call on an
    # empty prompt.
    if not query or not query.strip():
        return "Please enter a question."
    try:
        result = model.invoke(query)
        return result.content
    except Exception as e:
        # Surface the failure in the UI instead of crashing the app;
        # Gradio renders whatever string we return.
        return f"An error occurred: {str(e)}"
# Build the Gradio UI: one question box in, one answer box out.
question_box = gr.Textbox(
    lines=2,
    placeholder="Enter your question here...",
    label="Question",
)
answer_box = gr.Textbox(
    label="Answer",
    lines=5,
)

interface = gr.Interface(
    fn=process_query,
    inputs=question_box,
    outputs=answer_box,
    title="TinyLlama Chat Assistant",
    description="Ask any question and get answers from TinyLlama-1.1B-Chat model",
    examples=[
        ["Who is the Prime Minister of India?"],
        ["What is artificial intelligence?"],
        ["Explain quantum computing in simple terms."],
    ],
    theme=gr.themes.Soft(),
)

# Start the web server only when run as a script, not on import.
if __name__ == "__main__":
    interface.launch()