import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load the tokenizer and model from the Hugging Face Hub
model_name = "CJHauser/PrisimAI-t5"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
def answer_question(context, question):
    input_text = f"question: {question} context: {context}"
    inputs = tokenizer.encode(input_text, return_tensors="pt", truncation=True)
    outputs = model.generate(inputs, max_length=128)
    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return answer
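
# Example call (a rough sketch; the exact answer depends on how PrisimAI-t5 was fine-tuned):
#   answer_question("The Eiffel Tower is located in Paris, France.",
#                   "Where is the Eiffel Tower?")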
# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 PrisimAI Q&A\nAsk questions based on a given context.")
    with gr.Row():
        context = gr.Textbox(label="Context", placeholder="Paste your reference text here...", lines=8)
        question = gr.Textbox(label="Your Question", placeholder="What do you want to know?")
    answer = gr.Textbox(label="Answer", interactive=False)
    btn = gr.Button("Get Answer")
    btn.click(fn=answer_question, inputs=[context, question], outputs=answer)
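
# Launch the app. Hugging Face Spaces defaults are fine here; when running locally,
# Gradio can also expose a temporary public link via demo.launch(share=True).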
demo.launch()