from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForQuestionAnswering
import gradio as gr

model = ORTModelForQuestionAnswering.from_pretrained("optimum/roberta-base-squad2")
tokenizer = AutoTokenizer.from_pretrained("deepset/roberta-base-squad2")
onnx_qa = pipeline("question-answering", model=model, tokenizer=tokenizer)

# question = "What's my name??"
# context = "My name is Philipp and I live in Nuremberg."
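
# Illustrative direct call (not part of the original app): the question-answering
# pipeline returns a dict with 'score', 'start', 'end' and 'answer' keys, e.g.
# onnx_qa(question=question, context=context)
# -> {'score': ..., 'start': ..., 'end': ..., 'answer': ...}
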
def get_answer(context, question):
    # The pipeline returns a dict with 'score', 'start', 'end' and 'answer';
    # only the answer text is returned for display in the output textbox.
    pred = onnx_qa(question=question, context=context)
    return pred["answer"]
examples = [
    ["""In supervised learning, input data is provided to the model along with the output. In unsupervised learning, only input data is provided to the model. The goal of supervised learning is to train the model so that it can predict the output when it is given new data.""",
     "In what type of learning is only input data provided?"],
    # [] # You can add context examples without questions
]
demo = gr.Blocks()
with demo:
    with gr.Row():
        context = gr.Textbox(label='Document', lines=10, scale=2)
        question = gr.Textbox(label='Question', lines=3, scale=1)
    answer = gr.Textbox(label='Answer', lines=4)
    b1 = gr.Button('Get Answer', size='sm')
    gr.Examples(examples=examples, inputs=[context, question], outputs=answer)
    # with gr.Column():
    #     gr.Markdown('An example input on the left')
    b1.click(fn=get_answer, inputs=[context, question], outputs=answer)
demo.launch() | |
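
# Optional, for local testing (an assumption, not part of the original Space config):
# demo.launch(share=True) would additionally create a temporary public share link.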