File size: 1,205 Bytes
f490006
 
 
21004c4
f490006
 
 
21004c4
 
f490006
 
 
21004c4
f490006
21004c4
 
 
 
 
6300e91
21004c4
f490006
21004c4
 
 
 
 
 
 
 
f490006
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import gradio as gr
from transformers import T5ForConditionalGeneration, T5Tokenizer

# Load model and tokenizer
# Pulls the fine-tuned T5 checkpoint "gcuomo/open-source-ai-t5-liar-lens"
# from the Hugging Face Hub (or the local HF cache on subsequent runs).
# NOTE(review): both calls hit the network on first run — confirm the
# hosting environment allows Hub access, or point at a local path for
# offline deployment.
model = T5ForConditionalGeneration.from_pretrained("gcuomo/open-source-ai-t5-liar-lens")
tokenizer = T5Tokenizer.from_pretrained("gcuomo/open-source-ai-t5-liar-lens")

# Shared prediction function
def classify(statement):
    """Run the LIAR-lens T5 model on *statement* and return its label.

    The checkpoint was fine-tuned with a "summarize:" task prefix, so the
    input text is wrapped with that prefix before tokenization.  The
    generated text is returned lowercased and stripped of surrounding
    whitespace.
    """
    encoded = tokenizer(
        f"summarize: {statement}",
        return_tensors="pt",
        padding=True,
        truncation=True,
        max_length=128,
    )
    generated = model.generate(**encoded, max_new_tokens=8)
    label = tokenizer.decode(generated[0], skip_special_tokens=True)
    return label.strip().lower()

# Build UI with Blocks
with gr.Blocks() as demo:
    gr.Markdown("## 🤥 Open Source AI – LIAR Lens")

    with gr.Row():
        inp = gr.Textbox(label="Enter a statement", lines=2, placeholder="e.g. The book 'Open Source AI' explores Hugging Face and T5 models.")
        out = gr.Textbox(label="Predicted label")

    btn = gr.Button("Classify")
    # api_name registers a named endpoint so gradio_client callers can run
    # Client(...).predict(..., api_name="/predict") remotely.  (Assigning
    # demo.predict = classify does NOT expose an endpoint — gradio_client
    # only discovers endpoints attached to event listeners.)
    btn.click(fn=classify, inputs=inp, outputs=out, api_name="predict")

# Enable queueing (serializes concurrent requests) and launch the app
demo.queue()
demo.launch()