import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline
# Load model and tokenizer from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained("Mhammad2023/bert-finetuned-ner-torch")
model = AutoModelForTokenClassification.from_pretrained("Mhammad2023/bert-finetuned-ner-torch")
# Use aggregation_strategy="simple" to group B/I tokens into whole entities
classifier = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple"
)
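
# Note (a sketch of the standard transformers token-classification output, not
# part of the original script): with aggregation_strategy="simple" each
# prediction is a dict of the form
# {"entity_group": ..., "score": ..., "word": ..., "start": ..., "end": ...},
# which is why predict() below reads the "word", "entity_group", and "score" keys.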
def predict(text):
    results = classifier(text)
    if not results:
        return "No entities found"
    output = []
    for entity in results:
        output.append(f"{entity['word']}: {entity['entity_group']} ({round(entity['score']*100, 2)}%)")
    return "\n".join(output)
gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="Named Entity Recognition"
).launch()
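
# Running this file directly (e.g. `python app.py`) starts a local Gradio server;
# when deployed as app.py in a Gradio Space, the same script is served automatically.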