# gradio_test_001 / app.py
import gradio
import logging
import time
from transformers import pipeline

# basicConfig attaches a stream handler so INFO-level messages actually reach the console.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("gradio_test_001")
logger.setLevel(logging.INFO)
logger.info("Starting logging for gradio_test_001.")
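# Zero-shot classification: the NLI model facebook/bart-large-mnli scores how strongly
# the input text entails each candidate label, so no task-specific fine-tuning is needed.
# The model weights are downloaded and cached on the first run.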
classifier = pipeline("zero-shot-classification",
                      model="facebook/bart-large-mnli")
# sequence_to_classify = "one day I will see the world"
# candidate_labels = ['travel', 'cooking', 'dancing']
# CATEGORIES = ['doc_type.jur', 'doc_type.Spec', 'doc_type.ZDF', 'doc_type.Publ',
#               'doc_type.Scheme', 'content_type.Alt', 'content_type.Krypto',
#               'content_type.Karte', 'content_type.Banking', 'content_type.Reg',
#               'content_type.Konto']
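# English display labels corresponding to the internal doc_type/content_type codes above.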
categories = [
    "Legal", "Specification", "Facts and Figures",
    "Publication", "Payment Scheme",
    "Alternative Payment Systems", "Crypto Payments",
    "Card Payments", "Banking", "Regulations", "Account Payments",
]
def transform_output(res: dict) -> list:
    """Pair each label with its score and sort by score, highest first."""
    return sorted(
        zip(res["labels"], res["scores"]),
        key=lambda tpl: tpl[1],
        reverse=True,
    )
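# Example of the transformed output (scores are illustrative, not real model output):
#   [("Banking", 0.93), ("Card Payments", 0.71), ("Legal", 0.04), ...]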
def clf_text(txt: str | list[str]):
    logger.info("Classify: %r", txt)
    t0 = time.time()
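    # multi_label=True scores each candidate label independently (scores need not
    # sum to 1), so several labels can apply to the same text at once.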
    res = classifier(txt, categories, multi_label=True)
    elapsed = time.time() - t0
    logger.info(f"Done. {elapsed:.2f}s")
    logger.info("Result(s): %r", res)
    # A list input yields a list of result dicts; a single string yields one dict.
    if isinstance(res, list):
        return [transform_output(dct) for dct in res]
    return transform_output(res)
# items = sorted(zip(res["labels"], res["scores"]), key=lambda tpl: tpl[1], reverse=True)
# d = dict(zip(res["labels"], res["scores"]))
# output = [f"{lbl}:\t{score}" for lbl, score in items]
# return "\n".join(output)
# return list(items)
# classifier(sequence_to_classify, candidate_labels)
#{'labels': ['travel', 'dancing', 'cooking'],
# 'scores': [0.9938651323318481, 0.0032737774308770895, 0.002861034357920289],
# 'sequence': 'one day I will see the world'}
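# Presumably the initial hello-world handler from the Gradio quickstart; kept for
# reference but no longer wired to the interface (see the commented-out fn below).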
def my_inference_function(name):
    return "Hello " + name + "!"
gradio_interface = gradio.Interface(
    # fn=my_inference_function,
    fn=clf_text,
    inputs="text",
    outputs=gradio.JSON(),
)
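# launch() also accepts e.g. server_name="0.0.0.0", server_port=7860 or share=True
# if the app must be reachable from outside localhost; the defaults work on Spaces.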
logger.info("Launching app.")
gradio_interface.launch()