loom / app.py
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI()

# Load the moderation classifier once at startup. By default the pipeline
# returns only the top-scoring label; passing top_k=None would return
# scores for every label instead.
moderate_pipe = pipeline("text-classification", model="KoalaAI/Text-Moderation")


class TextInput(BaseModel):
    text: str


@app.post("/moderate")
async def moderate_text(input: TextInput):
    # Classify the submitted text and return a {label: score} mapping.
    results = moderate_pipe(input.text)
    return {r["label"]: r["score"] for r in results}
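A minimal sketch of how the endpoint could be exercised from a separate client script, assuming the app is served locally with `uvicorn app:app` and is reachable at http://127.0.0.1:8000 (the host, port, and sample text are assumptions, not part of the file):

# client_example.py -- illustrative only; assumes the server above is running
# at http://127.0.0.1:8000 (host/port are assumptions).
import requests

resp = requests.post(
    "http://127.0.0.1:8000/moderate",
    json={"text": "You are a wonderful person."},
)
resp.raise_for_status()
# With the pipeline's default top_k, this prints a single {label: score} pair,
# e.g. {"OK": 0.99}.
print(resp.json())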