# FastAPI server for a Hugging Face Space.
# (Removed non-code page-header residue "Spaces: / Sleeping / Sleeping"
# left over from copying the source out of the Space's web UI.)
import logging

from fastapi import FastAPI, Query
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from transformers import pipeline
# Application setup: create the FastAPI app, configure logging, and load the model.
app = FastAPI()

# Set up logging
logging.basicConfig(level=logging.INFO)

# Load the text2text-generation pipeline once at import time, without cache_dir;
# the model is downloaded on first run. This blocks startup until the load finishes.
pipe_flan = pipeline("text2text-generation", model="ai1-test/finance-chatbot-flan-t5-large")
# NOTE(review): the original had no route decorator, so this handler was never
# registered with FastAPI. The path below follows the standard HF Space
# template for this file ("/infer_t5") — confirm against the frontend's fetch URL.
@app.get("/infer_t5")
def t5(input: str = Query(..., description="Input text for the chatbot")):
    """Run the finance chatbot pipeline on ``input`` and return the generated text.

    Returns ``{"output": <generated text>}`` on success, or
    ``{"error": <message>}`` if inference raises, so the client always
    receives a JSON body instead of an unhandled 500.
    """
    try:
        logging.info(f"Received input: {input}")  # Log input
        output = pipe_flan(input)
        generated_text = output[0]["generated_text"]
        logging.info(f"Generated text: {generated_text}")  # Log generated output
        return {"output": generated_text}
    except Exception as e:
        logging.error(f"Error occurred during inference: {str(e)}")  # Log the error
        return {"error": f"An error occurred: {str(e)}"}
app.mount("/", StaticFiles(directory="static", html=True), name="static") | |
def index() -> FileResponse:
    """Return the frontend page from the container's absolute static path.

    NOTE(review): this function carries no route decorator, and the
    StaticFiles mount on "/" already serves index.html, so FastAPI never
    invokes it — it appears to be dead code inherited from the HF Space
    template. Confirm before deleting.
    """
    return FileResponse(path="/app/static/index.html", media_type="text/html")