auto_space_1 / app.py
import gradio as gr
from huggingface_hub import InferenceClient
# Initialize the Inference Client with the target model and an access token
client = InferenceClient(model="your-repo-id/model-id", token="your-hf-token")
def chatbot(input_text):
    # Query the hosted model and return the generated text
    response = client.text_generation(input_text)
    return response
# Create a Gradio interface for the chatbot
interface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title="Chatbot",
    description="A simple chatbot using the Hugging Face Inference Client",
)
# Launch the chatbot
interface.launch()
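# Note: if the deployed model is a chat/instruct model rather than a plain
# text-generation model, InferenceClient's chat_completion API may be a better
# fit. The function below is a hypothetical variant (not part of the original
# Space) that reuses the client defined above with the same placeholder model id.
def chatbot_chat(input_text):
    # Send the user's message as a single-turn chat request
    result = client.chat_completion(
        messages=[{"role": "user", "content": input_text}],
        max_tokens=256,
    )
    # Extract the assistant's reply from the first returned choice
    return result.choices[0].message.content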