|
import gradio as gr |
|
from gradio_client import Client |
|
|
|
|
|
# Remote Hugging Face Space hosting the model; opens a connection at import time.
client = Client("Qwen/Qwen2.5-72B-Instruct")

# Conversation state as (user_text, assistant_text) pairs — the pair format
# the Space's /model_chat endpoint expects for its `history` argument.
history = []
|
|
|
def respond(prompt, fhistory):
    """Send *prompt* to the remote Qwen Space and return the assistant reply.

    Parameters
    ----------
    prompt : str
        The user's latest message.
    fhistory : list
        History supplied by ``gr.ChatInterface`` (unused; the module-level
        ``history`` list of ``(user, assistant)`` pairs is kept as the source
        of truth because that pair format is what the Space API consumes).

    Returns
    -------
    list[dict]
        One assistant message in the ``messages`` format required by
        ``gr.ChatInterface(type='messages')``.
    """
    global history

    # The Space's /model_chat endpoint takes the raw prompt *string* as
    # `query` and prior turns as `history` (list of [user, assistant] pairs).
    # Building and sending an OpenAI-style messages list here was wrong-typed
    # and redundant, since the full context already travels via `history`.
    result = client.predict(
        query=prompt,
        history=history,
        system="You are a helpful assistant.",
        api_name="/model_chat",
    )

    # result is (query_echo, updated_history, system); the assistant's reply
    # is the second element of the last history pair.
    response = result[1][-1][1]

    # Record the completed turn so the next call sends the full context.
    history.append((prompt, response))

    return [{"role": "assistant", "content": response}]
|
|
|
|
|
|
|
|
|
# Chat UI wired to `respond`; type='messages' means `respond` must return
# OpenAI-style message dicts. The original description string was mojibake
# (garbled multi-byte text) and referenced the wrong model name ("Qwen 2.5
# Max"); replaced with a clean description matching the title and client.
demo = gr.ChatInterface(
    fn=respond,
    title="Qwen2.5-72B-Instruct Demo",
    description="Chat with the Qwen2.5-72B-Instruct model via the Hugging Face Space API.",
    type='messages'
)
|
|
|
# Start the Gradio web server only when the file is run as a script,
# not when it is imported as a module.
if __name__ == "__main__":

    demo.launch()