from openai import OpenAI
import gradio as gr
import os
import time

# OpenAI client and the assistant to run (both read from the environment).
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
assistant_id = os.environ.get("ASSISTANT")

# A single thread holds the whole conversation for this app instance.
thread = client.beta.threads.create()


def predict(message, history):
    # Add the user's message to the thread.
    client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=message,
    )

    # Start a run of the assistant on the thread.
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant_id,
    )

    # Poll until the run reaches a terminal state.
    while True:
        run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
        if run.status == "completed":
            # messages.list returns newest first, so the assistant's reply is data[0].
            messages = client.beta.threads.messages.list(thread_id=thread.id)
            return messages.data[0].content[0].text.value
        if run.status in ("failed", "cancelled", "expired"):
            return f"Run ended with status: {run.status}"
        time.sleep(0.5)


chatbot = gr.Chatbot(show_copy_button=True)

gr.ChatInterface(fn=predict, chatbot=chatbot).launch(share=True)
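
# Alternative sketch (not from the original code): recent releases of the openai
# Python SDK expose client.beta.threads.runs.create_and_poll, which wraps the
# manual polling loop above. Assuming that helper is available, predict could be
# replaced with:
#
# def predict(message, history):
#     client.beta.threads.messages.create(
#         thread_id=thread.id, role="user", content=message
#     )
#     run = client.beta.threads.runs.create_and_poll(
#         thread_id=thread.id, assistant_id=assistant_id
#     )
#     if run.status == "completed":
#         messages = client.beta.threads.messages.list(thread_id=thread.id)
#         return messages.data[0].content[0].text.value
#     return f"Run ended with status: {run.status}"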