File size: 1,759 Bytes
2e54a02
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cea6169
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import gradio as gr
from chat import get_response

def clear_session():
    """Reset the UI: blank the textbox and empty the chat history."""
    empty_text, empty_history = "", []
    return empty_text, empty_history

def add_query(chat_history, input):
    """Append the user's question to the chat history as a pending turn.

    The new entry is (input, None); the assistant slot is filled in later
    by `response`.  Raises gr.Error when the textbox is empty so the
    submit chain stops before calling the model.
    """
    if input:
        # += extends the same list object in place, mirroring append.
        chat_history += [(input, None)]
        return chat_history
    raise gr.Error("Please enter a question.")

def response(history, query, model):
    """Generate the model's reply for the latest user turn.

    Args:
        history: Chatbot history as a list of (user, assistant) tuples;
            the last entry is the pending turn added by add_query, i.e.
            (user text, None).
        query: The text currently in the input box (the latest question).
        model: Model name selected in the radio component.

    Returns:
        ("", history): the empty string clears the textbox, and the last
        history entry is replaced with (query, model reply).
    """
    # Build the OpenAI-style message list directly from the pairs,
    # tagging each message with its own role.  The previous even/odd
    # index reconstruction over a flattened list mislabeled roles (and
    # could raise IndexError) whenever a turn was missing one side,
    # because the filtered flat list loses the user/assistant parity.
    messages = []
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    res_msg = get_response(model, messages)
    # Fill in the pending (query, None) entry with the completed turn.
    history[-1] = (query, res_msg)
    return "", history

# Assemble the UI: model selector, chat window, question box, and clear
# button, wired so a submitted question is echoed into the chat first and
# the model's answer is fetched only if that succeeds.
with gr.Blocks(title="Chatbot", theme="Soft") as demo:
    with gr.Column("Chatbot - Family Relationships"):
        model = gr.Radio(
            ["gpt-3.5-turbo", "gpt-4-turbo", "ft:gpt-3.5-turbo-0125:personal::9hiINdK8"],
            label="model",
            info="Kindly choose a model before initiating the chat and clear the chat history before switching models. The last one is fine-tuned models.",
        )
        chatbot = gr.Chatbot(value=[], elem_id='chatbot')

        text_input = gr.Textbox(
            show_label=False,
            placeholder="Ask me anything!",
            container=False,
        )

        clear_btn = gr.Button("🧹 Clear")

        # Chain: echo the question, then (on success) query the model.
        submit_event = text_input.submit(
            add_query,
            inputs=[chatbot, text_input],
            outputs=[chatbot],
            concurrency_limit=1,
        )
        submit_event.success(
            response,
            inputs=[chatbot, text_input, model],
            outputs=[text_input, chatbot],
        )
        clear_btn.click(clear_session, inputs=[], outputs=[text_input, chatbot])

demo.launch()