ALVHB95 committed on
Commit
f41a82a
·
1 Parent(s): 8963db6
Files changed (1) hide show
  1. app.py +20 -6
app.py CHANGED
@@ -104,14 +104,28 @@ qa_chain = ConversationalRetrievalChain.from_llm(
104
  get_chat_history = lambda h : h
105
  )
106
 
107
- def qa_response(user_message, chat_history, context):
108
- response = qa_chain.predict(user_message, chat_history, context=context)
109
- return response
110
 
111
- chatbot_gradio_app = gr.ChatInterface(fn=qa_response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
112
 
113
  # Combine both interfaces into a single app
114
- gr.TabbedInterface(
115
  [image_gradio_app, chatbot_gradio_app],
116
  tab_names=["image","chatbot"]
117
- ).launch()
 
 
 
 
104
  get_chat_history = lambda h : h
105
  )
106
 
 
 
 
107
 
108
+ def chat_interface(inputs):
109
+ question = inputs["question"]
110
+ chat_history = inputs["chat_history"]
111
+ # Assuming `chain` is your instance of ConversationalRetrievalChain
112
+ result = qa_chain.run({"question": question, "chat_history": chat_history})
113
+ return result["answer"]
114
+
115
+ chatbot_gradio_app = gr.Interface(
116
+ fn=chat_interface,
117
+ inputs=[
118
+ gr.inputs.Textbox(lines=1, label="Question"),
119
+ gr.inputs.Textbox(lines=5, label="Chat History"),
120
+ ],
121
+ outputs="text"
122
+ )
123
 
124
  # Combine both interfaces into a single app
125
+ app=gr.TabbedInterface(
126
  [image_gradio_app, chatbot_gradio_app],
127
  tab_names=["image","chatbot"]
128
+ )
129
+
130
+ app.queue()
131
+ app.launch()