app.py
CHANGED
@@ -104,8 +104,8 @@ qa_chain = ConversationalRetrievalChain.from_llm(
     get_chat_history = lambda h : h
 )
 
-def qa_response(user_message, chat_history):
-    response = llm_chain.predict(user_message = user_message)
+def qa_response(user_message, chat_history, context):
+    response = llm_chain.predict(user_message = user_message, chat_history, context)
     return response
 
 chatbot_gradio_app = gr.ChatInterface(fn=qa_response)
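
Note that the added line passes positional arguments after a keyword argument, which Python rejects as a syntax error, and gr.ChatInterface only calls fn with (message, history), so a third parameter would not be supplied. A minimal working sketch, assuming llm_chain is a LangChain LLMChain whose prompt expects user_message, chat_history and context variables, and that a retriever is defined earlier in app.py (the retrieve_context helper below is hypothetical, not part of this commit):

import gradio as gr

# Hypothetical helper (assumption): build a context string from a retriever
# assumed to be defined earlier in app.py.
def retrieve_context(user_message):
    docs = retriever.get_relevant_documents(user_message)
    return "\n".join(doc.page_content for doc in docs)

# gr.ChatInterface calls fn with (message, history), so context is looked up
# inside the function rather than passed in by Gradio.
def qa_response(user_message, chat_history):
    context = retrieve_context(user_message)
    # Prompt variables must be passed to predict() as keyword arguments.
    response = llm_chain.predict(
        user_message=user_message,
        chat_history=chat_history,
        context=context,
    )
    return response

chatbot_gradio_app = gr.ChatInterface(fn=qa_response)

If context really should come from the Gradio UI instead of the retriever, ChatInterface's additional_inputs argument can supply it as an extra parameter to the callback.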