app.py CHANGED
@@ -104,12 +104,16 @@ qa_chain = ConversationalRetrievalChain.from_llm(
     get_chat_history = lambda h : h
 )

+def chat_interface(question):

-
-
-
-
-
+    result = qa_chain.run({"question": question})
+    print("Debug: Result from qa_chain.run:", result)
+
+    # Check the structure of the result
+    if isinstance(result, str):
+        return result  # If the result is a string, return it directly
+    else:
+        return "Unexpected result format"

 chatbot_gradio_app = gr.Interface(
     fn=chat_interface,
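For context, below is a minimal, self-contained sketch of how the edited hunk could run end to end. It is not the Space's actual code: the LLM, embeddings, and FAISS index are placeholder assumptions, since the real qa_chain is built earlier in app.py outside this hunk. The sketch also passes an empty chat_history, because ConversationalRetrievalChain normally expects that input alongside question, and calling qa_chain.run({"question": question}) alone may raise a missing-input error.

# Minimal sketch, not the Space's actual code: placeholder LLM/embeddings/vector store,
# since the real qa_chain is constructed earlier in app.py outside this hunk.
import gradio as gr
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

# Assumed retrieval setup: a tiny FAISS index keeps the example self-contained.
vectorstore = FAISS.from_texts(["example document text"], OpenAIEmbeddings())

qa_chain = ConversationalRetrievalChain.from_llm(
    llm=ChatOpenAI(temperature=0),
    retriever=vectorstore.as_retriever(),
    get_chat_history=lambda h: h,
)

def chat_interface(question):
    # ConversationalRetrievalChain also expects a chat_history input;
    # an empty list avoids a missing-key error when there is no history.
    result = qa_chain.run({"question": question, "chat_history": []})
    print("Debug: Result from qa_chain.run:", result)

    # With the chain's default single output key ("answer"), run() returns a plain string.
    if isinstance(result, str):
        return result
    return "Unexpected result format"

chatbot_gradio_app = gr.Interface(fn=chat_interface, inputs="text", outputs="text")

if __name__ == "__main__":
    chatbot_gradio_app.launch()

Because the default chain exposes a single "answer" output, run() returns a plain string, so the isinstance(result, str) check in the diff is expected to take the first branch.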