fix bug in prompt preview display
app.py CHANGED

@@ -167,7 +167,7 @@ def chat_response(user_msg, chat_history, system_prompt,
 
         pipe = load_pipeline(model_name)
         prompt = format_conversation(history, enriched, pipe.tokenizer)
-
+        prompt_debug = f"\n\n--- Prompt Preview ---\n```\n{prompt}\n```"
         streamer = TextIteratorStreamer(pipe.tokenizer,
                                         skip_prompt=True,
                                         skip_special_tokens=True)
@@ -193,11 +193,12 @@ def chat_response(user_msg, chat_history, system_prompt,
             assistant_text += chunk
             history[-1]['content'] = assistant_text
             # Show debug only once
-            yield history, debug
+            yield history, debug  # ← Show search results during streaming
         gen_thread.join()
+        yield history, debug + prompt_debug
     except Exception as e:
         history[-1]['content'] = f"Error: {e}"
-        yield history, debug
+        yield history, debug + prompt_debug
     finally:
         gc.collect()
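For context, the change builds a prompt_debug string right after the prompt is formatted, keeps yielding only the search-result debug text while tokens stream, and appends the prompt preview to the debug output a single time once gen_thread has joined (and on error). Below is a minimal, self-contained sketch of that streaming pattern, not the Space's actual app.py: the model name, the debug string, and the prompt formatting are placeholders standing in for the real load_pipeline(), format_conversation(), and search steps.

import gc
from threading import Thread

from transformers import TextIteratorStreamer, pipeline

# Placeholder model; the real Space picks model_name elsewhere and loads it
# through load_pipeline().
pipe = pipeline("text-generation", model="gpt2")


def chat_response(user_msg, history):
    # Gradio "messages"-style history: a list of {"role": ..., "content": ...} dicts.
    history = history + [
        {"role": "user", "content": user_msg},
        {"role": "assistant", "content": ""},
    ]
    debug = "--- Search Results ---\n(placeholder)"  # real app: search output
    prompt = user_msg                                # real app: format_conversation(...)
    prompt_debug = f"\n\n--- Prompt Preview ---\n```\n{prompt}\n```"
    try:
        streamer = TextIteratorStreamer(pipe.tokenizer,
                                        skip_prompt=True,
                                        skip_special_tokens=True)
        # Run generation in a background thread so this handler can stream chunks.
        gen_thread = Thread(target=pipe, args=(prompt,),
                            kwargs={"streamer": streamer, "max_new_tokens": 40})
        gen_thread.start()

        assistant_text = ""
        for chunk in streamer:
            assistant_text += chunk
            history[-1]["content"] = assistant_text
            yield history, debug                 # while streaming: search results only
        gen_thread.join()
        yield history, debug + prompt_debug      # once at the end: append the prompt preview
    except Exception as e:
        history[-1]["content"] = f"Error: {e}"
        yield history, debug + prompt_debug
    finally:
        gc.collect()

In the Gradio app, a generator like this presumably drives both the chatbot component and a debug panel, which is why every yield returns the (history, debug_text) pair.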