
Ruurd committed · Commit 12738e5 · verified · 1 parent(s): d84223d

Fix time_elapsed issue

Files changed (1):
  app.py  +8 -1
app.py CHANGED
@@ -223,12 +223,19 @@ def diffusion_chat(question, max_it, pause_length, sharpness, clustering, noise_
     last_tokens = []
     prev_decoded_tokens = []
 
+    generation_start = time.time()
+
     for i in range(max_it):
         print('Generating output')
 
         # Model step
         generated_tokens, confidences = generate_diffusion_text(current_tokens)
 
+        elapsed = time.time() - generation_start
+        remaining = pause_length - elapsed
+        if remaining > 0:
+            time.sleep(remaining)
+
         # Save full output for noising step
         current_tokens = ori_input_tokens[:answer_start] + generated_tokens[answer_start:]
 
@@ -289,7 +296,7 @@ def diffusion_chat(question, max_it, pause_length, sharpness, clustering, noise_
         current_tokens = ori_input_tokens[:answer_start] + noised_answer[answer_start:]
 
         yield f"<b>Iteration {i+1}/{max_it} (before noising):</b><br>" + "".join(highlighted).replace('\n', '<br>')
-        # time.sleep(pause_length)
+        generation_start = time.time()
 
 
     answer_ids = current_tokens[answer_start:]
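
For context, the change drops the previously commented-out fixed pause (time.sleep(pause_length) after each yield) in favor of an elapsed-time-aware one: the loop records when the previous yield happened and sleeps only for whatever part of pause_length the model step did not already consume, so slow iterations are not delayed further. The sketch below shows that pattern in isolation; the function and parameter names (paced_steps, step, n_iterations) are illustrative and not taken from app.py.

```python
import time

def paced_steps(n_iterations, pause_length, step):
    """Minimal sketch of the pacing pattern introduced in this commit.

    `step` stands in for the model call (generate_diffusion_text in
    app.py); the names here are hypothetical. Each iteration is
    stretched to at least `pause_length` seconds, rather than sleeping
    a fixed `pause_length` on top of however long the step itself took.
    """
    generation_start = time.time()
    for i in range(n_iterations):
        result = step(i)

        # Sleep only for the part of pause_length the step did not use.
        elapsed = time.time() - generation_start
        remaining = pause_length - elapsed
        if remaining > 0:
            time.sleep(remaining)

        yield result
        # Restart the clock after yielding, mirroring the reset added
        # after the yield in diffusion_chat.
        generation_start = time.time()

# Example: each iteration takes roughly 0.5 s even when step() is instant.
for out in paced_steps(3, pause_length=0.5, step=lambda i: i * i):
    print(out)
```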