Spaces: Running on Zero
Show red highlighting before actual noising
app.py CHANGED

@@ -202,6 +202,8 @@ def diffusion_chat(question, eot_weight, max_it, sharpness, noise_clipping, use_
             yield f"<b>Stopped early after {i+1} iterations.</b>"
             break
 
+        previous_tokens = current_tokens.copy()
+
         # --- NOISING STEP ---
         threshold = get_noising_schedule(i, max_it, sharpness=sharpness)
         if use_confidence_noising:
@@ -218,7 +220,7 @@ def diffusion_chat(question, eot_weight, max_it, sharpness, noise_clipping, use_
         current_tokens = ori_input_tokens[:answer_start] + noised_answer[answer_start:]
 
         # --- RED HIGHLIGHT ---
-        decoded_tokens = tokenizer.convert_ids_to_tokens(
+        decoded_tokens = tokenizer.convert_ids_to_tokens(previous_tokens[answer_start:])
         highlighted = []
         for j, tok in enumerate(decoded_tokens):
             tok_id = tokenizer.convert_tokens_to_ids(tok)
@@ -231,7 +233,7 @@ def diffusion_chat(question, eot_weight, max_it, sharpness, noise_clipping, use_
             else:
                 highlighted.append(token_str)
 
-        yield f"<b>Iteration {i+1}/{max_it} (
+        yield f"<b>Iteration {i+1}/{max_it} (before noising):</b><br>" + "".join(highlighted).replace('\n', '<br>')
         time.sleep(0.1)
 
 
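What the change does: current_tokens is snapshotted into previous_tokens before the noising step, and the red-highlight display now decodes that snapshot, so each iteration is rendered as it stood before noising. The comparison that decides which positions get the red span sits between the hunks shown above, so the following is only a minimal, self-contained sketch of how such a snapshot can drive the highlighting; highlight_before_noising, mask_token_id, and the toy vocabulary are illustrative assumptions, not the app's actual code.

# Minimal sketch (not the app's code): given the token ids captured before noising
# and the ids produced by the noising step, wrap the positions that noising
# overwrote in a red <span>, mirroring the "before noising" display.
def highlight_before_noising(previous_ids, noised_ids, id_to_token, mask_token_id):
    highlighted = []
    for prev_id, new_id in zip(previous_ids, noised_ids):
        token_str = id_to_token[prev_id].replace("Ġ", " ")  # crude detokenization for the demo
        if new_id == mask_token_id and prev_id != mask_token_id:
            # This position was just re-noised: show its pre-noising token in red.
            highlighted.append(f'<span style="color:red">{token_str}</span>')
        else:
            highlighted.append(token_str)
    return "".join(highlighted).replace("\n", "<br>")

# Toy usage with a made-up vocabulary: noising masks the middle token,
# so that is the one rendered in red.
vocab = {0: "<mask>", 1: "ĠHello", 2: "Ġworld", 3: "!"}
print(highlight_before_noising([1, 2, 3], [1, 0, 3], vocab, mask_token_id=0))
# -> ' Hello<span style="color:red"> world</span>!'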