Clamp logits BEFORE softmax
app.py CHANGED
@@ -135,6 +135,7 @@ def generate_diffusion_text(input_ids):
     with torch.no_grad():
         input_tensor = torch.tensor([input_ids], dtype=torch.long).to(model.device)
         logits = model(input_ids=input_tensor)["logits"]
+        logits = logits.clamp(min=-1e4, max=1e4)
         probs = torch.nn.functional.softmax(logits, dim=-1)[0]
         probs = torch.clamp(probs, min=1e-8, max=1.0)
         print("probs", probs)
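Why the clamp has to come before the softmax: a single overflowed logit (+inf, e.g. after a float16 overflow) makes the max-subtraction inside softmax produce inf - inf = NaN, so the whole probability row becomes NaN, and the later torch.clamp on probs cannot repair it. Bounding the logits first keeps the distribution finite. The minimal standalone sketch below is not part of app.py; the +inf logit is artificial and only stands in for an overflow.

import torch

# Artificial example: one logit has overflowed to +inf.
logits = torch.tensor([[0.5, 1.2, float("inf")]])

# Clamping only the probabilities does not help: softmax already returned NaN.
bad_probs = torch.nn.functional.softmax(logits, dim=-1)[0]
print(torch.clamp(bad_probs, min=1e-8, max=1.0))   # tensor([nan, nan, nan])

# Clamping the logits first (same bounds as the commit) keeps softmax finite;
# the follow-up clamp on probs then only lifts exact zeros up to 1e-8.
safe_logits = logits.clamp(min=-1e4, max=1e4)
good_probs = torch.nn.functional.softmax(safe_logits, dim=-1)[0]
print(torch.clamp(good_probs, min=1e-8, max=1.0))  # finite, sums to ~1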