Spaces: Running on Zero

Commit: "Set to bidirectional for debugging"

Browse files — llama_diffusion_model.py (+1 −1)

llama_diffusion_model.py
CHANGED
@@ -122,7 +122,7 @@ class CustomTransformerModel(PreTrainedModel):
|
|
122 |
# Build attention mask
|
123 |
device = input_ids.device
|
124 |
|
125 |
-
masking_type = getattr(self.config, "masking_type", "…")  [NOTE: removed line truncated in this page capture — the original default string is cut off; only the default value differs from the added line below]
|
126 |
if masking_type == 'bidirectional':
|
127 |
base_mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
|
128 |
elif masking_type == 'bidirectional_masked':
|
|
|
122 |
# Build attention mask
|
123 |
device = input_ids.device
|
124 |
|
125 |
+
masking_type = getattr(self.config, "masking_type", "bidirectional")
|
126 |
if masking_type == 'bidirectional':
|
127 |
base_mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
|
128 |
elif masking_type == 'bidirectional_masked':
|