Spaces:
Running on Zero

Ruurd committed on
Commit
04f0876
·
verified ·
1 Parent(s): 57e6bce

Set to bidirectional for debugging

Browse files
Files changed (1) hide show
  1. llama_diffusion_model.py +1 -1
llama_diffusion_model.py CHANGED
@@ -122,7 +122,7 @@ class CustomTransformerModel(PreTrainedModel):
122
  # Build attention mask
123
  device = input_ids.device
124
 
125
- masking_type = getattr(self.config, "masking_type", "unidirectional")
126
  if masking_type == 'bidirectional':
127
  base_mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
128
  elif masking_type == 'bidirectional_masked':
 
122
  # Build attention mask
123
  device = input_ids.device
124
 
125
+ masking_type = getattr(self.config, "masking_type", "bidirectional")
126
  if masking_type == 'bidirectional':
127
  base_mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
128
  elif masking_type == 'bidirectional_masked':