Jezia committed on
Commit
583db0a
·
1 Parent(s): e4dd2b9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -13,7 +13,7 @@ def tokenize_data(text):
13
  input_ = str(text) + ' </s>'
14
  max_len = 80
15
  # tokenize inputs
16
- tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True, max_length=max_len, return_attention_mask=True, return_tensors='pt')
17
 
18
  inputs={"input_ids": tokenized_inputs['input_ids'],
19
  "attention_mask": tokenized_inputs['attention_mask']}
 
13
  input_ = str(text) + ' </s>'
14
  max_len = 80
15
  # tokenize inputs
16
+ tokenized_inputs = tokenizer(input_, padding='max_length', truncation=True, max_length=max_len, return_attention_mask=True)
17
 
18
  inputs={"input_ids": tokenized_inputs['input_ids'],
19
  "attention_mask": tokenized_inputs['attention_mask']}