ferdmartin committed
Commit e939ab4
1 Parent(s): e71afca

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -152,7 +152,7 @@ def main():
             prediction, predict_proba = nb_lr(model, text)
             st.session_state["sklearn"] = True
         else:
-            prediction, predict_proba = torch_pred(tokenizer, model, text)
+            prediction, predict_proba = torch_pred(tokenizer, model, format_text(text))
             st.session_state["torch"] = True
 
         # Store the result in session state
@@ -182,7 +182,7 @@ def main():
         with st.spinner('Wait for it 💭... BERT-based model explanations take around 4-10 minutes. In case you want to abort, refresh the page.'):
             # TORCH EXPLAINER PRED FUNC (USES logits)
             def f(x):
-                tv = torch.tensor([tokenizer.encode(v, padding='max_length', max_length=512, truncation=True) for v in x])#.cuda()
+                tv = torch.tensor([tokenizer.encode(v, padding='max_length', max_length=512, truncation=True) for v in x])
                 outputs = model(tv).detach().cpu().numpy()
                 scores = (np.exp(outputs).T / np.exp(outputs).sum(-1)).T
                 val = scipy.special.logit(scores[:,1]) # use one vs rest logit units
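For context (not part of the commit): the prediction wrapper f(x) in the second hunk converts raw model outputs into class probabilities with a softmax and then into one-vs-rest logit units, which is the form a SHAP text explainer typically consumes. The commit does not show how f is wired into the explainer, so the following is only a minimal sketch under that assumption: shap.Explainer and shap.maskers.Text are standard SHAP APIs, while the names explainer and shap_values are illustrative and not taken from app.py, and tokenizer, model, and text are assumed to come from app.py's existing setup.

import numpy as np
import scipy.special
import shap
import torch

def f(x):
    # Tokenize a batch of strings to fixed-length input IDs (CPU tensors, as in the commit).
    tv = torch.tensor([tokenizer.encode(v, padding='max_length', max_length=512,
                                        truncation=True) for v in x])
    outputs = model(tv).detach().cpu().numpy()
    # Softmax over the class dimension, then log-odds of the positive class.
    scores = (np.exp(outputs).T / np.exp(outputs).sum(-1)).T
    return scipy.special.logit(scores[:, 1])

# Assumed wiring: use the tokenizer as the masker so SHAP perturbs whole tokens.
explainer = shap.Explainer(f, shap.maskers.Text(tokenizer))
shap_values = explainer([text])  # explanations for a single document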