Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -25,16 +25,15 @@ model, tokenizer = load_model_and_tokenizer()
|
|
25 |
def get_predict(title: str, abstract: str) -> list[tuple[float, str]]:
    """Classify a paper by its title and abstract.

    Builds a single input string "title <sep> abstract[:128]", runs the
    module-level `model`, and returns the class probabilities.

    Args:
        title: Paper title.
        abstract: Paper abstract; only the first 128 characters are used.

    Returns:
        List of (probability, label) pairs sorted by probability,
        highest first. Labels come from the module-level `ind_to_target`.
    """
    # Tokenizer expects a batch, hence the single-element list.
    text = [title + tokenizer.sep_token + abstract[:128]]

    tokens_info = tokenizer(
        text,
        padding=True,
        truncation=True,
        return_tensors="pt",
    )

    # Inference only — disable autograd to save memory/compute.
    with torch.no_grad():
        # BUG FIX: the original omitted this call, so `out` was undefined.
        out = model(**tokens_info)
        probs = torch.nn.functional.softmax(out.logits, dim=-1).tolist()[0]

    # BUG FIX: `sorted` takes `reverse=`, not `reversed=` (TypeError before).
    return sorted(((p, ind_to_target[i]) for i, p in enumerate(probs)), reverse=True)
|
|
|
25 |
def get_predict(title: str, abstract: str) -> list[tuple[float, str]]:
    """Classify a paper by its title and abstract.

    Builds a single input string "title <sep> abstract[:128]", runs the
    module-level `model`, and returns the class probabilities.

    Args:
        title: Paper title.
        abstract: Paper abstract; only the first 128 characters are used.

    Returns:
        List of (probability, label) pairs sorted by probability,
        highest first. Labels come from the module-level `ind_to_target`.
    """
    # Tokenizer expects a batch, hence the single-element list.
    text = [title + tokenizer.sep_token + abstract[:128]]

    tokens_info = tokenizer(
        text,
        padding=True,
        truncation=True,
        return_tensors="pt",
    )

    # Inference only — disable autograd to save memory/compute.
    with torch.no_grad():
        out = model(**tokens_info)
        probs = torch.nn.functional.softmax(out.logits, dim=-1).tolist()[0]

    # BUG FIX: `sorted` takes `reverse=`, not `reversed=` (TypeError before);
    # the wrapping `list()` was redundant since `sorted` already returns a list.
    return sorted(((p, ind_to_target[i]) for i, p in enumerate(probs)), reverse=True)