Update README.md
README.md
CHANGED
@@ -9,28 +9,11 @@ datasets:
 INT8 ONNX version of [philschmid/flan-t5-base-samsum](https://huggingface.co/philschmid/flan-t5-base-samsum) to use with [Transformers.js](https://huggingface.co/docs/transformers.js).
 
 ### Example usage
-
+
 ```js
 import { pipeline } from '@xenova/transformers';
 
-const generator = await pipeline('
+const generator = await pipeline('text2text-generation', 'Felladrin/onnx-flan-t5-base-samsum');
 const output = await generator("Val: it's raining! Candy: I know, just started... Val: r we going? we will be wet Candy: maybe wait a little? see if stops Val: ok. let's wait half h and than see Candy: god idea, I call u then Val: great :)", { add_special_tokens: true, max_new_tokens: 60, repetition_penalty: 1.2});
-console.log(output);
-
-```
-
-#### Auto Classes
-```js
-import { AutoModelForCausalLM, AutoTokenizer } from '@xenova/transformers';
-
-const model_path = 'Felladrin/onnx-flan-t5-base-samsum';
-const model = await AutoModelForCausalLM.from_pretrained(model_path);
-const tokenizer = await AutoTokenizer.from_pretrained(model_path);
-
-const prompt = "Val: it's raining! Candy: I know, just started... Val: r we going? we will be wet Candy: maybe wait a little? see if stops Val: ok. let's wait half h and than see Candy: god idea, I call u then Val: great :)";
-const { input_ids } = tokenizer(prompt);
-const tokens = await model.generate(input_ids, { max_new_tokens: 60, repetition_penalty: 1.2});
-console.log(tokenizer.decode(tokens[0], { skip_special_tokens: true }));
-// It's raining. Val and Candy will wait half an hour and then see if...
-```
+console.log(output); // It's raining. Val and Candy will wait half an hour and then see if...
+```
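The diff drops the old "Auto Classes" example. For readers who still want the lower-level API, here is a minimal sketch of the same call, not part of the commit itself: it keeps the removed snippet's structure but assumes Transformers.js's `AutoModelForSeq2SeqLM` auto class, since FLAN-T5 is an encoder-decoder model rather than a causal LM.

```js
// Sketch only (not in this commit): the removed "Auto Classes" example,
// adapted to the sequence-to-sequence auto class for an encoder-decoder model.
import { AutoModelForSeq2SeqLM, AutoTokenizer } from '@xenova/transformers';

const model_path = 'Felladrin/onnx-flan-t5-base-samsum';
const model = await AutoModelForSeq2SeqLM.from_pretrained(model_path);
const tokenizer = await AutoTokenizer.from_pretrained(model_path);

const prompt = "Val: it's raining! Candy: I know, just started... Val: r we going? we will be wet Candy: maybe wait a little? see if stops Val: ok. let's wait half h and than see Candy: god idea, I call u then Val: great :)";
const { input_ids } = tokenizer(prompt);
const tokens = await model.generate(input_ids, { max_new_tokens: 60, repetition_penalty: 1.2 });
console.log(tokenizer.decode(tokens[0], { skip_special_tokens: true }));
```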