import requests
from tokenizers import models, trainers, Tokenizer

tokenizer = Tokenizer(model=models.WordPiece(unk_token="[UNK]"))
special_tokens = ["[UNK]", "[PAD]", "[CLS]", "[SEP]", "[MASK]"]
trainer = trainers.WordPieceTrainer(vocab_size=25000, special_tokens=special_tokens)

# tokenizer.train() expects local file paths, not URLs, so we fetch a small
# Wikipedia sample from the datasets-server endpoint and train from an
# in-memory iterator instead (each row's "text" field holds the article body).
url = ("https://datasets-server.huggingface.co/rows"
       "?dataset=wikimedia%2Fwikipedia&config=20231101.en&split=train&offset=0&length=100")
rows = requests.get(url).json()["rows"]
tokenizer.train_from_iterator((row["row"]["text"] for row in rows), trainer=trainer)

encoding = tokenizer.encode("Let's test this tokenizer...", "on a pair of sentences.")
print(encoding.ids)
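
To sanity-check the output or reuse the tokenizer later, a minimal follow-up (not part of the original snippet; the filename is arbitrary) is to print the token strings behind the ids and save the trained tokenizer to disk:

print(encoding.tokens)            # the subword strings corresponding to encoding.ids
tokenizer.save("tokenizer.json")  # reload later with Tokenizer.from_file("tokenizer.json")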