unet_small2_new / tokenizer_config.json
{
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<cls>",
  "eos_token": "</s>",
  "mask_token": "<mask>",
  "model_max_length": 128,
  "pad_token": "<pad>",
  "sep_token": "<sep>",
  "special_tokens_map_file": "/data/home/itay.nakash/cramming_w_elad/cramming_playground/outputs/unet_small2_new/checkpoints/ScriptableCrammedBERT_2023-08-10_2.5690/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast",
  "unk_token": "<unk>"
}
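
For reference, a minimal sketch of consuming this config with the `transformers` library. The `tokenizer_class` field means the Hub loader instantiates a `PreTrainedTokenizerFast` (which also requires a `tokenizer.json` alongside this file); the repo id below is an assumption inferred from the page title and may differ from the actual repository:

```python
from transformers import AutoTokenizer

# NOTE: repo id is an assumption based on the page title
# ("unet_small2_new" under user "itay-nakash"); swap in the real
# Hub repo id or a local checkpoint directory as needed.
tokenizer = AutoTokenizer.from_pretrained("itay-nakash/unet_small2_new")

# This config caps inputs at model_max_length = 128 and declares
# <s>/</s>/<pad>/<unk>/<mask>/<cls>/<sep> as special tokens.
enc = tokenizer(
    "Hello, cramming!",
    truncation=True,       # enforce the 128-token limit
    padding="max_length",  # pad with <pad> up to model_max_length
    return_tensors="pt",
)
print(enc["input_ids"].shape)  # -> torch.Size([1, 128])
```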