greedy-intersection / tokenizer.json
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 8,
      "content": "<s>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    },
    {
      "id": 9,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": {
    "type": "Sequence",
    "pretokenizers": [
      {
        "type": "WhitespaceSplit"
      }
    ]
  },
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "WordLevel",
    "vocab": {
      "-4": 0,
      "-3": 1,
      "-2": 2,
      "-1": 3,
      "1": 4,
      "2": 5,
      "3": 6,
      "4": 7
    },
    "unk_token": "<unk>"
  }
}
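
For reference, a minimal sketch of loading this config with the Hugging Face tokenizers library and encoding a whitespace-separated string. The file name "tokenizer.json" and the sample input "-3 2 4" are assumptions for illustration, not part of the repo.

# Minimal sketch (assumption: the JSON above is saved locally as tokenizer.json).
from tokenizers import Tokenizer

# Load the WordLevel tokenizer defined by the config above.
tokenizer = Tokenizer.from_file("tokenizer.json")

# The WhitespaceSplit pre-tokenizer splits the input on whitespace, and the
# WordLevel model maps each piece to its vocabulary id ("-4".."-1", "1".."4" -> 0..7).
encoding = tokenizer.encode("-3 2 4")
print(encoding.tokens)  # ['-3', '2', '4']
print(encoding.ids)     # [1, 5, 7]

Note that <s> and <pad> are registered only as added special tokens (ids 8 and 9); with no post_processor configured, they are not inserted automatically during encoding.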