{ "model_type": "sentencepiece", "tokenizer_class": "PreTrainedTokenizerFast", "vocab_size": 128000, "model_max_length": 512, "unk_token": "", "bos_token": "", "eos_token": "", "pad_token": "" }