{
  "model_type": "nanogpt",
  "architectures": [
    "NanoGPTChat"
  ],
  "auto_map": {
    "AutoConfig": "configuration_nanogpt.NanoGPTConfig",
    "AutoModel": "modeling_nanogpt.NanoGPTChat",
    "AutoModelForCausalLM": "modeling_nanogpt.NanoGPTChat",
    "AutoTokenizer": "tokenizer_nanogpt.NanoGPTChatTokenizer"
  },
  "bos_token": "<|bos|>",
  "eos_token": "<|assistant_end|>",
  "pad_token": "<|assistant_end|>",
  "sequence_len": 2048,
  "vocab_size": 65536,
  "n_layer": 20,
  "n_head": 10,
  "n_kv_head": 10,
  "n_embd": 1280,
  "chat_template": "{% if messages[0]['role'] == 'system' %}<|bos|><|user_start|>{{ messages[0]['content'] }}\n\n{{ messages[1]['content'] }}<|user_end|>{% set messages = messages[2:] %}{% else %}<|bos|>{% endif %}{% for message in messages %}{% if loop.index0 % 2 == 0 %}<|user_start|>{{ message['content'] }}<|user_end|>{% else %}<|assistant_start|>{{ message['content'] }}<|assistant_end|>{% endif %}{% endfor %}"
}
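
The "auto_map" entries resolve to custom classes, so loading this checkpoint through the transformers Auto* API requires trust_remote_code=True, and the "chat_template" folds an optional system message into the first user turn before alternating user and assistant blocks. Below is a minimal usage sketch, assuming the repository also ships the referenced configuration_nanogpt.py, modeling_nanogpt.py, and tokenizer_nanogpt.py files; the repo id and the manually appended <|assistant_start|> prompt are illustrative assumptions, not taken from this config.

from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-org/nanogpt-chat"  # hypothetical repo id, replace with the real one

# trust_remote_code=True is needed because "auto_map" points at custom classes
# (NanoGPTConfig, NanoGPTChat, NanoGPTChatTokenizer) shipped alongside the weights.
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# The chat_template merges a leading system message into the first user turn:
# <|bos|><|user_start|>{system}\n\n{user}<|user_end|>
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Why is the sky blue?"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)

# The template does not open an assistant turn itself, so append <|assistant_start|>
# before generating (an assumption based on the special tokens above); generation
# then stops at <|assistant_end|>, which is also the configured eos/pad token.
inputs = tokenizer(prompt + "<|assistant_start|>", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=128, eos_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(output[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))

For the two-message conversation above, apply_chat_template renders <|bos|><|user_start|>You are a helpful assistant.\n\nWhy is the sky blue?<|user_end|>; with no system message, it emits <|bos|> and then wraps each turn as <|user_start|>…<|user_end|> or <|assistant_start|>…<|assistant_end|> by parity of its position in the list.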