{
  "architectures": [
    "GPTForCausalLM"
  ],
  "model_type": "gpt2",
  "vocab_size": 65536,
  "n_positions": 2048,
  "n_ctx": 2048,
  "n_embd": 1280,
  "n_head": 10,
  "n_layer": 20,
  "n_kv_head": 10,
  "rotary": true,
  "rotary_base": 10000,
  "tie_word_embeddings": false
}
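
A minimal sketch of how these hyperparameters translate into model size, assuming the JSON above is saved as `config.json` (the filename and the estimate are illustrative; the count ignores biases, layer norms, and any implementation-specific extras):

```python
import json

# Load the configuration shown above (path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

n_embd, n_head, n_layer = cfg["n_embd"], cfg["n_head"], cfg["n_layer"]
vocab_size = cfg["vocab_size"]

# Per-head dimension: 1280 / 10 = 128.
head_dim = n_embd // n_head

# Rough per-layer parameter count (n_kv_head == n_head, so standard MHA):
#   attention Q, K, V and output projections ~ 4 * n_embd^2
#   MLP with the usual 4x expansion          ~ 8 * n_embd^2
per_layer = 12 * n_embd ** 2

# tie_word_embeddings is false, so the input embedding matrix and the
# LM head are counted separately; rotary embeddings add no learned
# positional parameters.
embeddings = 2 * vocab_size * n_embd

total = n_layer * per_layer + embeddings
print(f"head_dim           = {head_dim}")              # 128
print(f"approx. parameters = {total / 1e6:.0f}M")      # ~561M
```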