ziadrone committed · Commit 689a819 · verified · 1 Parent(s): c7eb1f1

Upload config.json with huggingface_hub

Files changed (1):
  config.json +9 -12
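
The commit message says the file was pushed with the huggingface_hub client. A minimal sketch of that kind of upload, assuming a hypothetical repo id "ziadrone/shivik-m1" (the commit page does not show the full repo path) and a local config.json:

    # Sketch only: the repo_id below is a placeholder, not taken from this page.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="config.json",   # local file to push
        path_in_repo="config.json",      # destination path inside the repo
        repo_id="ziadrone/shivik-m1",    # hypothetical repo id
        commit_message="Upload config.json with huggingface_hub",
    )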
config.json CHANGED
@@ -4,22 +4,19 @@
     "ShivikM1ForCausalLM"
   ],
   "vocab_size": 49156,
-  "hidden_size": 2048,
-  "num_hidden_layers": 24,
-  "num_attention_heads": 16,
-  "intermediate_size": 8192,
+  "d_model": 2048,
+  "n_layers": 24,
+  "num_heads": 16,
+  "num_paths": 3,
   "rotary_dim": 128,
   "context_length": 4096,
-  "num_paths": 3,
-  "tie_word_embeddings": true,
-  "bos_token_id": 0,
-  "eos_token_id": 2,
-  "pad_token_id": 1,
-  "hidden_act": "silu",
+  "initializer_range": 0.02,
+  "use_cache": true,
+  "torch_dtype": "float16",
   "tokenizer_class": "ShivikM1Tokenizer",
   "auto_map": {
+    "AutoConfig": "modeling_shivik_m1.ShivikM1Config",
     "AutoModelForCausalLM": "modeling_shivik_m1.ShivikM1ForCausalLM",
-    "AutoTokenizer": "tokenization_shivik_m1.ShivikM1Tokenizer",
-    "AutoConfig": "modeling_shivik_m1.ShivikM1Config"
+    "AutoTokenizer": "tokenization_shivik_m1.ShivikM1Tokenizer"
   }
 }
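
Because "auto_map" points the Auto* classes at custom modules inside the repo (modeling_shivik_m1, tokenization_shivik_m1), loading this checkpoint requires trust_remote_code=True so that transformers fetches and imports that code instead of looking in its built-in model registry. A minimal loading sketch under the same hypothetical repo id as above:

    # Sketch only: "ziadrone/shivik-m1" is a placeholder repo id.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo_id = "ziadrone/shivik-m1"
    tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)  # -> ShivikM1Tokenizer
    model = AutoModelForCausalLM.from_pretrained(
        repo_id,
        trust_remote_code=True,  # allow the Hub-hosted ShivikM1 code to run
        torch_dtype="float16",   # matches the "torch_dtype" now pinned in config.json
    )

Note that the new config renames the usual transformers keys (hidden_size, num_hidden_layers, num_attention_heads) to d_model, n_layers, and num_heads, so the custom ShivikM1Config in modeling_shivik_m1.py is presumably written against those names.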