jeromeku committed on
Commit
2bcf69e
·
verified ·
1 Parent(s): 85f4f73

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -5
config.json CHANGED
@@ -1,7 +1,4 @@
1
  {
2
- "architectures": [
3
- "RND1LM"
4
- ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
7
  "auto_map": {
@@ -9,7 +6,6 @@
9
  "AutoModel": "modeling_rnd.RND1Model",
10
  "AutoModelForMaskedLM": "modeling_rnd.RND1LM"
11
  },
12
- "bos_token_id": 151643,
13
  "decoder_sparse_step": 1,
14
  "dtype": "bfloat16",
15
  "eos_token_id": 151645,
@@ -34,13 +30,14 @@
34
  "num_hidden_layers": 48,
35
  "num_key_value_heads": 4,
36
  "output_router_logits": false,
 
37
  "rms_norm_eps": 1e-06,
38
  "rope_scaling": false,
39
  "rope_theta": 1000000.0,
40
  "router_aux_loss_coef": 0.001,
41
  "sliding_window": false,
42
  "tie_word_embeddings": false,
43
- "transformers_version": "4.56.1",
44
  "use_cache": false,
45
  "use_sliding_window": false,
46
  "vocab_size": 151936
 
1
  {
 
 
 
2
  "attention_bias": false,
3
  "attention_dropout": 0.0,
4
  "auto_map": {
 
6
  "AutoModel": "modeling_rnd.RND1Model",
7
  "AutoModelForMaskedLM": "modeling_rnd.RND1LM"
8
  },
 
9
  "decoder_sparse_step": 1,
10
  "dtype": "bfloat16",
11
  "eos_token_id": 151645,
 
30
  "num_hidden_layers": 48,
31
  "num_key_value_heads": 4,
32
  "output_router_logits": false,
33
+ "pad_token_id": 151643,
34
  "rms_norm_eps": 1e-06,
35
  "rope_scaling": false,
36
  "rope_theta": 1000000.0,
37
  "router_aux_loss_coef": 0.001,
38
  "sliding_window": false,
39
  "tie_word_embeddings": false,
40
+ "transformers_version": "4.57.0",
41
  "use_cache": false,
42
  "use_sliding_window": false,
43
  "vocab_size": 151936