FlameF0X committed on
Commit
cd5b4c0
·
verified ·
1 Parent(s): b3aa53d

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +7 -13
config.json CHANGED
@@ -1,22 +1,16 @@
1
  {
2
- "architectures": [
3
- "I3ForCausalLM"
4
- ],
5
  "model_type": "i3",
 
 
 
 
 
6
  "vocab_size": 4466,
7
  "d_model": 512,
8
  "n_layers": 12,
9
  "n_heads": 16,
10
  "max_seq_len": 256,
11
  "rank": 128,
12
- "d_state": 64,
13
- "tokenizer_type": "chunk",
14
- "chunk_strategy": "variable_2_3",
15
- "torch_dtype": "float32",
16
- "transformers_version": "4.36.0",
17
- "auto_map": {
18
- "AutoConfig": "configuration_i3.I3Config",
19
- "AutoModelForCausalLM": "modeling_i3.I3ForCausalLM",
20
- "AutoTokenizer": "tokenization_i3.I3Tokenizer"
21
- }
22
  }
 
1
  {
2
+ "architectures": ["I3ForCausalLM"],
 
 
3
  "model_type": "i3",
4
+ "auto_map": {
5
+ "AutoConfig": "i3_transformer.configuration_i3.I3Config",
6
+ "AutoModelForCausalLM": "i3_transformer.modeling_i3.I3ForCausalLM",
7
+ "AutoTokenizer": "i3_transformer.tokenization_i3.I3Tokenizer"
8
+ },
9
  "vocab_size": 4466,
10
  "d_model": 512,
11
  "n_layers": 12,
12
  "n_heads": 16,
13
  "max_seq_len": 256,
14
  "rank": 128,
15
+ "d_state": 64
 
 
 
 
 
 
 
 
 
16
  }