Add the context length variable (`max_position_embeddings`) to config.json; this is the same variable used in Bamba 9B v2.

#6
by anakin004 - opened
Files changed (1)
  1. config.json +1 -0
config.json CHANGED
@@ -25,6 +25,7 @@
   "mamba_n_groups": 1,
   "mamba_n_heads": 128,
   "mamba_proj_bias": false,
+  "max_position_embeddings": 4096,
   "model_type": "bamba",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,