modernvbert-embed / config.json
{
  "additional_vocab_size": 40,
  "architectures": [
    "BiModernVBert"
  ],
  "auto_map": {
    "AutoConfig": "configuration_vbert.VBertConfig",
    "AutoModel": "modeling_vbert.VBertModel",
    "AutoModelForMaskedLM": "modeling_vbert.VBertForMaskedLM"
  },
  "freeze_config": {
    "freeze_lm_head": true,
    "freeze_text_layers": true,
    "freeze_vision_layers": true
  },
  "hidden_size": 768,
  "image_token_id": 50407,
  "initializer_range": 0.02,
  "max_position_embeddings": 8192,
  "model_type": "vbert",
  "neftune_noise_alpha": 0.0,
  "output_attentions": false,
  "pixel_shuffle_factor": 4,
  "qk_layer_norms": false,
  "scale_factor": 4,
  "text_config": {
    "hidden_size": 768,
    "intermediate_size": 1152,
    "mlp_bias": false,
    "model_type": "vbert",
    "num_hidden_layers": 22,
    "text_model_name": "jhu-clsp/ettin-encoder-150m",
    "vocab_size": 50368
  },
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": null,
  "use_cache": true,
  "use_resampler": false,
  "vision_config": {
    "embed_dim": 768,
    "image_size": 512,
    "intermediate_size": 3072,
    "model_type": "vbert",
    "num_hidden_layers": 12,
    "patch_size": 16,
    "vision_model_name": "google/siglip2-base-patch16-512"
  },
  "vocab_size": 50368
}
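
Because "auto_map" routes AutoConfig and AutoModel to custom VBertConfig and VBertModel classes shipped in the repository (configuration_vbert.py / modeling_vbert.py) rather than classes built into transformers, loading this checkpoint requires trust_remote_code=True. Below is a minimal loading sketch in Python; the repo id is a placeholder (substitute the actual Hub path of this model), and everything else follows from the config above.

from transformers import AutoConfig, AutoModel

# Placeholder repo id -- replace with the actual Hub path of this model.
repo_id = "modernvbert-embed"

# trust_remote_code=True lets transformers import the custom
# configuration_vbert.VBertConfig / modeling_vbert.VBertModel
# classes that "auto_map" points at inside the repository.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)

# Top-level fields from the config above are available directly.
print(config.model_type)   # "vbert"
print(config.hidden_size)  # 768

Note that "freeze_config" freezes the text layers, vision layers, and LM head, so only the remaining components (e.g. the projection between the SigLIP2 vision tower and the Ettin text encoder) were trainable under this configuration.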