{
  "architectures": [
    "GARModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_gar.GARConfig",
    "AutoModel": "modeling_gar.GARModel",
    "AutoModelForCausalLM": "modeling_gar.GARModel"
  },
  "crop_tokens_ids": [
    128004,
    128005,
    128008,
    128010,
    128011
  ],
  "kernel_size": [
    14,
    14
  ],
  "mask_path_embedding_out_channels": 1024,
  "mllm_config": {
    "_name_or_path": "/mnt/bn/zilongdata-us/wangyuhao/model/Perception-LM-1B",
    "architectures": [
      "PerceptionLMForConditionalGeneration"
    ],
    "image_token_id": 128002,
    "model_type": "perception_lm",
    "projector_pooling_ratio": 2,
    "text_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_bias": false,
      "attention_dropout": 0.0,
      "bad_words_ids": null,
      "begin_suppress_tokens": null,
      "bos_token_id": 128000,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "diversity_penalty": 0.0,
      "do_sample": false,
      "early_stopping": false,
      "encoder_no_repeat_ngram_size": 0,
      "eos_token_id": [
        128001,
        128009
      ],
      "exponential_decay_length_penalty": null,
      "finetuning_task": null,
      "forced_bos_token_id": null,
      "forced_eos_token_id": null,
      "head_dim": 64,
      "hidden_act": "silu",
      "hidden_size": 2048,
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "initializer_range": 0.02,
      "intermediate_size": 8192,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "length_penalty": 1.0,
      "max_length": 20,
      "max_position_embeddings": 11520,
      "min_length": 0,
      "mlp_bias": false,
      "model_type": "llama",
      "no_repeat_ngram_size": 0,
      "num_attention_heads": 32,
      "num_beam_groups": 1,
      "num_beams": 1,
      "num_hidden_layers": 16,
      "num_key_value_heads": 8,
      "num_return_sequences": 1,
      "output_attentions": false,
      "output_hidden_states": false,
      "output_scores": false,
      "pad_token_id": null,
      "prefix": null,
      "pretraining_tp": 1,
      "problem_type": null,
      "pruned_heads": {},
      "remove_invalid_values": false,
      "repetition_penalty": 1.0,
      "return_dict": true,
      "return_dict_in_generate": false,
      "rms_norm_eps": 1e-05,
      "rope_scaling": {
        "factor": 32.0,
        "high_freq_factor": 4.0,
        "low_freq_factor": 1.0,
        "original_max_position_embeddings": 8192,
        "rope_type": "llama3"
      },
      "rope_theta": 500000.0,
      "sep_token_id": null,
      "suppress_tokens": null,
      "task_specific_params": null,
      "temperature": 1.0,
      "tf_legacy_loss": false,
      "tie_encoder_decoder": false,
      "tie_word_embeddings": true,
      "tokenizer_class": null,
      "top_k": 50,
      "top_p": 1.0,
      "torch_dtype": "bfloat16",
      "torchscript": false,
      "typical_p": 1.0,
      "use_bfloat16": false,
      "use_cache": true,
      "use_flash_attn": true,
      "vocab_size": 128262
    },
    "torch_dtype": "bfloat16",
    "use_flash_attn": true,
    "video_token_id": 128003,
    "vision_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architecture": "vit_pe_core_large_patch14_336",
      "architectures": null,
      "bad_words_ids": null,
      "begin_suppress_tokens": null,
      "bos_token_id": null,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "diversity_penalty": 0.0,
      "do_pooling": true,
      "do_sample": false,
      "early_stopping": false,
      "encoder_no_repeat_ngram_size": 0,
      "eos_token_id": null,
      "exponential_decay_length_penalty": null,
      "finetuning_task": null,
      "forced_bos_token_id": null,
      "forced_eos_token_id": null,
      "global_pool": "map",
      "initializer_range": 0.02,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label_names": [
        "LABEL_0",
        "LABEL_1"
      ],
      "length_penalty": 1.0,
      "max_length": 20,
      "min_length": 0,
      "model_args": {
        "depth": 23,
        "embed_dim": 1024,
        "global_pool": "",
        "img_size": [
          448,
          448
        ],
        "init_values": 0.1,
        "ref_feat_shape": [
          32,
          32
        ],
        "use_post_transformer_norm": false
      },
      "model_type": "timm_wrapper",
      "no_repeat_ngram_size": 0,
      "num_beam_groups": 1,
      "num_beams": 1,
      "num_classes": 2,
      "num_features": 1024,
      "num_return_sequences": 1,
      "output_attentions": false,
      "output_hidden_states": false,
      "output_scores": false,
      "pad_token_id": null,
      "prefix": null,
      "pretrained_cfg": {
        "classifier": "head",
        "crop_mode": "center",
        "crop_pct": 1.0,
        "custom_load": false,
        "first_conv": "patch_embed.proj",
        "fixed_input_size": true,
        "input_size": [
          3,
          336,
          336
        ],
        "interpolation": "bicubic",
        "license": "custom",
        "mean": [
          0.5,
          0.5,
          0.5
        ],
        "pool_size": null,
        "std": [
          0.5,
          0.5,
          0.5
        ],
        "tag": "fb"
      },
      "problem_type": null,
      "pruned_heads": {},
      "remove_invalid_values": false,
      "repetition_penalty": 1.0,
      "return_dict": true,
      "return_dict_in_generate": false,
      "sep_token_id": null,
      "suppress_tokens": null,
      "task_specific_params": null,
      "temperature": 1.0,
      "tf_legacy_loss": false,
      "tie_encoder_decoder": false,
      "tie_word_embeddings": true,
      "tokenizer_class": null,
      "top_k": 50,
      "top_p": 1.0,
      "torch_dtype": "bfloat16",
      "torchscript": false,
      "typical_p": 1.0,
      "use_bfloat16": false,
      "use_flash_attn": false
    },
    "vision_use_cls_token": true
  },
  "model_type": "GAR",
  "output_attentions": false,
  "patch_size_h": 14,
  "patch_size_w": 14,
  "prompt_numbers": 5,
  "max_num_tiles": 16,
  "torch_dtype": "bfloat16",
  "transformers_version": null
}