{
    "architectures": [
        "LtgbertFoCausalLM"
    ],
    "auto_map": {
        "AutoConfig": "configuration_ltgbert.LtgbertConfig",
        "AutoModel": "modeling_ltgbert.LtgbertModel",
        "AutoModelForCausalLM": "modeling_ltgbert.LtgbertForCausalLM",
        "AutoModelForMaskedLM": "modeling_ltgbert.LtgbertForMaskedLM",
        "AutoModelForSequenceClassification": "modeling_ltgbert.LtgbertForSequenceClassification",
        "AutoModelForTokenClassification": "modeling_ltgbert.LtgbertForTokenClassification",
        "AutoModelForQuestionAnswering": "modeling_ltgbert.LtgbertForQuestionAnswering",
        "AutoModelForMultipleChoice": "modeling_ltgbert.LtgbertForMultipleChoice"
    },
    "attention_probs_dropout_prob": 0.1,
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-5,
    "max_position_embeddings": 512,
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "position_bucket_size": 32,
    "torch_dtype": "float32",
    "vocab_size": 32768,
    "temperature": 2.65
}
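
Because the auto_map entries above point to custom model code shipped with the repository (configuration_ltgbert.py and modeling_ltgbert.py), loading this checkpoint through the transformers Auto classes requires trust_remote_code=True. A minimal loading sketch, using a hypothetical repo id "your-org/ltgbert" in place of the actual repository name:

from transformers import AutoTokenizer, AutoModelForMaskedLM

# Hypothetical repo id; substitute the real model repository.
model_id = "your-org/ltgbert"

# trust_remote_code=True lets transformers import the repo's own
# configuration_ltgbert.py / modeling_ltgbert.py files named in auto_map.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(model_id, trust_remote_code=True)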