{ "architectures": [ "CausalBERTMultiTaskModel" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_eurobert.EuroBertConfig", "AutoModel": "modeling_eurobert.EuroBertModel", "AutoModelForMaskedLM": "modeling_eurobert.EuroBertForMaskedLM", "AutoModelForPreTraining": "modeling_eurobert.EuroBertPreTrainedModel", "AutoModelForSequenceClassification": "modeling_eurobert.EuroBertForSequenceClassification", "AutoModelForTokenClassification": "modeling_eurobert.EuroBertForTokenClassification" }, "base_model_name": "EuroBERT/EuroBERT-210m", "bos_token": "<|begin_of_text|>", "bos_token_id": 128000, "clf_pooling": "late", "eos_token": "<|end_of_text|>", "eos_token_id": 128001, "head_dim": 64, "hidden_act": "silu", "hidden_dropout": 0.0, "hidden_size": 768, "id2label_relation": { "0": "NO_RELATION", "1": "MONO_POS_CAUSE", "10": "MONO_NEG_EFFECT", "11": "DIST_NEG_EFFECT", "12": "PRIO_NEG_EFFECT", "13": "INTERDEPENDENCY", "2": "DIST_POS_CAUSE", "3": "PRIO_POS_CAUSE", "4": "MONO_NEG_CAUSE", "5": "DIST_NEG_CAUSE", "6": "PRIO_NEG_CAUSE", "7": "MONO_POS_EFFECT", "8": "DIST_POS_EFFECT", "9": "PRIO_POS_EFFECT" }, "id2label_span": { "0": "O", "1": "B-INDICATOR", "2": "I-INDICATOR", "3": "B-ENTITY", "4": "I-ENTITY" }, "initializer_range": 0.02, "intermediate_size": 3072, "mask_token": "<|mask|>", "mask_token_id": 128002, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "eurobert", "num_attention_heads": 12, "num_hidden_layers": 12, "num_key_value_heads": 12, "num_relation_labels": 14, "num_span_labels": 5, "pad_token": "<|end_of_text|>", "pad_token_id": 128001, "pretraining_tp": 1, "relation_class_weights": [ 0.1, 0.1, 0.1, 0.1, 0.1, 0.20260826579313382, 0.32417322526901415, 0.1, 0.1, 0.13507217719542255, 0.1, 0.10130413289656691, 0.10805774175633805, 0.1 ], "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 250000, "span_class_weights": [ 0.1, 0.4253362505800068, 0.288930595674656, 0.19287324011981216, 0.1 ], "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.53.1", "use_cache": false, "vocab_size": 128256 }