mediawatch-el-climate / config.json
{
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "dtype": "float32",
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "BΙΩΣΙΜΟΤΗΤΑ",
    "1": "ΠΕΡΙΒΑΛΛΟΝ",
    "2": "ΚΛΙΜΑΤΙΚΗ ΑΛΛΑΓΗ",
    "3": "ΘΕΡΜΟΚΡΑΣΙΑ",
    "4": "ΚΛΙΜΑΤΙΚΗ ΚΡΙΣΗ",
    "5": "ΚΛΙΜΑ",
    "6": "ΡΥΠΑΝΣΗ",
    "7": "ΕΝΕΡΓΕΙΑ"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "BΙΩΣΙΜΟΤΗΤΑ": 0,
    "ΕΝΕΡΓΕΙΑ": 7,
    "ΘΕΡΜΟΚΡΑΣΙΑ": 3,
    "ΚΛΙΜΑ": 5,
    "ΚΛΙΜΑΤΙΚΗ ΑΛΛΑΓΗ": 2,
    "ΚΛΙΜΑΤΙΚΗ ΚΡΙΣΗ": 4,
    "ΠΕΡΙΒΑΛΛΟΝ": 1,
    "ΡΥΠΑΝΣΗ": 6
  },
  "layer_norm_eps": 1e-12,
  "max_length": 512,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "transformers_version": "4.56.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
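
This is a standard RoBERTa-base configuration (12 layers, 12 attention heads, hidden size 768, vocabulary 50265) fine-tuned for single-label classification over eight Greek media topics: ΒΙΩΣΙΜΟΤΗΤΑ (sustainability), ΠΕΡΙΒΑΛΛΟΝ (environment), ΚΛΙΜΑΤΙΚΗ ΑΛΛΑΓΗ (climate change), ΘΕΡΜΟΚΡΑΣΙΑ (temperature), ΚΛΙΜΑΤΙΚΗ ΚΡΙΣΗ (climate crisis), ΚΛΙΜΑ (climate), ΡΥΠΑΝΣΗ (pollution), ΕΝΕΡΓΕΙΑ (energy). A minimal inference sketch with the transformers library follows; the model path and the example sentence are assumptions, and the checkpoint weights are expected to sit next to this config.

    # Minimal inference sketch for this config. Assumptions: the fine-tuned
    # checkpoint lives in a folder named "mediawatch-el-climate" alongside
    # this config.json, and the input sentence is purely illustrative.
    import torch
    from transformers import AutoModelForSequenceClassification, AutoTokenizer

    model_dir = "mediawatch-el-climate"  # assumed local path or Hub repo id

    tokenizer = AutoTokenizer.from_pretrained(model_dir)
    model = AutoModelForSequenceClassification.from_pretrained(model_dir)
    model.eval()

    # Truncate to the config's max_length of 512 tokens.
    text = "Η κλιματική αλλαγή απειλεί τις παράκτιες πόλεις."
    inputs = tokenizer(text, truncation=True, max_length=512, return_tensors="pt")

    with torch.no_grad():
        logits = model(**inputs).logits  # shape (1, 8): one logit per label

    pred = logits.argmax(dim=-1).item()
    print(model.config.id2label[pred])  # e.g. "ΚΛΙΜΑΤΙΚΗ ΑΛΛΑΓΗ"

Because problem_type is single_label_classification, the highest logit picks the single topic; applying softmax to the logits instead would give per-topic probabilities.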