greedy-intersection / config.json
{
  "architectures": [
    "GreedyModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_greedy.GreedyConfig",
    "AutoModelForCausalLM": "modeling_greedy.GreedyModel"
  },
  "eos_token_id": 8,
  "pad_token_id": 9,
  "reciprocals": [
    [4, 3],
    [5, 2],
    [6, 1],
    [7, 0]
  ],
  "reducables": [
    [[4], [3]],
    [[5], [2]],
    [[6], [1]],
    [[7], [0]],
    [[4, 5, 6, 7], [0, 1, 2, 3]]
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.21.1",
  "vocab_size": 10
}
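
Because auto_map points at custom classes (configuration_greedy.GreedyConfig and modeling_greedy.GreedyModel) that ship alongside this config, loading through the Auto* APIs requires trust_remote_code=True. Below is a minimal loading sketch; the repo id "kibrq/greedy-intersection" is an assumption inferred from the file path, not confirmed by this config.

    # Minimal loading sketch for a repo that uses auto_map custom code.
    # Assumption: the repo id is "kibrq/greedy-intersection".
    from transformers import AutoConfig, AutoModelForCausalLM

    repo_id = "kibrq/greedy-intersection"  # hypothetical repo id

    # trust_remote_code=True is required so transformers can import the
    # GreedyConfig / GreedyModel classes referenced in auto_map.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

    print(config.vocab_size)   # 10
    print(config.reciprocals)  # [[4, 3], [5, 2], [6, 1], [7, 0]]

The reciprocals pairs (4↔3, 5↔2, 6↔1, 7↔0) read like mutually inverse token pairs, and reducables like token sequences the model may cancel against each other, but those semantics are defined by the custom modeling code, not by this file.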