{
"adapter_layers": 28,
"adapter_len": 100,
"auto_mapping": null,
"base_model_name_or_path": null,
"inference_mode": false,
"peft_type": "ADAPTION_PROMPT",
"revision": null,
"target_modules": null,
"task_type": "CAUSAL_LM"
}