{
  "dtype": "int8",
  "input_info": null,
  "optimum_version": "1.27.0",
  "output_attentions": false,
  "quantization_config": {
    "all_layers": null,
    "backup_precision": null,
    "bits": 8,
    "dataset": null,
    "dtype": "int8",
    "gptq": null,
    "group_size": -1,
    "ignored_scope": null,
    "lora_correction": null,
    "num_samples": null,
    "processor": null,
    "quant_method": "default",
    "ratio": 1.0,
    "scale_estimation": null,
    "sensitivity_metric": null,
    "statistics_path": null,
    "sym": true,
    "tokenizer": null,
    "trust_remote_code": true
  },
  "save_onnx_model": false,
  "transformers_version": "4.53.3"
}