File size: 120 Bytes
43c5292
 
 
 
 
 
 
1
2
3
4
5
6
7
8
import torch

# Maps a precision identifier string (as used in configs / CLI flags)
# to the corresponding torch floating-point dtype.
PRECISION_TO_TYPE = dict(
    fp32=torch.float32,
    fp16=torch.float16,
    bf16=torch.bfloat16,
)