KevinNg99's picture
Initial commit.
43c5292
raw
history blame contribute delete
120 Bytes
import torch
# Shorthand precision names ("fp32", "fp16", "bf16") mapped to their
# corresponding torch dtype objects.
PRECISION_TO_TYPE: dict[str, torch.dtype] = dict(
    fp32=torch.float32,
    fp16=torch.float16,
    bf16=torch.bfloat16,
)