```json
{
  "results": {
    "hellaswag": {
      "acc": 0.27450199203187253,
      "acc_stderr": 0.008909237404005179,
      "acc_norm": 0.28884462151394424,
      "acc_norm_stderr": 0.009048238955347484
    }
  },
  "versions": {
    "hellaswag": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-81M-tied,revision=main,trust_remote_code=True,dtype='float'",
    "num_fewshot": 10,
    "batch_size": "16",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": 0.25,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
```
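
For reference, here is a minimal sketch of how a run like this could be reproduced in Python, assuming the legacy EleutherAI lm-evaluation-harness (v0.3.x) API, which is what the `hf-causal-experimental` model type and the `config` keys above correspond to. The arguments simply mirror the `config` block; version pinning and hardware availability are assumptions, not part of the original results.

```python
# Sketch: reproduce the 10-shot hellaswag run above with the legacy
# lm-evaluation-harness Python API (assumes `pip install lm-eval==0.3.0`
# and a CUDA device; argument values mirror the "config" block).
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args=(
        "pretrained=BEE-spoke-data/smol_llama-81M-tied,"
        "revision=main,trust_remote_code=True,dtype='float'"
    ),
    tasks=["hellaswag"],
    num_fewshot=10,          # 10-shot evaluation, as in the config
    batch_size=16,
    device="cuda",
    no_cache=False,
    limit=0.25,              # evaluate on 25% of the examples
    bootstrap_iters=100000,  # bootstrap iterations for the stderr estimates
)

# Print just the per-task metrics (acc, acc_norm, and their stderrs).
print(json.dumps(results["results"], indent=2))
```

Note that `limit=0.25` means the scores above come from a quarter of the hellaswag examples, so expect some variance against full-set runs.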