```json
{
  "results": {
    "arc_challenge": {
      "acc": 0.16723549488054607,
      "acc_stderr": 0.01090553272460121,
      "acc_norm": 0.22184300341296928,
      "acc_norm_stderr": 0.012141659068147884
    }
  },
  "versions": {
    "arc_challenge": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-81M-tied,revision=main,trust_remote_code=True,dtype='float'",
    "num_fewshot": 25,
    "batch_size": "16",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
```
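The `config` block above maps directly onto the evaluation harness's entry point. Below is a minimal sketch of how such a run could be reproduced, assuming the pre-0.4 EleutherAI lm-evaluation-harness Python API (the `hf-causal-experimental` model type and `"versions": {"arc_challenge": 0}` point to an older harness release); the keyword arguments mirror the `config` keys, but exact signatures vary between harness versions.

```python
# Sketch: re-running the 25-shot ARC-Challenge evaluation recorded above,
# assuming the older lm_eval.evaluator.simple_evaluate interface.
from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args=(
        "pretrained=BEE-spoke-data/smol_llama-81M-tied,"
        "revision=main,trust_remote_code=True,dtype='float'"
    ),
    tasks=["arc_challenge"],
    num_fewshot=25,           # 25-shot, as in the config block
    batch_size=16,
    device="cuda",
    no_cache=False,
    limit=None,               # evaluate the full task set
    bootstrap_iters=100000,   # bootstrap iterations for the stderr estimates
)

# The returned dict has the same shape as the JSON above.
print(results["results"]["arc_challenge"])
```

Note that `acc_stderr` and `acc_norm_stderr` come from the bootstrap controlled by `bootstrap_iters`, so re-runs can differ slightly in the stderr values even when the accuracies match.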