Each row in the preview below is a single evaluation run. A run has six `dict`-valued columns: `config_general` (run configuration and timings), `results` (per-task metrics), `versions` (task version numbers), `config_tasks` (task types), `summary_tasks` (per-task hashes and padding statistics), and `summary_general` (run-level hashes and padding statistics). Cell contents are truncated in this preview.
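As a rough, unofficial sketch of the row schema in Python (the field names come from the preview below; `RunRow` and `TaskScores` are hypothetical names, not part of the dataset):

```python
from typing import Any, TypedDict

class TaskScores(TypedDict, total=False):
    # Per-task metrics as they appear in `results`; name is hypothetical.
    acc: float
    acc_stderr: float
    acc_norm: float
    acc_norm_stderr: float

class RunRow(TypedDict):
    # One preview row, i.e. one evaluation run; name is hypothetical.
    config_general: dict[str, Any]    # lighteval_sha, timings, batch size, ...
    results: dict[str, TaskScores]    # keyed like "harness|arc:challenge|25"
    versions: dict[str, int]          # task version numbers
    config_tasks: dict[str, str]      # e.g. "LM Harness task"
    summary_tasks: dict[str, Any]     # per-task hashes and padding counts
    summary_general: dict[str, Any]   # run-level hashes and padding counts
```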
**Row 1**

`config_general` (truncated):

```json
{
  "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null,
  "job_id": "",
  "start_time": 145877.357026936,
  "end_time": 167502.554312204,
  "total_evaluation_time_secondes": "21625.19728526802",
  "model_n...
```

`results` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "acc": 0.7013651877133106,
    "acc_stderr": 0.013374078615068749,
    "acc_norm": 0.7389078498293515,
    "acc_norm_stderr": 0.012835523909473848
  },
  "harness|hellaswag|10": {
    "acc": 0.719577773351922,
    "acc_stderr": 0.004482874732237349,
    "acc_norm": 0.89065923122883...
```

`versions` (truncated):

```json
{
  "all": 0,
  "harness|arc:challenge|25": 0,
  "harness|gsm8k|5": 0,
  "harness|hellaswag|10": 0,
  "harness|hendrycksTest-abstract_algebra|5": 1,
  "harness|hendrycksTest-anatomy|5": 1,
  "harness|hendrycksTest-astronomy|5": 1,
  "harness|hendrycksTest-business_ethics|5": 1,
  "harness|hendrycksTest-clinical_knowled...
```

`config_tasks` (truncated):

```json
{
  "harness|arc:challenge": "LM Harness task",
  "harness|gsm8k": "LM Harness task",
  "harness|hellaswag": "LM Harness task",
  "harness|hendrycksTest-abstract_algebra": "LM Harness task",
  "harness|hendrycksTest-anatomy": "LM Harness task",
  "harness|hendrycksTest-astronomy": "LM Harness task",
  "harness|hendryck...
```

`summary_tasks` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "hashes": {
      "hash_examples": "17b0cae357c0259e",
      "hash_full_prompts": "045cbb916e5145c6",
      "hash_input_tokens": "ca48d52265c0051f",
      "hash_cont_tokens": "e8abf848493b50f7"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4687,
    "non_padde...
```

`summary_general`:

```json
{
  "hashes": {
    "hash_examples": "3b7fa57a057f9415",
    "hash_full_prompts": "63615fc50fc9417c",
    "hash_input_tokens": "a8fa53915153e1db",
    "hash_cont_tokens": "62b683fb5cadf0a1"
  },
  "truncated": 0,
  "non_truncated": 28659,
  "padded": 113348,
  "non_padded": 1524,
  "num_truncated_few_shots": 0
}
```
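The `results` cell maps task names such as `harness|arc:challenge|25` to accuracy metrics. A minimal sketch of aggregating them, assuming the row's `results` dict has already been parsed from JSON (the fallback to plain `acc` is an assumption for tasks that may not report `acc_norm`):

```python
def mean_acc_norm(results: dict) -> float:
    """Average `acc_norm` over a run's tasks, falling back to `acc`
    when a task reports no `acc_norm` (fallback is an assumption)."""
    scores = [
        m.get("acc_norm", m.get("acc"))
        for task, m in results.items()
        if task != "all"  # skip any aggregate entry
    ]
    scores = [s for s in scores if s is not None]
    return sum(scores) / len(scores)

# The two tasks visible in Row 1 (hellaswag's acc_norm is cut off in
# the preview; its visible prefix is used here just for illustration):
row1_results = {
    "harness|arc:challenge|25": {"acc": 0.7013651877133106, "acc_norm": 0.7389078498293515},
    "harness|hellaswag|10": {"acc": 0.719577773351922, "acc_norm": 0.89065923122883},
}
print(round(mean_acc_norm(row1_results), 4))  # 0.8148
```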
**Row 2**

`config_general` (truncated):

```json
{
  "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null,
  "job_id": "",
  "start_time": 3973041.324489038,
  "end_time": 4009208.078815405,
  "total_evaluation_time_secondes": "36166.75432636682",
  "model...
```

`results` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "acc": 0.735494880546075,
    "acc_stderr": 0.012889272949313371,
    "acc_norm": 0.7602389078498294,
    "acc_norm_stderr": 0.012476304127453944
  },
  "harness|hellaswag|10": {
    "acc": 0.7199761003784106,
    "acc_stderr": 0.004480929450281562,
    "acc_norm": 0.89265086636128...
```

`versions` (truncated):

```json
{
  "all": 0,
  "harness|arc:challenge|25": 0,
  "harness|gsm8k|5": 0,
  "harness|hellaswag|10": 0,
  "harness|hendrycksTest-abstract_algebra|5": 1,
  "harness|hendrycksTest-anatomy|5": 1,
  "harness|hendrycksTest-astronomy|5": 1,
  "harness|hendrycksTest-business_ethics|5": 1,
  "harness|hendrycksTest-clinical_knowled...
```

`config_tasks` (truncated):

```json
{
  "harness|arc:challenge": "LM Harness task",
  "harness|gsm8k": "LM Harness task",
  "harness|hellaswag": "LM Harness task",
  "harness|hendrycksTest-abstract_algebra": "LM Harness task",
  "harness|hendrycksTest-anatomy": "LM Harness task",
  "harness|hendrycksTest-astronomy": "LM Harness task",
  "harness|hendryck...
```

`summary_tasks` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "hashes": {
      "hash_examples": "17b0cae357c0259e",
      "hash_full_prompts": "045cbb916e5145c6",
      "hash_input_tokens": "a86de36cca2a19b9",
      "hash_cont_tokens": "402adfa0ed1abfe3"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4687,
    "non_padde...
```

`summary_general`:

```json
{
  "hashes": {
    "hash_examples": "3b7fa57a057f9415",
    "hash_full_prompts": "63615fc50fc9417c",
    "hash_input_tokens": "44ee6f861f86ab7d",
    "hash_cont_tokens": "63510439853388bf"
  },
  "truncated": 0,
  "non_truncated": 28659,
  "padded": 113413,
  "non_padded": 1459,
  "num_truncated_few_shots": 0
}
```
**Row 3**

`config_general` (truncated):

```json
{
  "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
  "num_few_shot_default": 0,
  "num_fewshot_seeds": 1,
  "override_batch_size": 1,
  "max_samples": null,
  "job_id": "",
  "start_time": 1009823.331966069,
  "end_time": 1033923.596197144,
  "total_evaluation_time_secondes": "24100.264231075067",
  "mode...
```

`results` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "acc": 0.7218430034129693,
    "acc_stderr": 0.0130944699195388,
    "acc_norm": 0.7406143344709898,
    "acc_norm_stderr": 0.012808273573927094
  },
  "harness|hellaswag|10": {
    "acc": 0.6701852220673172,
    "acc_stderr": 0.004691848665399069,
    "acc_norm": 0.867357100179247...
```

`versions` (truncated):

```json
{
  "all": 0,
  "harness|arc:challenge|25": 0,
  "harness|gsm8k|5": 0,
  "harness|hellaswag|10": 0,
  "harness|hendrycksTest-abstract_algebra|5": 1,
  "harness|hendrycksTest-anatomy|5": 1,
  "harness|hendrycksTest-astronomy|5": 1,
  "harness|hendrycksTest-business_ethics|5": 1,
  "harness|hendrycksTest-clinical_knowled...
```

`config_tasks` (truncated):

```json
{
  "harness|arc:challenge": "LM Harness task",
  "harness|gsm8k": "LM Harness task",
  "harness|hellaswag": "LM Harness task",
  "harness|hendrycksTest-abstract_algebra": "LM Harness task",
  "harness|hendrycksTest-anatomy": "LM Harness task",
  "harness|hendrycksTest-astronomy": "LM Harness task",
  "harness|hendryck...
```

`summary_tasks` (truncated):

```json
{
  "harness|arc:challenge|25": {
    "hashes": {
      "hash_examples": "17b0cae357c0259e",
      "hash_full_prompts": "045cbb916e5145c6",
      "hash_input_tokens": "f52f7134dd4e8235",
      "hash_cont_tokens": "e23c779c4c2dd1ec"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4682,
    "non_padde...
```

`summary_general`:

```json
{
  "hashes": {
    "hash_examples": "3b7fa57a057f9415",
    "hash_full_prompts": "63615fc50fc9417c",
    "hash_input_tokens": "2f7ca631fba4ce39",
    "hash_cont_tokens": "e621b4a7c3fa87a7"
  },
  "truncated": 0,
  "non_truncated": 28659,
  "padded": 113445,
  "non_padded": 1427,
  "num_truncated_few_shots": 0
}
```
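Across the three runs above, `summary_general` shows identical `hash_examples` and `hash_full_prompts` but different `hash_input_tokens` and `hash_cont_tokens`, consistent with the same examples and prompts being tokenized differently per run. A minimal sketch for diffing those hashes, assuming two parsed `summary_general` dicts (the values below are Row 1's and Row 2's):

```python
def diff_hashes(run_a: dict, run_b: dict) -> dict:
    """Return the hash fields that differ between two `summary_general` dicts."""
    a, b = run_a["hashes"], run_b["hashes"]
    return {k: (a[k], b[k]) for k in a if a[k] != b.get(k)}

run1 = {"hashes": {"hash_examples": "3b7fa57a057f9415",
                   "hash_full_prompts": "63615fc50fc9417c",
                   "hash_input_tokens": "a8fa53915153e1db",
                   "hash_cont_tokens": "62b683fb5cadf0a1"}}
run2 = {"hashes": {"hash_examples": "3b7fa57a057f9415",
                   "hash_full_prompts": "63615fc50fc9417c",
                   "hash_input_tokens": "44ee6f861f86ab7d",
                   "hash_cont_tokens": "63510439853388bf"}}
print(diff_hashes(run1, run2))
# {'hash_input_tokens': ('a8fa53915153e1db', '44ee6f861f86ab7d'),
#  'hash_cont_tokens': ('62b683fb5cadf0a1', '63510439853388bf')}
```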
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7167235494880546,"acc_stderr":0.013167478735134575,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7192832764505119,"acc_stderr":0.013131238126975583,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7081911262798635,"acc_stderr":0.013284525292403503,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7704778156996587,"acc_stderr":0.012288926760890797,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7704778156996587,"acc_stderr":0.012288926760890797,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7627986348122867,"acc_stderr":0.012430399829260851,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED) | {"harness|arc:challenge|25":{"acc":0.7627986348122867,"acc_stderr":0.012430399829260851,"acc_norm":0(...TRUNCATED) | {"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED) | {"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED) | {"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED) | {"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED) |