Dataset Viewer
| model_id | name | overall_score | is_open_source | Number of Params | is_reasoning | is_moe | Analytical Chemistry | Chemical Preference | General Chemistry | Inorganic Chemistry | Materials Science | Organic Chemistry | Physical Chemistry | Technical Chemistry | Toxicity and Safety |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| o1-preview | o1-preview | 0.64 | false | null | true | null | 0.62 | 0.56 | 0.93 | 0.9 | 0.73 | 0.83 | 0.85 | 0.85 | 0.48 |
| claude3.5 | Claude-3.5 (Sonnet) | 0.63 | false | null | false | null | 0.57 | 0.58 | 0.83 | 0.84 | 0.71 | 0.83 | 0.77 | 0.85 | 0.44 |
| claude3.5-react | Claude-3.5 (Sonnet) React | 0.62 | false | null | false | null | 0.58 | 0.6 | 0.87 | 0.8 | 0.68 | 0.84 | 0.76 | 0.8 | 0.41 |
| gpt-4o | GPT-4o | 0.61 | false | null | false | null | 0.56 | 0.59 | 0.81 | 0.8 | 0.75 | 0.76 | 0.72 | 0.75 | 0.44 |
| llama3.1-405b-instruct | Llama-3.1-405B-Instruct | 0.58 | true | 405,000,000,000 | false | false | 0.51 | 0.54 | 0.79 | 0.77 | 0.65 | 0.76 | 0.71 | 0.7 | 0.42 |
| mistral-large-2-123b | Mistral-Large-2 | 0.57 | true | 123,000,000,000 | false | false | 0.48 | 0.55 | 0.79 | 0.79 | 0.67 | 0.73 | 0.69 | 0.68 | 0.4 |
| paper-qa | PaperQA2 | 0.57 | null | null | false | null | 0.46 | 0.56 | 0.72 | 0.74 | 0.69 | 0.68 | 0.68 | 0.7 | 0.42 |
| claude3 | Claude-3 (Opus) | 0.57 | false | null | false | null | 0.47 | 0.57 | 0.77 | 0.79 | 0.63 | 0.7 | 0.65 | 0.7 | 0.41 |
| llama3.1-70b-instruct | Llama-3.1-70B-Instruct | 0.53 | true | 70,000,000,000 | false | false | 0.41 | 0.52 | 0.69 | 0.77 | 0.67 | 0.66 | 0.64 | 0.65 | 0.38 |
| llama3-70b-instruct-T-one | Llama-3-70B-Instruct (Temperature 1.0) | 0.52 | true | 70,000,000,000 | false | false | 0.38 | 0.53 | 0.6 | 0.68 | 0.62 | 0.63 | 0.6 | 0.6 | 0.37 |
| llama3-70b-instruct | Llama-3-70B-Instruct | 0.52 | true | 70,000,000,000 | false | false | 0.41 | 0.53 | 0.6 | 0.66 | 0.63 | 0.63 | 0.59 | 0.62 | 0.37 |
| gpt-4o-react | GPT-4o React | 0.51 | false | null | false | null | 0.47 | 0.42 | 0.76 | 0.73 | 0.56 | 0.72 | 0.6 | 0.72 | 0.37 |
| llama3.1-70b-instruct-T-one | Llama-3.1-70B-Instruct (Temperature 1.0) | 0.51 | true | 70,000,000,000 | false | false | 0.37 | 0.54 | 0.66 | 0.7 | 0.65 | 0.55 | 0.56 | 0.55 | 0.39 |
| gemma-2-9b-it-T-one | Gemma-2-9B-it (Temperature 1.0) | 0.48 | true | 9,000,000,000 | false | false | 0.29 | 0.56 | 0.56 | 0.54 | 0.5 | 0.55 | 0.47 | 0.47 | 0.34 |
| gemma-2-9b-it | Gemma-2-9B-it | 0.48 | true | 9,000,000,000 | false | false | 0.32 | 0.55 | 0.54 | 0.55 | 0.52 | 0.56 | 0.48 | 0.53 | 0.34 |
| llama3.1-8b-instruct | Llama-3.1-8B-Instruct | 0.47 | true | 8,000,000,000 | false | false | 0.39 | 0.53 | 0.5 | 0.5 | 0.4 | 0.58 | 0.51 | 0.45 | 0.33 |
| phi-3-medium-4k-instruct | Phi-3-Medium-4k-Instruct | 0.47 | true | 14,000,000,000 | false | false | 0.34 | 0.53 | 0.48 | 0.63 | 0.55 | 0.56 | 0.46 | 0.55 | 0.33 |
| gpt-3.5-turbo | GPT-3.5 Turbo | 0.47 | false | null | false | null | 0.38 | 0.53 | 0.49 | 0.54 | 0.48 | 0.59 | 0.4 | 0.4 | 0.31 |
| claude2 | Claude-2 | 0.47 | false | null | false | null | 0.38 | 0.51 | 0.5 | 0.61 | 0.46 | 0.59 | 0.5 | 0.47 | 0.33 |
| llama3.1-8b-instruct-T-one | Llama-3.1-8B-Instruct (Temperature 1.0) | 0.46 | true | 8,000,000,000 | false | false | 0.36 | 0.52 | 0.53 | 0.48 | 0.42 | 0.58 | 0.42 | 0.4 | 0.32 |
| llama3-8b-instruct | Llama-3-8B-Instruct | 0.46 | true | 8,000,000,000 | false | false | 0.41 | 0.52 | 0.44 | 0.49 | 0.42 | 0.56 | 0.37 | 0.6 | 0.32 |
| llama3-8b-instruct-T-one | Llama-3-8B-Instruct (Temperature 1.0) | 0.46 | true | 8,000,000,000 | false | false | 0.4 | 0.52 | 0.44 | 0.54 | 0.45 | 0.55 | 0.35 | 0.62 | 0.32 |
| command-r+ | Command-R+ | 0.45 | false | 104,000,000,000 | false | null | 0.34 | 0.51 | 0.5 | 0.52 | 0.46 | 0.55 | 0.33 | 0.5 | 0.31 |
| gemini-pro | Gemini-Pro | 0.45 | false | null | false | null | 0.39 | 0.5 | 0.48 | 0.47 | 0.5 | 0.57 | 0.45 | 0.47 | 0.31 |
| mixtral-8x7b-instruct-T-one | Mixtral-8x7b-Instruct (Temperature 1.0) | 0.42 | true | 47,000,000,000 | false | true | 0.28 | 0.52 | 0.45 | 0.51 | 0.4 | 0.47 | 0.35 | 0.33 | 0.27 |
| mixtral-8x7b-instruct | Mixtral-8x7b-Instruct | 0.42 | true | 47,000,000,000 | false | true | 0.27 | 0.54 | 0.42 | 0.55 | 0.42 | 0.48 | 0.33 | 0.33 | 0.27 |
| gpt-4 | GPT-4 | 0.41 | false | null | false | null | 0.43 | 0.16 | 0.7 | 0.7 | 0.61 | 0.68 | 0.64 | 0.7 | 0.41 |
| llama2-70b-chat | Llama-2-70B Chat | 0.27 | true | 70,000,000,000 | false | false | 0.07 | 0.49 | 0.13 | 0.22 | 0.18 | 0.15 | 0.17 | 0.12 | 0.14 |
| llama2-13b-chat | Llama-2-13B Chat | 0.26 | true | 13,000,000,000 | false | false | 0.09 | 0.48 | 0.11 | 0.27 | 0.1 | 0.15 | 0.15 | 0.1 | 0.1 |
| gemma-1-1-7b-it | Gemma-1.1-7B-it | 0.19 | true | 7,000,000,000 | false | false | 0.21 | 0 | 0.34 | 0.41 | 0.36 | 0.38 | 0.29 | 0.38 | 0.23 |
| gemma-1-1-7b-it-T-one | Gemma-1.1-7B-it (Temperature 1.0) | 0.19 | true | 7,000,000,000 | false | false | 0.21 | 0.01 | 0.35 | 0.41 | 0.36 | 0.37 | 0.3 | 0.38 | 0.22 |
| galactica_120b | Galactica-120b | 0.02 | true | 120,000,000,000 | false | false | 0 | 0 | 0.05 | 0.05 | 0 | 0.01 | 0.06 | 0 | 0.02 |
| test-1-main | test-1 | 0 | true | 7 | true | true | null | null | null | null | null | null | null | null | null |
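For quick analysis, the leaderboard can be loaded as a dataframe. The snippet below is a minimal sketch, assuming the table above has been exported to a CSV file named `leaderboard.csv` (hypothetical filename) with the column names shown in the header row; topic scores can be null, which the per-topic lookup skips.

```python
# Minimal sketch: load the leaderboard and inspect overall and topic-level scores.
# Assumes "leaderboard.csv" (hypothetical filename) with the columns shown above.
import pandas as pd

TOPIC_COLUMNS = [
    "Analytical Chemistry", "Chemical Preference", "General Chemistry",
    "Inorganic Chemistry", "Materials Science", "Organic Chemistry",
    "Physical Chemistry", "Technical Chemistry", "Toxicity and Safety",
]

df = pd.read_csv("leaderboard.csv")

# Rank models by overall score, highest first.
ranked = df.sort_values("overall_score", ascending=False)
print(ranked[["name", "overall_score", "is_open_source"]].head(10))

# Best-scoring model per topic (idxmax skips missing values by default).
for topic in TOPIC_COLUMNS:
    best = df.loc[df[topic].idxmax()]
    print(f"{topic}: {best['name']} ({best[topic]:.2f})")
```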