Each preview row below has the following columns:

Column | Type | Details |
---|---|---|
question | string | length 8 to 123 characters |
choices | list | always 4 items |
answer | int64 | 0 to 3 (index into choices) |
answer_label | string | 4 distinct values |
split | string | 1 distinct value |
subcategories | string | 1 distinct value |
category | string | 1 distinct value |
lang | string | 1 distinct value |
second_lang | string | 1 distinct value |
notes | string | 1 distinct value |
id | string | length 7 characters |
set_id | float64 | 200 to 220 |
variation_id | float64 | always 1 |
vanilla_cos_sim_to_canonical | dict | keyed by tokenizer name |
trimmed_cos_sim_to_canonical | dict | keyed by tokenizer name |
token_counts | dict | keyed by tokenizer name |
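A minimal sketch of reading these fields with the datasets library. The repository ID below is a placeholder (the dataset's actual name is not shown in this preview), and the field accesses simply follow the column names listed above.

```python
from datasets import load_dataset

# Placeholder repository ID -- the real dataset name is not stated in this preview.
ds = load_dataset("your-org/your-dataset", split="test")

row = ds[0]
print(row["question"])                # e.g. "7 + 7 is"
print(row["choices"])                 # the 4 answer options
print(row["choices"][row["answer"]])  # the correct option, e.g. "14" (label "A")
print(row["token_counts"]["gpt2"])    # token count under one of the 14 tokenizers
```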
Preview rows (split test, set_id 200 to 220):

question | choices | answer | answer_label | split | subcategories | category | lang | second_lang | notes | id | set_id | variation_id | vanilla_cos_sim_to_canonical | trimmed_cos_sim_to_canonical | token_counts |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7 + 7 is | ["14", "15", "13", "16"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 200-1.0 | 200 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 5, "Qwen/Qwen3-8B": 5, "bigscience/bloom": 4, "common-pile/comma-v0.1-1t": 5, "facebook/xglm-564M": 4, "google-bert/bert-base-multilingual-cased": 4, "google/byt5-small": 8, "google/gemma-2-2b": 5, "gpt2": 4, "meta-llama/Llama-3.2-1B": 5, "microsoft/Phi-3-mini-4k-instruct": 6, "mistralai/tekken": 5, "tiktoken/gpt-4o": 5, "tokenmonster/englishcode-32000-consistent-v1": 4} |
1/2 + 1/4 = 2/4 + 1/4 equals | ["1/2", "3/8", "1/3", "3/4"] | 3 | D | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 201-1.0 | 201 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 19, "Qwen/Qwen3-8B": 19, "bigscience/bloom": 9, "common-pile/comma-v0.1-1t": 19, "facebook/xglm-564M": 10, "google-bert/bert-base-multilingual-cased": 17, "google/byt5-small": 28, "google/gemma-2-2b": 19, "gpt2": 16, "meta-llama/Llama-3.2-1B": 19, "microsoft/Phi-3-mini-4k-instruct": 20, "mistralai/tekken": 19, "tiktoken/gpt-4o": 19, "tokenmonster/englishcode-32000-consistent-v1": 13} |
The area of a 4 by 3 rectangle is | ["10 square units", "13 square units", "12 square units", "14 square units"] | 2 | C | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 202-1.0 | 202 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 11, "Qwen/Qwen3-8B": 11, "bigscience/bloom": 9, "common-pile/comma-v0.1-1t": 12, "facebook/xglm-564M": 10, "google-bert/bert-base-multilingual-cased": 11, "google/byt5-small": 33, "google/gemma-2-2b": 11, "gpt2": 9, "meta-llama/Llama-3.2-1B": 11, "microsoft/Phi-3-mini-4k-instruct": 11, "mistralai/tekken": 11, "tiktoken/gpt-4o": 11, "tokenmonster/englishcode-32000-consistent-v1": 8} |
In the pattern 2, 4, 6, 8, the next number is | ["9", "8", "10", "11"] | 2 | C | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 203-1.0 | 203 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 19, "Qwen/Qwen3-8B": 19, "bigscience/bloom": 15, "common-pile/comma-v0.1-1t": 19, "facebook/xglm-564M": 13, "google-bert/bert-base-multilingual-cased": 15, "google/byt5-small": 45, "google/gemma-2-2b": 19, "gpt2": 15, "meta-llama/Llama-3.2-1B": 19, "microsoft/Phi-3-mini-4k-instruct": 19, "mistralai/tekken": 19, "tiktoken/gpt-4o": 19, "tokenmonster/englishcode-32000-consistent-v1": 9} |
The sum of angles in a triangle is | ["180 degrees", "90 degrees", "60 degrees", "360 degrees"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 204-1.0 | 204 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 8, "Qwen/Qwen3-8B": 8, "bigscience/bloom": 8, "common-pile/comma-v0.1-1t": 10, "facebook/xglm-564M": 9, "google-bert/bert-base-multilingual-cased": 8, "google/byt5-small": 34, "google/gemma-2-2b": 8, "gpt2": 8, "meta-llama/Llama-3.2-1B": 8, "microsoft/Phi-3-mini-4k-instruct": 8, "mistralai/tekken": 8, "tiktoken/gpt-4o": 8, "tokenmonster/englishcode-32000-consistent-v1": 6} |
In the sequence 15, 20, 25, ___, 35, the missing number is | ["28", "24", "30", "32"] | 2 | C | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 205-1.0 | 205 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 25, "Qwen/Qwen3-8B": 25, "bigscience/bloom": 17, "common-pile/comma-v0.1-1t": 21, "facebook/xglm-564M": 16, "google-bert/bert-base-multilingual-cased": 19, "google/byt5-small": 58, "google/gemma-2-2b": 25, "gpt2": 17, "meta-llama/Llama-3.2-1B": 21, "microsoft/Phi-3-mini-4k-instruct": 26, "mistralai/tekken": 25, "tiktoken/gpt-4o": 21, "tokenmonster/englishcode-32000-consistent-v1": 13} |
The number of sides in a square is | ["3", "4", "5", "6"] | 1 | B | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 206-1.0 | 206 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 9, "Qwen/Qwen3-8B": 9, "bigscience/bloom": 9, "common-pile/comma-v0.1-1t": 11, "facebook/xglm-564M": 8, "google-bert/bert-base-multilingual-cased": 8, "google/byt5-small": 35, "google/gemma-2-2b": 9, "gpt2": 9, "meta-llama/Llama-3.2-1B": 9, "microsoft/Phi-3-mini-4k-instruct": 9, "mistralai/tekken": 9, "tiktoken/gpt-4o": 9, "tokenmonster/englishcode-32000-consistent-v1": 7} |
Half of a circle is colored blue. The fraction that is shaded is | ["1/3", "1/4", "1/5", "1/2"] | 3 | D | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 207-1.0 | 207 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 14, "Qwen/Qwen3-8B": 14, "bigscience/bloom": 15, "common-pile/comma-v0.1-1t": 15, "facebook/xglm-564M": 17, "google-bert/bert-base-multilingual-cased": 16, "google/byt5-small": 64, "google/gemma-2-2b": 14, "gpt2": 15, "meta-llama/Llama-3.2-1B": 14, "microsoft/Phi-3-mini-4k-instruct": 15, "mistralai/tekken": 14, "tiktoken/gpt-4o": 14, "tokenmonster/englishcode-32000-consistent-v1": 14} |
In the Pythagorean theorem a² + b² = c², variable 'c' represents | ["The hypotenuse", "The shortest side", "The base", "The height"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 208-1.0 | 208 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 20, "Qwen/Qwen3-8B": 20, "bigscience/bloom": 18, "common-pile/comma-v0.1-1t": 23, "facebook/xglm-564M": 22, "google-bert/bert-base-multilingual-cased": 21, "google/byt5-small": 67, "google/gemma-2-2b": 17, "gpt2": 20, "meta-llama/Llama-3.2-1B": 20, "microsoft/Phi-3-mini-4k-instruct": 21, "mistralai/tekken": 20, "tiktoken/gpt-4o": 21, "tokenmonster/englishcode-32000-consistent-v1": 30} |
24 stickers minus 8 stickers leaves | ["15", "14", "17", "16"] | 3 | D | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 209-1.0 | 209 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 8, "Qwen/Qwen3-8B": 8, "bigscience/bloom": 8, "common-pile/comma-v0.1-1t": 9, "facebook/xglm-564M": 8, "google-bert/bert-base-multilingual-cased": 8, "google/byt5-small": 35, "google/gemma-2-2b": 8, "gpt2": 6, "meta-llama/Llama-3.2-1B": 7, "microsoft/Phi-3-mini-4k-instruct": 11, "mistralai/tekken": 10, "tiktoken/gpt-4o": 7, "tokenmonster/englishcode-32000-consistent-v1": 8} |
3 apples at 25 cents each costs | ["70 cents", "85 cents", "80 cents", "75 cents"] | 3 | D | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 210-1.0 | 210 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 9, "Qwen/Qwen3-8B": 9, "bigscience/bloom": 7, "common-pile/comma-v0.1-1t": 8, "facebook/xglm-564M": 9, "google-bert/bert-base-multilingual-cased": 8, "google/byt5-small": 31, "google/gemma-2-2b": 9, "gpt2": 7, "meta-llama/Llama-3.2-1B": 8, "microsoft/Phi-3-mini-4k-instruct": 12, "mistralai/tekken": 9, "tiktoken/gpt-4o": 8, "tokenmonster/englishcode-32000-consistent-v1": 8} |
47 rounded to the nearest 10 is | ["40", "55", "45", "50"] | 3 | D | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 211-1.0 | 211 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 10, "Qwen/Qwen3-8B": 10, "bigscience/bloom": 7, "common-pile/comma-v0.1-1t": 10, "facebook/xglm-564M": 9, "google-bert/bert-base-multilingual-cased": 7, "google/byt5-small": 31, "google/gemma-2-2b": 10, "gpt2": 7, "meta-llama/Llama-3.2-1B": 8, "microsoft/Phi-3-mini-4k-instruct": 11, "mistralai/tekken": 10, "tiktoken/gpt-4o": 8, "tokenmonster/englishcode-32000-consistent-v1": 6} |
9 × 9 equals | ["81", "62", "36", "64"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 212-1.0 | 212 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 5, "Qwen/Qwen3-8B": 5, "bigscience/bloom": 4, "common-pile/comma-v0.1-1t": 5, "facebook/xglm-564M": 5, "google-bert/bert-base-multilingual-cased": 5, "google/byt5-small": 13, "google/gemma-2-2b": 5, "gpt2": 4, "meta-llama/Llama-3.2-1B": 5, "microsoft/Phi-3-mini-4k-instruct": 6, "mistralai/tekken": 5, "tiktoken/gpt-4o": 5, "tokenmonster/englishcode-32000-consistent-v1": 6} |
A pizza is cut into 8 equal slices. If you eat 3 slices, 5 slices will be left. The fraction of pizza which is left will be | ["3/8", "5/8", "5/3", "3/5"] | 1 | B | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 213-1.0 | 213 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 33, "Qwen/Qwen3-8B": 33, "bigscience/bloom": 30, "common-pile/comma-v0.1-1t": 34, "facebook/xglm-564M": 34, "google-bert/bert-base-multilingual-cased": 35, "google/byt5-small": 123, "google/gemma-2-2b": 33, "gpt2": 30, "meta-llama/Llama-3.2-1B": 33, "microsoft/Phi-3-mini-4k-instruct": 38, "mistralai/tekken": 33, "tiktoken/gpt-4o": 33, "tokenmonster/englishcode-32000-consistent-v1": 24} |
If x + 6 = 12, then x = 12 - 6 equals | ["8", "7", "6", "17"] | 2 | C | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 214-1.0 | 214 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 20, "Qwen/Qwen3-8B": 20, "bigscience/bloom": 14, "common-pile/comma-v0.1-1t": 18, "facebook/xglm-564M": 14, "google-bert/bert-base-multilingual-cased": 15, "google/byt5-small": 37, "google/gemma-2-2b": 20, "gpt2": 14, "meta-llama/Llama-3.2-1B": 18, "microsoft/Phi-3-mini-4k-instruct": 20, "mistralai/tekken": 20, "tiktoken/gpt-4o": 18, "tokenmonster/englishcode-32000-consistent-v1": 13} |
50% of 60 is | ["30", "25", "35", "40"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 215-1.0 | 215 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 8, "Qwen/Qwen3-8B": 8, "bigscience/bloom": 4, "common-pile/comma-v0.1-1t": 7, "facebook/xglm-564M": 4, "google-bert/bert-base-multilingual-cased": 5, "google/byt5-small": 12, "google/gemma-2-2b": 8, "gpt2": 5, "meta-llama/Llama-3.2-1B": 6, "microsoft/Phi-3-mini-4k-instruct": 9, "mistralai/tekken": 8, "tiktoken/gpt-4o": 6, "tokenmonster/englishcode-32000-consistent-v1": 5} |
5,000,000,000 / 1,000 is equal to | ["50,000,000", "5,000,000", "1,000", "500,000"] | 1 | B | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 216-1.0 | 216 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 23, "Qwen/Qwen3-8B": 23, "bigscience/bloom": 14, "common-pile/comma-v0.1-1t": 17, "facebook/xglm-564M": 7, "google-bert/bert-base-multilingual-cased": 14, "google/byt5-small": 33, "google/gemma-2-2b": 23, "gpt2": 14, "meta-llama/Llama-3.2-1B": 15, "microsoft/Phi-3-mini-4k-instruct": 24, "mistralai/tekken": 23, "tiktoken/gpt-4o": 15, "tokenmonster/englishcode-32000-consistent-v1": 10} |
The value of 16^(1/2) is equal to | ["4", "3", "5", "6"] | 0 | A | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 217-1.0 | 217 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 14, "Qwen/Qwen3-8B": 14, "bigscience/bloom": 11, "common-pile/comma-v0.1-1t": 16, "facebook/xglm-564M": 11, "google-bert/bert-base-multilingual-cased": 13, "google/byt5-small": 33, "google/gemma-2-2b": 14, "gpt2": 13, "meta-llama/Llama-3.2-1B": 13, "microsoft/Phi-3-mini-4k-instruct": 14, "mistralai/tekken": 14, "tiktoken/gpt-4o": 13, "tokenmonster/englishcode-32000-consistent-v1": 9} |
Calculate 3.1416 × 2 | ["7.2831", "6.2832", "628.33", "62.832"] | 1 | B | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 218-1.0 | 218 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 11, "Qwen/Qwen3-8B": 11, "bigscience/bloom": 7, "common-pile/comma-v0.1-1t": 9, "facebook/xglm-564M": 6, "google-bert/bert-base-multilingual-cased": 8, "google/byt5-small": 21, "google/gemma-2-2b": 11, "gpt2": 9, "meta-llama/Llama-3.2-1B": 9, "microsoft/Phi-3-mini-4k-instruct": 12, "mistralai/tekken": 11, "tiktoken/gpt-4o": 9, "tokenmonster/englishcode-32000-consistent-v1": 9} |
The unit of volume is | ["m^2", "m^3", "m", "kg"] | 1 | B | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 219-1.0 | 219 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 5, "Qwen/Qwen3-8B": 5, "bigscience/bloom": 5, "common-pile/comma-v0.1-1t": 6, "facebook/xglm-564M": 5, "google-bert/bert-base-multilingual-cased": 5, "google/byt5-small": 21, "google/gemma-2-2b": 5, "gpt2": 5, "meta-llama/Llama-3.2-1B": 5, "microsoft/Phi-3-mini-4k-instruct": 5, "mistralai/tekken": 5, "tiktoken/gpt-4o": 5, "tokenmonster/englishcode-32000-consistent-v1": 5} |
The last digit of 1982354 is | ["1", "8", "4", "2"] | 2 | C | test | ASCII | Mathematical & Scientific Notation | eng_Latn | | | 220-1.0 | 220 | 1 | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 1, "Qwen/Qwen3-8B": 1, "bigscience/bloom": 1, "common-pile/comma-v0.1-1t": 1, "facebook/xglm-564M": 1, "google-bert/bert-base-multilingual-cased": 1, "google/byt5-small": 1, "google/gemma-2-2b": 1, "gpt2": 1, "meta-llama/Llama-3.2-1B": 1, "microsoft/Phi-3-mini-4k-instruct": 1, "mistralai/tekken": 1, "tiktoken/gpt-4o": 1, "tokenmonster/englishcode-32000-consistent-v1": 1} | {"CohereLabs/aya-expanse-8b": 13, "Qwen/Qwen3-8B": 13, "bigscience/bloom": 8, "common-pile/comma-v0.1-1t": 10, "facebook/xglm-564M": 7, "google-bert/bert-base-multilingual-cased": 9, "google/byt5-small": 28, "google/gemma-2-2b": 13, "gpt2": 8, "meta-llama/Llama-3.2-1B": 9, "microsoft/Phi-3-mini-4k-instruct": 13, "mistralai/tekken": 13, "tiktoken/gpt-4o": 9, "tokenmonster/englishcode-32000-consistent-v1": 8} |
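The three dict columns are keyed by the same 14 tokenizer names in every row. In the rows shown here, vanilla_cos_sim_to_canonical and trimmed_cos_sim_to_canonical are 1 for every tokenizer, while token_counts varies from row to row. How those counts were produced is not documented in this preview; the sketch below only illustrates one plausible way to count tokens for a question string with a few publicly loadable checkpoints (the choice of text, the special-token handling, and the checkpoint list are assumptions, and entries such as tiktoken/gpt-4o and mistralai/tekken would need their own tokenizer libraries).

```python
from transformers import AutoTokenizer

question = "7 + 7 is"  # first preview row above

# Only checkpoints that AutoTokenizer can load directly; the dataset's actual
# settings (special tokens, which fields were tokenized) are not stated here.
for name in ["gpt2", "google-bert/bert-base-multilingual-cased", "google/byt5-small"]:
    tok = AutoTokenizer.from_pretrained(name)
    ids = tok.encode(question, add_special_tokens=False)
    print(f"{name}: {len(ids)} tokens")
```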