Modalities: Text · Formats: parquet · Languages: English · Libraries: Datasets, pandas
SaylorTwift (HF Staff) committed commit 717f8f1 (verified) · 1 parent: cdb6b3c

Add 'anatomy' config data files

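For context, a minimal sketch of how the new per-subject config would be used once this commit is merged. The repository ID "cais/mmlu" is an assumption; substitute the actual repo this commit belongs to.

```python
# Hedged sketch: load the newly added "anatomy" config with the datasets library.
# The repo ID "cais/mmlu" is an assumption, not confirmed by this commit.
from datasets import load_dataset

anatomy_test = load_dataset("cais/mmlu", "anatomy", split="test")
print(anatomy_test)           # expected: 135 examples per the split metadata below
print(anatomy_test.features)  # question, subject, choices, answer
```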
README.md CHANGED
@@ -88,6 +88,8 @@ dataset_info:
   features:
   - name: question
     dtype: string
+  - name: subject
+    dtype: string
   - name: choices
     sequence: string
   - name: answer
@@ -100,19 +102,19 @@ dataset_info:
           '3': D
   splits:
   - name: auxiliary_train
-    num_bytes: 160601377
+    num_bytes: 161000625
     num_examples: 99842
   - name: test
-    num_bytes: 33121
+    num_bytes: 34594
     num_examples: 135
   - name: validation
-    num_bytes: 3140
+    num_bytes: 3282
     num_examples: 14
   - name: dev
-    num_bytes: 967
+    num_bytes: 1010
     num_examples: 5
-  download_size: 166184960
-  dataset_size: 160638605
+  download_size: 47191229
+  dataset_size: 161039511
 - config_name: astronomy
   features:
   - name: question
@@ -1729,6 +1731,16 @@ configs:
     path: all/validation-*
   - split: dev
     path: all/dev-*
+- config_name: anatomy
+  data_files:
+  - split: auxiliary_train
+    path: anatomy/auxiliary_train-*
+  - split: test
+    path: anatomy/test-*
+  - split: validation
+    path: anatomy/validation-*
+  - split: dev
+    path: anatomy/dev-*
 ---
 
 # Dataset Card for MMLU
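Because the new config maps each split to parquet shards via glob patterns, the shards can also be read directly with pandas over the hf:// filesystem provided by huggingface_hub. A sketch, again assuming the repository ID is "cais/mmlu" and that the shard names match the pointer files added below:

```python
# Sketch: read one anatomy shard directly with pandas via the hf:// filesystem.
# Requires huggingface_hub (plus pyarrow/fsspec); the repo ID is an assumption.
import pandas as pd

url = "hf://datasets/cais/mmlu/anatomy/test-00000-of-00001.parquet"
df = pd.read_parquet(url)
print(df.shape)    # expected (135, 4): question, subject, choices, answer
print(df.head(2))
```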
anatomy/auxiliary_train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2782fc860f57d9345a9233ab04f494b0af5ae85b893a27853f7014b14a3bd07
+size 47163955
anatomy/dev-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e42aef33f442b00d95ac7eb84b2f582d95857662fcad4fc7885ab67823f5b9e3
+size 3014
anatomy/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2322eec0d77992b8ea97c6bef6a7c7696f1c83ed0125b8dab65eba889d9569c3
+size 19548
anatomy/validation-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6458974e0a2fb2e4885be0c27dc04a7091b9084869def767784ecce1a32ef56
+size 4712
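The four files above are Git LFS pointers, not the parquet data itself: each pointer records only the spec version, the SHA-256 object ID, and the byte size of the real file. The four sizes (47,163,955 + 19,548 + 3,014 + 4,712 bytes) add up to the 47,191,229-byte download_size recorded in the metadata below. A small sketch of parsing one pointer; the local path is hypothetical and assumes a checkout without LFS smudging.

```python
# Sketch: parse a Git LFS pointer file into its key/value fields.
# Pointer format: "version <url>", "oid sha256:<hex>", "size <bytes>".
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer("anatomy/test-00000-of-00001.parquet")  # hypothetical local path
print(pointer["oid"])        # sha256:2322eec0...
print(int(pointer["size"]))  # 19548
```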
dataset_infos.json CHANGED
@@ -77,39 +77,34 @@
     "features": {
       "question": {
         "dtype": "string",
-        "id": null,
+        "_type": "Value"
+      },
+      "subject": {
+        "dtype": "string",
         "_type": "Value"
       },
       "choices": {
         "feature": {
           "dtype": "string",
-          "id": null,
           "_type": "Value"
         },
-        "length": -1,
-        "id": null,
         "_type": "Sequence"
       },
       "answer": {
-        "num_classes": 4,
         "names": [
           "A",
           "B",
           "C",
           "D"
         ],
-        "id": null,
         "_type": "ClassLabel"
       }
     },
-    "post_processed": null,
-    "supervised_keys": null,
-    "task_templates": null,
-    "builder_name": "mmlu",
+    "builder_name": "parquet",
+    "dataset_name": "mmlu",
     "config_name": "anatomy",
     "version": {
       "version_str": "1.0.0",
-      "description": null,
       "major": 1,
       "minor": 0,
       "patch": 0
@@ -117,39 +112,32 @@
     "splits": {
       "auxiliary_train": {
         "name": "auxiliary_train",
-        "num_bytes": 160601257,
+        "num_bytes": 161000625,
         "num_examples": 99842,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "test": {
         "name": "test",
-        "num_bytes": 33109,
+        "num_bytes": 34594,
         "num_examples": 135,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "validation": {
         "name": "validation",
-        "num_bytes": 3128,
+        "num_bytes": 3282,
         "num_examples": 14,
-        "dataset_name": "mmlu"
+        "dataset_name": null
       },
       "dev": {
         "name": "dev",
-        "num_bytes": 955,
+        "num_bytes": 1010,
         "num_examples": 5,
-        "dataset_name": "mmlu"
-      }
-    },
-    "download_checksums": {
-      "data.tar": {
-        "num_bytes": 166184960,
-        "checksum": "bec563ba4bac1d6aaf04141cd7d1605d7a5ca833e38f994051e818489592989b"
+        "dataset_name": null
       }
     },
-    "download_size": 166184960,
-    "post_processing_size": null,
-    "dataset_size": 160638449,
-    "size_in_bytes": 326823409
+    "download_size": 47191229,
+    "dataset_size": 161039511,
+    "size_in_bytes": 208230740
   },
   "astronomy": {
     "description": "This is a massive multitask test consisting of multiple-choice questions from various branches of knowledge, covering 57 tasks including elementary mathematics, US history, computer science, law, and more.\n",