gsaltintas committed · verified
Commit 7e2ec55 · 1 Parent(s): f4bec04

Uploading tokenizer_robustness_completion_english_macron_diacritic subset

README.md CHANGED
@@ -2240,6 +2240,130 @@ dataset_info:
     num_examples: 38
   download_size: 37855
   dataset_size: 20307
+- config_name: tokenizer_robustness_completion_english_macron_diacritic
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 25871
+    num_examples: 40
+  download_size: 42278
+  dataset_size: 25871
 configs:
 - config_name: tokenizer_robustness_completion_english_abbreviations
   data_files:
@@ -2313,6 +2437,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_english_lowercase/test-*
+- config_name: tokenizer_robustness_completion_english_macron_diacritic
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_english_macron_diacritic/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
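
Once this config entry is merged, the new subset can be loaded like any other config of the dataset. A minimal sketch, assuming the `datasets` library is installed; the repository id `gsaltintas/tokenizer-robustness` below is an assumption, since the actual repo id is not shown in this diff:

```python
from datasets import load_dataset

# Repo id is a placeholder; substitute the actual dataset repository.
ds = load_dataset(
    "gsaltintas/tokenizer-robustness",
    "tokenizer_robustness_completion_english_macron_diacritic",
    split="test",
)

print(ds.num_rows)                   # 40 examples, per the splits metadata above
print(ds.features["token_counts"])   # struct of per-tokenizer int64 counts

example = ds[0]
print(example["question"], example["choices"], example["answer_label"])
```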
tokenizer_robustness_completion_english_macron_diacritic/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:104173bc9f71d5594b316bc9fec3cd1b7dc21aeca4c9398c84e2202822271c43
+size 42278
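
The added parquet file is tracked with Git LFS, so the repository stores only the pointer above; the data itself is fetched on checkout or download. A small sketch, assuming the resolved (non-pointer) file has already been downloaded to the local path shown, that checks the download against the pointer's recorded sha256 and size:

```python
import hashlib
import os

# Local path to the resolved parquet file (assumed to mirror the repo layout).
path = "tokenizer_robustness_completion_english_macron_diacritic/test-00000-of-00001.parquet"

# Values copied from the LFS pointer above.
expected_sha256 = "104173bc9f71d5594b316bc9fec3cd1b7dc21aeca4c9398c84e2202822271c43"
expected_size = 42278

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest == expected_sha256, "sha256 mismatch"
print("parquet file matches the LFS pointer")
```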