Uploading tokenizer_robustness_completion_english_web_search_query subset
README.md CHANGED
@@ -3356,6 +3356,130 @@ dataset_info:
     num_examples: 40
   download_size: 41912
   dataset_size: 25705
+- config_name: tokenizer_robustness_completion_english_web_search_query
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 18991
+    num_examples: 37
+  download_size: 38294
+  dataset_size: 18991
 configs:
 - config_name: tokenizer_robustness_completion_english_abbreviations
   data_files:
@@ -3465,6 +3589,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_english_superscript_subscript_styling/test-*
+- config_name: tokenizer_robustness_completion_english_web_search_query
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_english_web_search_query/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
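The diff above registers the new `tokenizer_robustness_completion_english_web_search_query` config (37 test examples) alongside the existing subsets. A minimal sketch of loading it with the `datasets` library is shown below; the repository id is a placeholder, since the commit does not name the owning repo, so substitute the actual `<org>/<dataset>` path.

```python
# Minimal sketch (not part of this commit) for loading the subset added above.
# The repository id is an assumed placeholder; the config and split names come
# from the README diff itself.
from datasets import load_dataset

ds = load_dataset(
    "your-org/tokenization-robustness",  # placeholder repo id (assumption)
    "tokenizer_robustness_completion_english_web_search_query",
    split="test",
)

print(ds.num_rows)   # expected 37, per num_examples in the added dataset_info block
print(ds.features)   # question, choices, answer, ... plus the per-tokenizer struct columns
```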
tokenizer_robustness_completion_english_web_search_query/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a90ccc6edbe620c262dc86aa28571fc6a1ca29179e6162138510db63d9ff48b9
+size 38294
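The added file is a Git LFS pointer; the 38,294-byte Parquet payload itself lives in LFS storage. A minimal sketch of inspecting the data directly, assuming the repository has been cloned and `git lfs pull` has replaced the pointer with the real file:

```python
# Sketch only: read the newly added Parquet shard locally. Assumes a local
# checkout of the dataset repo with LFS objects fetched.
import pandas as pd

df = pd.read_parquet(
    "tokenizer_robustness_completion_english_web_search_query/test-00000-of-00001.parquet"
)

print(len(df))                     # 37 rows, matching num_examples in the README diff
print(df["token_counts"].iloc[0])  # per-tokenizer token counts for the first example
```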