gsaltintas committed on
Commit 5b936bd · verified · parent: 6864cd6

Uploading tokenizer_robustness_completion_stem_space_removal subset

README.md CHANGED

@@ -1496,6 +1496,130 @@ dataset_info:
     num_examples: 18
   download_size: 31156
   dataset_size: 11267
+- config_name: tokenizer_robustness_completion_stem_space_removal
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: question_general_category
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 22025
+    num_examples: 41
+  download_size: 40316
+  dataset_size: 22025
 configs:
 - config_name: tokenizer_robustness_completion_stem_canonical
   data_files:
@@ -1545,6 +1669,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_stem_scripted_text/test-*
+- config_name: tokenizer_robustness_completion_stem_space_removal
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_stem_space_removal/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
tokenizer_robustness_completion_stem_space_removal/test-00000-of-00001.parquet ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f8b4a00d09b5b810870ba814ae0ec3477e9dc6f05f3e910fedb70ebbdfb1b4f
+size 40316
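
The new config is exposed as a standard Hugging Face dataset subset with a single `test` split (41 examples). Below is a minimal sketch of loading it with the `datasets` library; the repository id `gsaltintas/tokenizer-robustness` is a placeholder assumption, while the config name, split, and column names come from the card metadata in the diff above.

from datasets import load_dataset

# Placeholder repo id -- substitute the actual dataset path on the Hub.
ds = load_dataset(
    "gsaltintas/tokenizer-robustness",
    "tokenizer_robustness_completion_stem_space_removal",
    split="test",
)

print(ds.num_rows)  # 41 per the splits metadata
ex = ds[0]
print(ex["question"], ex["choices"], ex["answer_label"])

# Per-tokenizer diagnostics are struct columns, i.e. dicts keyed by tokenizer name.
print(ex["token_counts"]["gpt2"])
print(ex["vanilla_cos_sim_to_canonical"]["google/gemma-2-2b"])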