gsaltintas committed
Commit ba33a3c · verified · 1 Parent(s): be62875

Uploading tokenizer_robustness_completion_english_similar_words subset

README.md CHANGED
@@ -2736,6 +2736,130 @@ dataset_info:
     num_examples: 40
   download_size: 42142
   dataset_size: 27700
+- config_name: tokenizer_robustness_completion_english_similar_words
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 42951
+    num_examples: 74
+  download_size: 47994
+  dataset_size: 42951
 configs:
 - config_name: tokenizer_robustness_completion_english_abbreviations
   data_files:
@@ -2825,6 +2949,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_english_scripted_text/test-*
+- config_name: tokenizer_robustness_completion_english_similar_words
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_english_similar_words/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
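Once this commit is merged, the new subset can be loaded like any other config of the dataset. The snippet below is a minimal sketch: the repository id is a placeholder (the diff does not state the full `namespace/dataset` path), while the config name, split, example count, and field names all come from the `dataset_info` entry added above.

```python
from datasets import load_dataset

REPO_ID = "gsaltintas/tokenization-robustness"  # placeholder repo id, not confirmed by this diff

# Config name and split are taken from the README entry added in this commit.
ds = load_dataset(
    REPO_ID,
    name="tokenizer_robustness_completion_english_similar_words",
    split="test",
)

print(len(ds))  # 74 examples, per the `splits` metadata above

example = ds[0]
print(example["question"], example["choices"], example["answer_label"])

# Per-tokenizer metrics are stored as structs keyed by tokenizer name.
print(example["token_counts"]["gpt2"])
print(example["vanilla_cos_sim_to_canonical"]["google/gemma-2-2b"])
```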
tokenizer_robustness_completion_english_similar_words/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b22fa21e29ff0f479158606828d622ae97a0f4b5c758c9ccc8a76ac0650fc37a
+size 47994
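The parquet shard itself is stored via Git LFS; the pointer above resolves to the file at the `data_files` path declared in the README. A minimal sketch for fetching and inspecting that shard directly, again assuming a placeholder repository id:

```python
import pandas as pd
from huggingface_hub import hf_hub_download

REPO_ID = "gsaltintas/tokenization-robustness"  # placeholder repo id, not confirmed by this diff

# Filename matches the data_files pattern added in the README diff.
local_path = hf_hub_download(
    repo_id=REPO_ID,
    repo_type="dataset",
    filename="tokenizer_robustness_completion_english_similar_words/test-00000-of-00001.parquet",
)

df = pd.read_parquet(local_path)       # the LFS pointer resolves to a ~48 KB parquet file
print(df.shape)                        # expected 74 rows, per the split metadata
print(df[["question", "answer_label"]].head())
```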