gsaltintas committed on
Commit
9a05055
·
verified ·
1 Parent(s): cf79b72

Uploading tokenizer_robustness_completion_stem_enclosed_characters subset

Browse files
README.md CHANGED
@@ -752,6 +752,130 @@ dataset_info:
752
  num_examples: 18
753
  download_size: 31336
754
  dataset_size: 11237
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
755
  configs:
756
  - config_name: tokenizer_robustness_completion_stem_canonical
757
  data_files:
@@ -777,6 +901,10 @@ configs:
777
  data_files:
778
  - split: test
779
  path: tokenizer_robustness_completion_stem_double_struck/test-*
 
 
 
 
780
  ---
781
 
782
  # Dataset Card for Tokenization Robustness
 
752
  num_examples: 18
753
  download_size: 31336
754
  dataset_size: 11237
755
+ - config_name: tokenizer_robustness_completion_stem_enclosed_characters
756
+ features:
757
+ - name: question
758
+ dtype: string
759
+ - name: choices
760
+ list: string
761
+ - name: answer
762
+ dtype: int64
763
+ - name: answer_label
764
+ dtype: string
765
+ - name: split
766
+ dtype: string
767
+ - name: subcategories
768
+ dtype: string
769
+ - name: lang
770
+ dtype: string
771
+ - name: second_lang
772
+ dtype: string
773
+ - name: notes
774
+ dtype: string
775
+ - name: id
776
+ dtype: string
777
+ - name: set_id
778
+ dtype: string
779
+ - name: variation_id
780
+ dtype: string
781
+ - name: question_general_category
782
+ dtype: string
783
+ - name: vanilla_cos_sim_to_canonical
784
+ struct:
785
+ - name: CohereLabs/aya-expanse-8b
786
+ dtype: float64
787
+ - name: Qwen/Qwen3-8B
788
+ dtype: float64
789
+ - name: bigscience/bloom
790
+ dtype: float64
791
+ - name: common-pile/comma-v0.1-1t
792
+ dtype: float64
793
+ - name: facebook/xglm-564M
794
+ dtype: float64
795
+ - name: google-bert/bert-base-multilingual-cased
796
+ dtype: float64
797
+ - name: google/byt5-small
798
+ dtype: float64
799
+ - name: google/gemma-2-2b
800
+ dtype: float64
801
+ - name: gpt2
802
+ dtype: float64
803
+ - name: meta-llama/Llama-3.2-1B
804
+ dtype: float64
805
+ - name: microsoft/Phi-3-mini-4k-instruct
806
+ dtype: float64
807
+ - name: mistralai/tekken
808
+ dtype: float64
809
+ - name: tiktoken/gpt-4o
810
+ dtype: float64
811
+ - name: tokenmonster/englishcode-32000-consistent-v1
812
+ dtype: float64
813
+ - name: trimmed_cos_sim_to_canonical
814
+ struct:
815
+ - name: CohereLabs/aya-expanse-8b
816
+ dtype: float64
817
+ - name: Qwen/Qwen3-8B
818
+ dtype: float64
819
+ - name: bigscience/bloom
820
+ dtype: float64
821
+ - name: common-pile/comma-v0.1-1t
822
+ dtype: float64
823
+ - name: facebook/xglm-564M
824
+ dtype: float64
825
+ - name: google-bert/bert-base-multilingual-cased
826
+ dtype: float64
827
+ - name: google/byt5-small
828
+ dtype: float64
829
+ - name: google/gemma-2-2b
830
+ dtype: float64
831
+ - name: gpt2
832
+ dtype: float64
833
+ - name: meta-llama/Llama-3.2-1B
834
+ dtype: float64
835
+ - name: microsoft/Phi-3-mini-4k-instruct
836
+ dtype: float64
837
+ - name: mistralai/tekken
838
+ dtype: float64
839
+ - name: tiktoken/gpt-4o
840
+ dtype: float64
841
+ - name: tokenmonster/englishcode-32000-consistent-v1
842
+ dtype: float64
843
+ - name: token_counts
844
+ struct:
845
+ - name: CohereLabs/aya-expanse-8b
846
+ dtype: int64
847
+ - name: Qwen/Qwen3-8B
848
+ dtype: int64
849
+ - name: bigscience/bloom
850
+ dtype: int64
851
+ - name: common-pile/comma-v0.1-1t
852
+ dtype: int64
853
+ - name: facebook/xglm-564M
854
+ dtype: int64
855
+ - name: google-bert/bert-base-multilingual-cased
856
+ dtype: int64
857
+ - name: google/byt5-small
858
+ dtype: int64
859
+ - name: google/gemma-2-2b
860
+ dtype: int64
861
+ - name: gpt2
862
+ dtype: int64
863
+ - name: meta-llama/Llama-3.2-1B
864
+ dtype: int64
865
+ - name: microsoft/Phi-3-mini-4k-instruct
866
+ dtype: int64
867
+ - name: mistralai/tekken
868
+ dtype: int64
869
+ - name: tiktoken/gpt-4o
870
+ dtype: int64
871
+ - name: tokenmonster/englishcode-32000-consistent-v1
872
+ dtype: int64
873
+ splits:
874
+ - name: test
875
+ num_bytes: 25460
876
+ num_examples: 42
877
+ download_size: 32767
878
+ dataset_size: 25460
879
  configs:
880
  - config_name: tokenizer_robustness_completion_stem_canonical
881
  data_files:
 
901
  data_files:
902
  - split: test
903
  path: tokenizer_robustness_completion_stem_double_struck/test-*
904
+ - config_name: tokenizer_robustness_completion_stem_enclosed_characters
905
+ data_files:
906
+ - split: test
907
+ path: tokenizer_robustness_completion_stem_enclosed_characters/test-*
908
  ---
909
 
910
  # Dataset Card for Tokenization Robustness
tokenizer_robustness_completion_stem_enclosed_characters/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d7ab912f75d4cc800c8c469a5c63d77511c403d76a6dcf20e2c65b5335b49e2e
3
+ size 32767