gsaltintas committed on
Commit b981567 · verified · 1 Parent(s): e866eb4

Uploading tokenizer_robustness_completion_math_turkish subset

README.md CHANGED
@@ -1000,6 +1000,130 @@ dataset_info:
     num_examples: 21
   download_size: 34634
   dataset_size: 12129
+- config_name: tokenizer_robustness_completion_math_turkish
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 11339
+    num_examples: 21
+  download_size: 34650
+  dataset_size: 11339
 configs:
 - config_name: tokenizer_robustness_completion_math_canonical
   data_files:
@@ -1033,6 +1157,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_math_spelled_out/test-*
+- config_name: tokenizer_robustness_completion_math_turkish
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_math_turkish/test-*
 ---
 
 # Dataset Card for Tokenization Robustness Math
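For reference, a minimal sketch of loading the subset added by this commit with the `datasets` library. The repository id below is a placeholder assumption, since the commit page does not name the repo; the config name, split, and feature names come from the YAML above.

```python
from datasets import load_dataset

# Hypothetical repo id -- substitute the actual dataset repository.
REPO_ID = "gsaltintas/tokenizer-robustness-math"  # assumption, not from the commit

# Config added in this commit; it has a single "test" split with 21 examples.
ds = load_dataset(REPO_ID, "tokenizer_robustness_completion_math_turkish", split="test")

example = ds[0]
print(example["question"], example["choices"], example["answer_label"])
# Per-tokenizer fields are structs keyed by tokenizer name, e.g.:
print(example["token_counts"]["gpt2"])                      # int64 token count
print(example["vanilla_cos_sim_to_canonical"]["gpt2"])      # float64 similarity
```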
tokenizer_robustness_completion_math_turkish/test-00000-of-00001.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:eaa9ed03bc10d0ea3d9bd548fee0839f7f4f7fcef45af2dee81bac1b460c7362
-size 34647
+oid sha256:116a4ab11cfe7cb5bd5db2fbbea5a5ef199981724fbc91383740e9eea1e82ece
+size 34650
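The parquet file is stored via Git LFS, so the diff above only changes the pointer (content hash and byte size), not the file inline. A minimal sketch, assuming a locally downloaded copy of the file, of checking it against the updated pointer:

```python
import hashlib
from pathlib import Path

# Local path is an assumption -- point it at your copy of the parquet file.
path = Path("tokenizer_robustness_completion_math_turkish/test-00000-of-00001.parquet")

# Values copied from the updated LFS pointer in this commit.
expected_oid = "116a4ab11cfe7cb5bd5db2fbbea5a5ef199981724fbc91383740e9eea1e82ece"
expected_size = 34650

data = path.read_bytes()
assert len(data) == expected_size, f"size mismatch: {len(data)}"
assert hashlib.sha256(data).hexdigest() == expected_oid, "sha256 mismatch"
print("local file matches the LFS pointer")
```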