orionweller committed
Commit 7f469c8 (verified) · Parent: 75e9de2

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full set.
Files changed (50):
  1. train/fineweb2-sampled-ext-v2/bel_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0031-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  2. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  3. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  4. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  5. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  6. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  7. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups-upsample-3e768dcc/shard.00001.mds +3 -0
  8. train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups-upsample-3254b4b0/shard.00000.mds +3 -0
  9. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  10. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00376-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  11. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00407-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  12. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00433-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  13. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  14. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  15. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  16. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  17. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  18. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  19. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00814-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  20. train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00889-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  21. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  22. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  23. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  24. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  25. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00033-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  26. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  27. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00266-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  28. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00268-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  29. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00279-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  30. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  31. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  32. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  33. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00349-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  34. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00097-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  35. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00097-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  36. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00380-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  37. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00380-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  38. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00386-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  39. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00386-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  40. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  41. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  42. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  43. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  44. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  45. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  46. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  47. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  48. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  49. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  50. train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0156-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
train/fineweb2-sampled-ext-v2/bel_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0031-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:796005c98b0eb39bfdcb8b8f0d3cb4f6ed6d60b5653df299fede6d323b299ca6
+ size 67107519
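
The shard.*.mds files are tracked with Git LFS, so each diff in this commit records only the three-line pointer (spec version, sha256 object id, byte size) rather than the binary shard itself. A minimal sketch, assuming nothing beyond the pointer format shown above; parse_lfs_pointer is our own illustrative helper, not part of the repo tooling:

```python
# Minimal sketch: parse a Git LFS pointer like the one added above.
def parse_lfs_pointer(text: str) -> dict:
    """Return the key/value fields of a 'key value' pointer file."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:796005c98b0eb39bfdcb8b8f0d3cb4f6ed6d60b5653df299fede6d323b299ca6\n"
    "size 67107519\n"
)
info = parse_lfs_pointer(pointer)
print(info["version"], info["oid"], int(info["size"]))
```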
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c247cba83dc596138d590d66f1cf41c0162c4fde3955fbad259232fd4bb764e
+ size 67105572
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45fe244e15901e9e7a6bff59fa37ab158d692d4ed75e2d8406611585fbe45546
+ size 67108707
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0ba01e33758c472be600b201193c4e35a43334addfffae7fa1e4092a3fce2af
+ size 67105992
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdc3c2ebf7d162379efcd6b77a590e4f553c44f5a7e3e24bd2d801d6dc335479
+ size 67107896
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a9d3fcd6768e305fe5f7dd32731c5557b9ed78f8ce441718438e785d041799d7
+ size 67098559
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups-upsample-3e768dcc/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73b93e1347e13eada42f730ade1c810fd4e04afc833c68a0ccc87792db0c0def
+ size 46088553
train/fineweb2-sampled-ext-v2/ceb_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups-upsample-3254b4b0/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:caea853ab4955102b77c45973af4b51ca65a69a67ffa897b32f60b39289e62e7
+ size 67101245
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 72679234, "total_tokens_skipped": 125, "percentiles": {"0th": 65, "10th": 174, "20th": 228, "30th": 287, "40th": 355, "50th": 453, "60th": 632, "70th": 872, "80th": 1268, "90th": 2131, "95th": 3368, "99th": 7954, "100th": 8191}}
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00376-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc298c3992dcb4708a9e39f8f5145938c647eae19e4ef41716d34b82b4e65bb5
+ size 16221343
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00407-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00433-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a361e12018abfc238b523cc90f6afc8f3a65d5636b4070263e8db01091e94064
+ size 17227674
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108377, "hashes": {}}, "samples": 18943, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 19318991, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 49817676, "hashes": {}}, "samples": 13712, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14261401, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 28710430, "total_tokens_skipped": 26, "percentiles": {"0th": 62, "10th": 196, "20th": 265, "30th": 325, "40th": 387, "50th": 484, "60th": 640, "70th": 830, "80th": 1137, "90th": 1879, "95th": 2867, "99th": 7485, "100th": 8191}}
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00667-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108417, "hashes": {}}, "samples": 16204, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10917003, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67077853, "hashes": {}}, "samples": 15803, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11476471, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67094424, "hashes": {}}, "samples": 15846, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 11842882, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 34105612, "hashes": {}}, "samples": 8382, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 5953421, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 57949229, "total_tokens_skipped": 10, "percentiles": {"0th": 60, "10th": 188, "20th": 265, "30th": 334, "40th": 416, "50th": 557, "60th": 735, "70th": 978, "80th": 1416, "90th": 2376, "95th": 3551, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00687-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00814-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f7ce5fce47dcfeca6bd4f3d536110b77cc42ac9cf33780e2388892ac7567f34
+ size 67077460
train/fineweb2-sampled-ext-v2/cmn_Hani-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00889-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4790ade132d8573d93b13100d84d121e6c43261286896cd858d44e0492c0691
+ size 67108607
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc27c1764ded840ec32f0068b7927af0f40ca6517b18afa71030c87c66a1039c
+ size 67106907
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34f445b14f8561f0ff429ad0859ec33b5e30a9200f71c0001fae6748c9390977
+ size 67106181
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d93b75bd6768b8793da510d0fde33d3b038ffeef41b6f34fa62be1580311e48f
+ size 67092973
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b932dd312f68c6d61eaa3830e55b1c40ad8d256acdd645e31968a1abb70547b
+ size 67106483
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00033-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81a65c8adc7ca9db3bf63a8521145f81fe80552ec1aff23165ffe459154ef16b
+ size 67107568
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5aa139718cbb1b18fb14d91b1cdfedb24b5c34f8e54090a7e4a3a72fbc27edf
+ size 67108698
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00266-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7977e6e340c77916d63d1877f160c33231066d8d64eca31b52bf939d5834066f
+ size 42515564
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00268-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b594beb08dc34a763e252e8c06a7e8609f1ebc3aa49302ec32e621a636edecd
+ size 47668504
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00279-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3947b8ce9416cf92484bb6dde7667608a9e4287b56c9af661a991322cf90c8e1
+ size 64804444
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67093134, "hashes": {}}, "samples": 18281, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 18806123, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 49729849, "hashes": {}}, "samples": 13325, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14721610, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 28702123, "total_tokens_skipped": 0, "percentiles": {"0th": 60, "10th": 115, "20th": 184, "30th": 279, "40th": 363, "50th": 499, "60th": 673, "70th": 894, "80th": 1254, "90th": 2037, "95th": 3074, "99th": 7979, "100th": 8191}}
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00303-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00349-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:180c334868cdce6f03f1555d31ef566f5ce4bcfd012003bb1cd21504c8db6f9d
+ size 13126715
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00097-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108312, "hashes": {}}, "samples": 25406, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12709370, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 55490161, "hashes": {}}, "samples": 21885, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10954444, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00097-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 29898329, "total_tokens_skipped": 21, "percentiles": {"0th": 47, "10th": 110, "20th": 155, "30th": 189, "40th": 226, "50th": 274, "60th": 334, "70th": 442, "80th": 757, "90th": 1397, "95th": 2233, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00380-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108671, "hashes": {}}, "samples": 27203, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11386153, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67096581, "hashes": {}}, "samples": 26163, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11541696, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 3634639, "hashes": {}}, "samples": 742, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 512830, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00380-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 33600153, "total_tokens_skipped": 0, "percentiles": {"0th": 45, "10th": 111, "20th": 157, "30th": 191, "40th": 232, "50th": 284, "60th": 350, "70th": 510, "80th": 822, "90th": 1454, "95th": 2193, "99th": 5389, "100th": 8191}}
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00386-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 27892607, "total_tokens_skipped": 26, "percentiles": {"0th": 41, "10th": 109, "20th": 157, "30th": 192, "40th": 233, "50th": 283, "60th": 349, "70th": 510, "80th": 842, "90th": 1490, "95th": 2307, "99th": 6064, "100th": 8191}}
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00386-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108154, "hashes": {}}, "samples": 9987, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 4755611, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67079888, "hashes": {}}, "samples": 9437, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5335352, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 9997537, "hashes": {}}, "samples": 1200, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 644955, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35716419, "total_tokens_skipped": 80, "percentiles": {"0th": 161, "10th": 344, "20th": 456, "30th": 586, "40th": 752, "50th": 967, "60th": 1272, "70th": 1785, "80th": 2634, "90th": 4332, "95th": 6380, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0019-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67104109, "hashes": {}}, "samples": 10666, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 5291280, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 64874961, "hashes": {}}, "samples": 9880, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5708332, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 32666124, "total_tokens_skipped": 255, "percentiles": {"0th": 167, "10th": 333, "20th": 433, "30th": 548, "40th": 669, "50th": 845, "60th": 1133, "70th": 1562, "80th": 2356, "90th": 3947, "95th": 5884, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0084-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106890, "hashes": {}}, "samples": 11035, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7511948, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 58445920, "hashes": {}}, "samples": 9330, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6073712, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 31062466, "total_tokens_skipped": 104, "percentiles": {"0th": 147, "10th": 327, "20th": 432, "30th": 541, "40th": 671, "50th": 839, "60th": 1094, "70th": 1539, "80th": 2309, "90th": 3719, "95th": 5228, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0124-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/khk_Cyrl_train-sampled/batch_0156-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106799, "hashes": {}}, "samples": 11512, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9452509, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 52587042, "hashes": {}}, "samples": 8836, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7260453, "hashes": {}}}], "version": 2}