orionweller committed (verified) · Commit 2d39d13 · 1 parent: 5967eee

Add files using upload-large-folder tool
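
The commit message refers to the upload-large-folder tool. A minimal sketch of how such an upload is typically issued with `huggingface_hub` (assumption: this means the `HfApi.upload_large_folder` helper; the repository id and local folder below are hypothetical placeholders, not taken from this commit):

```python
# Hedged sketch: resumable, multi-worker upload of a large local folder
# to a Hugging Face dataset repo. Repo id and folder path are hypothetical.
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="<namespace>/<dataset-name>",  # hypothetical placeholder
    repo_type="dataset",
    folder_path="./train",                 # hypothetical local folder
)
```

If the command-line form was used instead, it is roughly `huggingface-cli upload-large-folder <repo-id> <local-path> --repo-type=dataset`.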

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full set of changes.
Files changed (50)
  1. train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  3. train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  4. train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0008-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  5. train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0008-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  6. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  7. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  8. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  9. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  10. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  11. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  12. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  13. train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/004_00000-batch_0030-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  14. train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  15. train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0011-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  16. train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  17. train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0034-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  18. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  19. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00536-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  20. train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00560-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  21. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  22. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  23. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  24. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  25. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  26. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  27. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00143-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  28. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00285-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  29. train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00285-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  30. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0027-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  31. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0057-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  32. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0057-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  33. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  34. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  35. train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  36. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00017-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  37. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00072-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  38. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00093-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  39. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  40. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  41. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  42. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  43. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  44. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  45. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00216-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  46. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00216-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  47. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00232-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  48. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00244-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  49. train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00254-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  50. train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106457, "hashes": {}}, "samples": 7403, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 5416873, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108004, "hashes": {}}, "samples": 7590, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5580469, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67106629, "hashes": {}}, "samples": 7873, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 5515097, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67106765, "hashes": {}}, "samples": 7754, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 5454729, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67108539, "hashes": {}}, "samples": 7164, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 5429896, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 67092607, "hashes": {}}, "samples": 7706, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 5637279, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00006.mds", "bytes": 67107495, "hashes": {}}, "samples": 7389, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00006.mds.zstd", "bytes": 5677783, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00007.mds", "bytes": 67105682, "hashes": {}}, "samples": 7178, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00007.mds.zstd", "bytes": 5649519, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00008.mds", "bytes": 2102710, "hashes": {}}, "samples": 227, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00008.mds.zstd", "bytes": 224277, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 133771444, "total_tokens_skipped": 162, "percentiles": {"0th": 121, "10th": 381, "20th": 554, "30th": 775, "40th": 1033, "50th": 1382, "60th": 1855, "70th": 2511, "80th": 3587, "90th": 5583, "95th": 8190, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0008-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67089448, "hashes": {}}, "samples": 7573, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8248693, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107024, "hashes": {}}, "samples": 8002, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8598662, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108470, "hashes": {}}, "samples": 7948, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 8628612, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67102541, "hashes": {}}, "samples": 8022, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 8927645, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 57909692, "hashes": {}}, "samples": 6240, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 7473724, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/amh_Ethi-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0008-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67101932, "hashes": {}}, "samples": 21166, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6410010, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107731, "hashes": {}}, "samples": 19166, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6488602, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67101746, "hashes": {}}, "samples": 19072, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6761803, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67106834, "hashes": {}}, "samples": 18876, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7033215, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 13465966, "hashes": {}}, "samples": 3974, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 1397163, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 69158429, "total_tokens_skipped": 125, "percentiles": {"0th": 91, "10th": 200, "20th": 269, "30th": 341, "40th": 424, "50th": 522, "60th": 651, "70th": 843, "80th": 1131, "90th": 1734, "95th": 2532, "99th": 5664, "100th": 8191}}
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0112-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:574bba5d3449b7ff5604b49578ff4d998460d3757f3acd8b6d908a8db19a818d
+ size 14514002
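
The shard entries above are Git LFS pointers (version/oid/size), so the diff shows only pointer metadata rather than the binary .mds contents. A hedged sketch of fetching one resolved shard with `huggingface_hub` (the repository id is a hypothetical placeholder; the filename is the path from this diff):

```python
# Hedged sketch: download one LFS-backed shard from the dataset repo.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="<namespace>/<dataset-name>",  # hypothetical placeholder
    repo_type="dataset",
    filename=(
        "train/fineweb2-sampled-ext-v2/"
        "bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/"
        "003_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/"
        "shard.00002.mds"
    ),
)
print(local_path)  # local path to the resolved ~14.5 MB shard, not the pointer
```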
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106172, "hashes": {}}, "samples": 22079, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12430077, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108072, "hashes": {}}, "samples": 20080, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13166529, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 4846, "hashes": {}}, "samples": 2, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 1297, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4d7a5c0874e8f95725c2017369f775181aa55d10dba0432aaef79780ab8768d
+ size 4846
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/003_00000-batch_0116-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 32882200, "total_tokens_skipped": 18, "percentiles": {"0th": 92, "10th": 191, "20th": 252, "30th": 319, "40th": 392, "50th": 484, "60th": 599, "70th": 756, "80th": 1013, "90th": 1536, "95th": 2273, "99th": 6041, "100th": 8191}}
train/fineweb2-sampled-ext-v2/bos_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/004_00000-batch_0030-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c019aaf42e4b80d13afa178c41ac45d7275e10ae7148020e7673e1ab3241459
+ size 35154087
train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f62651ae791ebdc9cff89e38b2b695bbff79bed637d7cc708fd600ae4951089
+ size 67104303
train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0011-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:950a6788219c25ea41be6d29f7407f044900b37d9b4e3510bc87124001bcd4eb
+ size 67103672
train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29036b7a037921ae7470da2b1f6a3b53fa51235b80b257db07f079d33ae8bf31
+ size 36219808
train/fineweb2-sampled-ext-v2/cym_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0034-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5611a175f6cc3c55137732cda20f6ad836baaf3059b189d906515da1222c7a1
+ size 67104510
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 67353779, "total_tokens_skipped": 8, "percentiles": {"0th": 60, "10th": 109, "20th": 143, "30th": 205, "40th": 280, "50th": 351, "60th": 461, "70th": 670, "80th": 987, "90th": 1731, "95th": 2889, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00536-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6c4e2dc5932a906bc4d8b59677b0b78c2e529778cac7df1dd03b773639b7d33
+ size 67105319
train/fineweb2-sampled-ext-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00560-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01e37d9dec4a570c2404533b0b0f0d812af9f8adf2bda6be888652f8157a9dba
+ size 67103678
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102147, "hashes": {}}, "samples": 17161, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6037948, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67104358, "hashes": {}}, "samples": 21648, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6533605, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67105750, "hashes": {}}, "samples": 18558, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6889065, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67090292, "hashes": {}}, "samples": 16980, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6405151, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 59829898, "hashes": {}}, "samples": 18699, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 5908086, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 80576425, "total_tokens_skipped": 285, "percentiles": {"0th": 59, "10th": 106, "20th": 165, "30th": 246, "40th": 316, "50th": 404, "60th": 551, "70th": 756, "80th": 1057, "90th": 1858, "95th": 3333, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108696, "hashes": {}}, "samples": 21281, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9032299, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108468, "hashes": {}}, "samples": 19966, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9043184, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67106253, "hashes": {}}, "samples": 20579, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9140616, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 5619786, "hashes": {}}, "samples": 1268, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 871119, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 50731465, "total_tokens_skipped": 288, "percentiles": {"0th": 58, "10th": 101, "20th": 146, "30th": 225, "40th": 299, "50th": 383, "60th": 536, "70th": 733, "80th": 1012, "90th": 1696, "95th": 2820, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00044-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00143-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00285-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102165, "hashes": {}}, "samples": 19047, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9370697, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67106109, "hashes": {}}, "samples": 20602, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9981490, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67099581, "hashes": {}}, "samples": 18551, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9859229, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 1032581, "hashes": {}}, "samples": 302, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 152374, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00285-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 49653631, "total_tokens_skipped": 0, "percentiles": {"0th": 63, "10th": 103, "20th": 157, "30th": 236, "40th": 309, "50th": 399, "60th": 558, "70th": 764, "80th": 1098, "90th": 1888, "95th": 3092, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0027-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:762c7ed2b89b52728033b5993f74e8dc6f9c9bde068252f48d88ccf74e8af6ad
+ size 5546460
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0057-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102749, "hashes": {}}, "samples": 10540, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7175344, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 63847171, "hashes": {}}, "samples": 9839, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5262421, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0057-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107181, "hashes": {}}, "samples": 11580, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8410662, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 53694817, "hashes": {}}, "samples": 8664, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7288468, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 29876567, "total_tokens_skipped": 0, "percentiles": {"0th": 151, "10th": 498, "20th": 677, "30th": 824, "40th": 963, "50th": 1105, "60th": 1272, "70th": 1493, "80th": 1860, "90th": 2815, "95th": 4048, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/guj_Gujr_train-sampled/batch_0123-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00017-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f21f1a80f51a0da1c77b4353e5673d0391f2e91b3c65dd0d5e9608ae35d2bfc
+ size 67105490
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00072-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57bd2937e2213da5a818dfdd1fefeb878b1a60e4d9f94f0f5dd68e241a9d7156
+ size 5391488
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00093-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc69a217bdd2fe9f46d41535eacfafe5f6cc282c6bffee86edf8431952ef0157
+ size 5181248
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67086317, "hashes": {}}, "samples": 22917, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12094701, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67088409, "hashes": {}}, "samples": 20843, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13111109, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 12104055, "hashes": {}}, "samples": 5467, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 1759876, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35786598, "total_tokens_skipped": 227, "percentiles": {"0th": 63, "10th": 102, "20th": 138, "30th": 198, "40th": 267, "50th": 331, "60th": 417, "70th": 618, "80th": 918, "90th": 1598, "95th": 2606, "99th": 8113, "100th": 8191}}
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00115-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67103661, "hashes": {}}, "samples": 22224, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9221796, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67094749, "hashes": {}}, "samples": 25655, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9031095, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 58933446, "hashes": {}}, "samples": 17977, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9430252, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 47235646, "total_tokens_skipped": 80, "percentiles": {"0th": 61, "10th": 103, "20th": 136, "30th": 194, "40th": 257, "50th": 317, "60th": 391, "70th": 557, "80th": 857, "90th": 1572, "95th": 2642, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00175-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00216-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b419c4260a7fb5a8f3e62a65ba9b6a28b5c13bf0c65c6f0a892cd9da305ee539
+ size 67099316
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00216-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9be5bb0ccecea7b31bca182d40289dd8c268a4380a7da3ce86150eb99b6eb8ad
+ size 64864532
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00232-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bf9983336aa75b083a595f241d349cb6467dcb31fb8ca66e33d5a76781d68ed
+ size 23274451
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00244-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:77455a72db3e2f12acc766bb003ee47ae07a6635dd97fef343d3b071f973bf62
+ size 67107242
train/fineweb2-sampled-ext-v2/ita_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00254-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6fed62523bfdbfc5ef3e219f15fbaeed640108c3cba437feb8e61e1ef0e237a
+ size 48064343
train/fineweb2-sampled-ext-v2/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107910, "hashes": {}}, "samples": 20005, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6142666, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105362, "hashes": {}}, "samples": 31641, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7163595, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67106728, "hashes": {}}, "samples": 26973, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7534624, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 28107388, "hashes": {}}, "samples": 12605, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 2904533, "hashes": {}}}], "version": 2}