orionweller committed
Commit ed2667f · verified · 1 Parent(s): 4eab215

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00006.mds +3 -0
  3. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  4. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  5. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  6. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  7. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  8. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  9. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  10. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  11. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  12. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  13. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  14. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  15. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  16. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  17. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  18. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  19. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  20. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  21. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  22. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  23. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  24. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  25. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  26. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  27. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  28. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  29. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  30. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  31. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  32. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  33. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  34. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  35. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  36. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  37. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  38. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  39. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  40. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  41. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  42. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  43. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  44. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  45. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  46. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  47. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  48. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  49. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  50. train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67078637, "hashes": {}}, "samples": 10293, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6652558, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107867, "hashes": {}}, "samples": 14354, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5694267, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67098918, "hashes": {}}, "samples": 15721, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6034146, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67100354, "hashes": {}}, "samples": 14030, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6512240, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67084903, "hashes": {}}, "samples": 12821, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6751897, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 67088759, "hashes": {}}, "samples": 12570, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 7902078, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00006.mds", "bytes": 10086806, "hashes": {}}, "samples": 1988, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00006.mds.zstd", "bytes": 1197834, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00006.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7a7833f3cc4a7a63d4e3fc10c14579a51b1f5cfae9071db0dfbd2e5558cc94d
+ size 10086806
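
Note: each shard.*.mds entry in this diff is a Git LFS pointer rather than the binary itself: three "key value" lines giving the pointer spec version, the SHA-256 of the stored object, and its size in bytes (here 10086806, matching the raw_data size recorded in index.json). A minimal sketch of a pointer parser, with a hypothetical helper name and a filename taken from the diff:

# Sketch only: read the "key value" lines of a Git LFS pointer file.
def parse_lfs_pointer(path):
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

ptr = parse_lfs_pointer("shard.00006.mds")  # assumes the pointer text, not the resolved binary
print(ptr["oid"], int(ptr["size"]))
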
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 101858215, "total_tokens_skipped": 185, "percentiles": {"0th": 72, "10th": 141, "20th": 202, "30th": 284, "40th": 361, "50th": 445, "60th": 669, "70th": 1083, "80th": 1738, "90th": 3275, "95th": 6022, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a538351138257f506423c28c360c065ac2b190310c36a58117ec2b93b6f8744c
+ size 67108040
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16091dad4862141a47757c2548eef77d1427a4b7d64c889b8a22d41a71336ddf
+ size 67105774
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d264264e58163e9c1e3a914da67cac45d4452f1559722cc68acc60e98f8134f4
+ size 67104437
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee2797d98f8aad33541a9e07f422df7ec16cdc0b556dba8db69bfc42d958a7cc
+ size 67107814
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67084707, "hashes": {}}, "samples": 9642, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 5903144, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67104652, "hashes": {}}, "samples": 16088, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5737637, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67107438, "hashes": {}}, "samples": 15786, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6054155, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67094069, "hashes": {}}, "samples": 11982, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 5874247, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67105511, "hashes": {}}, "samples": 12424, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6749041, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 67101961, "hashes": {}}, "samples": 12366, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 7946204, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00006.mds", "bytes": 32078665, "hashes": {}}, "samples": 6663, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00006.mds.zstd", "bytes": 4077299, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 107315318, "total_tokens_skipped": 145, "percentiles": {"0th": 79, "10th": 140, "20th": 201, "30th": 282, "40th": 357, "50th": 436, "60th": 645, "70th": 1081, "80th": 1733, "90th": 3244, "95th": 6747, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f0a6e2ff1d4327494421bd93af5f9c714393848a8c220c5b733cb2b4de7dd7f
+ size 58529006
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106942, "hashes": {}}, "samples": 15386, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8924257, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107974, "hashes": {}}, "samples": 16220, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8080576, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67103744, "hashes": {}}, "samples": 16007, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 8655064, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67084913, "hashes": {}}, "samples": 13924, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 8506998, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 19092467, "hashes": {}}, "samples": 4108, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 1626836, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 70827955, "total_tokens_skipped": 25, "percentiles": {"0th": 78, "10th": 139, "20th": 197, "30th": 274, "40th": 348, "50th": 421, "60th": 571, "70th": 924, "80th": 1453, "90th": 2628, "95th": 4593, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b326c2758b023d6e55eb3ff5f01fb892af0cd15ba460f4ea02151361e63be05
+ size 67108167
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1eda4191703bd57424c0210eaceeacce328d698114d5d1565b8515feaa64c502
+ size 67095596
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4cd55304235639b1f2cd57fee81639ef27c78862faff596cb9405a5226fe0525
+ size 67108267
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0f02bae0e21b05411fb1db2ac9907e4dbe5528d3f0edef8002b72288a28e3e7
+ size 67108855
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00007-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a97d663bff9e2ecb20d9a46195ed40d6e300d85066d7df825c968f30b9a42b09
+ size 27216175
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102105, "hashes": {}}, "samples": 14399, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8788367, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107465, "hashes": {}}, "samples": 15571, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8536658, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67104668, "hashes": {}}, "samples": 16281, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7517130, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67082912, "hashes": {}}, "samples": 14197, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 9233584, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 21097831, "hashes": {}}, "samples": 5436, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 2252823, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:151f8f94e104506f4ecdba1c230f9976cc7eb37add46ef9a69e15b6a7c6a69a3
+ size 21097831
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 71323803, "total_tokens_skipped": 0, "percentiles": {"0th": 78, "10th": 139, "20th": 198, "30th": 279, "40th": 351, "50th": 428, "60th": 595, "70th": 942, "80th": 1527, "90th": 2662, "95th": 4494, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00008-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd5eb89405230d1b621e58ae3509c78734c79867e6b3a27ea0f2c96d6cd6e08a
+ size 67108286
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17a423a22fac300a108328abfa59b2045f29c515f8b4097cb7775da3474e06ec
+ size 67090176
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62376f4b934c25d81ea994c45a271d017d75cdcdad2811f5d829430bd34dd414
+ size 67107733
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2659a3ba7db46075d235a3a736c91cffeee6ea36d03f76577586d74a7cb75bd
+ size 67105206
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:367bee0d939fba6a6c5ea11f69221fe156cd3210cb8013046b67f92454de5d9a
+ size 67108539
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a51beac2a34bb61fe4ba08afede554ad7d4bb6b10366a25b53b9eae79d670133
+ size 67078803
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fdb04f9e9d77d50bc1d3c6ebfe4cd004deb4c155ec444835e714b972045190f
+ size 59905369
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108338, "hashes": {}}, "samples": 16235, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10856292, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105547, "hashes": {}}, "samples": 16279, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10959194, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67080750, "hashes": {}}, "samples": 14126, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9872398, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 22422977, "hashes": {}}, "samples": 4475, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 4083843, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 55114880, "total_tokens_skipped": 26, "percentiles": {"0th": 80, "10th": 139, "20th": 197, "30th": 275, "40th": 350, "50th": 422, "60th": 575, "70th": 914, "80th": 1434, "90th": 2563, "95th": 4647, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c857a4b615393cc84ed14b3e52efe34d469789ae4e823e6d85dd142f06189d59
+ size 67108417
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6470ec7da0b3265c05559bf76258d252a1fcb282b2cd4e7843471c90d6cee7a
+ size 67107555
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108349, "hashes": {}}, "samples": 16029, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 23144145, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 29446201, "hashes": {}}, "samples": 7051, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10149981, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 23770897, "total_tokens_skipped": 9, "percentiles": {"0th": 80, "10th": 137, "20th": 192, "30th": 266, "40th": 343, "50th": 418, "60th": 561, "70th": 895, "80th": 1386, "90th": 2376, "95th": 4161, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107629, "hashes": {}}, "samples": 16239, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 24123360, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 24046191, "hashes": {}}, "samples": 5623, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8946240, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 22440121, "total_tokens_skipped": 4, "percentiles": {"0th": 81, "10th": 137, "20th": 191, "30th": 267, "40th": 348, "50th": 427, "60th": 585, "70th": 900, "80th": 1370, "90th": 2373, "95th": 4069, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83cd4aededcaaed6dd89eb5ed99cafcd40e7859ad0c9ae760d5227ee34fea290
+ size 67106094
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec44c699fa2bef1c65e8d4811014f9719c2af004b8790d7ccb613d2d9466dbf3
+ size 23326535
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d9652779c04209711cde00277cb924d47310c5ad320bfe44dd785e6fd62937a
+ size 67087849
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faf61587e9a352df4895a7a083c3c1d25fbb636e727085af19dcfd1f53bc2732
+ size 67099126
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6736e2a6cbd9d9924b6e22c4beeb7133fe91be33c12b4bb12d7e32e2279a3fc
+ size 67095987
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60c5ec50b1938af22242caa6fee216c10d5b90a88302f9997953cc428665b3f5
+ size 67103025
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89df31e526075e0e841832154bcce81e1b58483076c1bbede77305fb71998c1b
+ size 67099473
train/fineweb2-sampled-ext-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:639ba625ecec456d869b8683e90c45765821be8291677f3ff723d1e3fa10d38c
+ size 67097398