Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- .gitattributes +0 -0
 - index.json +0 -0
 - train/dolmino-math/index.json +0 -0
 - train/dolmino-math/num_tokens.json +4 -0
 - train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds +3 -0
 - train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds +3 -0
 - train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/shard.00001.mds +3 -0
 - train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds +3 -0
 - train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_468-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 - train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds +3 -0
 - train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/stats.json +1 -0
 - train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json +0 -0
 - train/dolmino-math/split_545-tokenized-chunked-8000-512-128-backfill-nodups/index.json +1 -0
 
    	
.gitattributes CHANGED
(diff too large to render)

index.json ADDED
(diff too large to render)

train/dolmino-math/index.json ADDED
(diff too large to render)

train/dolmino-math/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "num_tokens": 10408920460,
+  "num_skipped_tokens": 106350
+}
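The totals in num_tokens.json plausibly roll up the per-split stats.json files added in this commit; the following is a hypothetical Python sketch of that aggregation (the field correspondence is an assumption, not something the commit states):

# Hypothetical aggregation sketch -- not the dataset's actual tooling.
# Assumes each split's "total_tokens_written"/"total_tokens_skipped"
# roll up into num_tokens.json's "num_tokens"/"num_skipped_tokens".
import json
from pathlib import Path

total_written = total_skipped = 0
for stats_path in Path("train/dolmino-math").glob("split_*/stats.json"):
    stats = json.loads(stats_path.read_text())
    total_written += stats["total_tokens_written"]
    total_skipped += stats["total_tokens_skipped"]

print(json.dumps({"num_tokens": total_written,
                  "num_skipped_tokens": total_skipped}, indent=2))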
    	
train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 42167979, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 21708944, "hashes": {}}}], "version": 2}
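These index.json files follow MosaicML Streaming's MDS shard-index layout (column encodings, raw_data/zip_data, size_limit, version 2). A minimal sketch of reading a split with the mosaicml-streaming package, assuming the split directory (index.json plus its shard files) has been downloaded locally; only the directory path comes from this commit:

# Minimal sketch, assuming `pip install mosaicml-streaming` and a local
# copy of the split directory.
from streaming import StreamingDataset

local_dir = "train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups"
dataset = StreamingDataset(local=local_dir, shuffle=False)

sample = dataset[0]          # dict keyed by the index's column_names
print(sample["id"])          # "str" column
print(sample["input_ids"])   # "ndarray:uint16" column of token ids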
    	
train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 20532226, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 19, "20th": 19, "30th": 19, "40th": 452, "50th": 829, "60th": 933, "70th": 1036, "80th": 1162, "90th": 1364, "95th": 1558, "99th": 2063, "100th": 3661}}
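The percentiles block reads as per-sample token-length percentiles (0th = shortest sample, 100th = longest). A sketch, with hypothetical lengths, of how such a summary could be computed with numpy; this is illustrative, not the pipeline that produced these files:

# Illustrative only: recompute stats.json-style percentiles with numpy.
import json
import numpy as np

lengths = np.array([19, 452, 829, 1162, 3661])  # hypothetical per-sample token counts
points = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99, 100]
stats = {
    "total_duplicated_tokens": 0,
    "total_tokens_written": int(lengths.sum()),
    "total_tokens_skipped": 0,
    "percentiles": {f"{p}th": int(np.percentile(lengths, p)) for p in points},
}
print(json.dumps(stats))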
    	
train/dolmino-math/split_10-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)

train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26706920, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9569274, "hashes": {}}}], "version": 2}
    	
train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40edeabf6b1ab35fc22282dd90475b7cd48e4e5fcc241014eabac0f725cced25
+size 26706920
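Each shard.*.mds entry in this diff is a Git LFS pointer rather than the shard data itself: three lines giving the LFS spec version, the SHA-256 of the stored object, and its size in bytes. An illustrative parser for that pointer format (the helper function is hypothetical; the three-field layout is the documented LFS spec):

# Illustrative parser for the Git LFS pointer format shown above.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "oid_algo": algo,
            "oid": digest, "size": int(fields["size"])}

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:40edeabf6b1ab35fc22282dd90475b7cd48e4e5fcc241014eabac0f725cced25\n"
    "size 26706920\n"
)
print(parse_lfs_pointer(pointer))  # {'version': ..., 'oid_algo': 'sha256', ...}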
    	
train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 12785058, "total_tokens_skipped": 0, "percentiles": {"0th": 127, "10th": 248, "20th": 292, "30th": 332, "40th": 367, "50th": 404, "60th": 441, "70th": 482, "80th": 534, "90th": 621, "95th": 721, "99th": 964, "100th": 1199}}

train/dolmino-math/split_111-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26664267, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9578314, "hashes": {}}}], "version": 2}

train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 12763731, "total_tokens_skipped": 0, "percentiles": {"0th": 100, "10th": 249, "20th": 291, "30th": 332, "40th": 368, "50th": 403, "60th": 439, "70th": 482, "80th": 532, "90th": 620, "95th": 722, "99th": 956, "100th": 1224}}

train/dolmino-math/split_140-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26652213, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9575519, "hashes": {}}}], "version": 2}

train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 12757670, "total_tokens_skipped": 0, "percentiles": {"0th": 99, "10th": 249, "20th": 293, "30th": 332, "40th": 369, "50th": 404, "60th": 441, "70th": 480, "80th": 532, "90th": 619, "95th": 711, "99th": 958, "100th": 1189}}

train/dolmino-math/split_142-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25010021, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8830387, "hashes": {}}}], "version": 2}

train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c8aef5e3a987c76ecba6d2bb6009b84e70490a5094094b31b684b883b1f7ece
+size 25010021

train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 11937395, "total_tokens_skipped": 0, "percentiles": {"0th": 118, "10th": 230, "20th": 269, "30th": 304, "40th": 339, "50th": 375, "60th": 410, "70th": 451, "80th": 499, "90th": 581, "95th": 674, "99th": 966, "100th": 1218}}

train/dolmino-math/split_165-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25204679, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8870882, "hashes": {}}}], "version": 2}

train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 12034732, "total_tokens_skipped": 0, "percentiles": {"0th": 106, "10th": 229, "20th": 268, "30th": 306, "40th": 343, "50th": 378, "60th": 414, "70th": 454, "80th": 507, "90th": 592, "95th": 687, "99th": 961, "100th": 1222}}

train/dolmino-math/split_244-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 25312999, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8903287, "hashes": {}}}], "version": 2}

train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 12088835, "total_tokens_skipped": 0, "percentiles": {"0th": 98, "10th": 229, "20th": 270, "30th": 308, "40th": 345, "50th": 381, "60th": 418, "70th": 458, "80th": 508, "90th": 591, "95th": 685, "99th": 956, "100th": 1326}}

train/dolmino-math/split_346-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24789827, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9103271, "hashes": {}}}], "version": 2}

train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 11826493, "total_tokens_skipped": 0, "percentiles": {"0th": 89, "10th": 251, "20th": 284, "30th": 312, "40th": 339, "50th": 364, "60th": 394, "70th": 430, "80th": 480, "90th": 567, "95th": 663, "99th": 933, "100th": 1660}}

train/dolmino-math/split_373-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108833, "hashes": {}}, "samples": 29769, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 35406679, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 524837, "hashes": {}}, "samples": 231, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 282630, "hashes": {}}}], "version": 2}
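Among the diffs shown, this is the only split with two shards, and the index explains why: size_limit is 67108864 bytes (64 MiB), shard.00000.mds fills to 67108833 bytes with 29769 samples, so the remaining 231 samples (29769 + 231 = 30000, matching the other splits) evidently rolled over into shard.00001.mds.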
    	
train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cf03623dadd1d146573b7d257c1e454d0c1108b1d67a008d8658d10d7f16054
+size 524837

train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 33201621, "total_tokens_skipped": 0, "percentiles": {"0th": 361, "10th": 783, "20th": 861, "30th": 919, "40th": 978, "50th": 1040, "60th": 1108, "70th": 1198, "80th": 1318, "90th": 1509, "95th": 1721, "99th": 2217, "100th": 2988}}

train/dolmino-math/split_38-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 24048372, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8965874, "hashes": {}}}], "version": 2}

train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 11455669, "total_tokens_skipped": 0, "percentiles": {"0th": 86, "10th": 254, "20th": 283, "30th": 308, "40th": 332, "50th": 355, "60th": 381, "70th": 412, "80th": 456, "90th": 534, "95th": 623, "99th": 896, "100th": 1656}}

train/dolmino-math/split_397-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 23988942, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8945058, "hashes": {}}}], "version": 2}

train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ae6cba68b23f371f2d891595c8075cc60e045694c9fa73aa653dd72dd881b4b
+size 23988942

train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 11425961, "total_tokens_skipped": 0, "percentiles": {"0th": 92, "10th": 253, "20th": 283, "30th": 308, "40th": 332, "50th": 354, "60th": 380, "70th": 412, "80th": 457, "90th": 534, "95th": 619, "99th": 880, "100th": 1633}}

train/dolmino-math/split_427-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 23979374, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8931614, "hashes": {}}}], "version": 2}

train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 11421188, "total_tokens_skipped": 0, "percentiles": {"0th": 78, "10th": 253, "20th": 283, "30th": 308, "40th": 332, "50th": 354, "60th": 379, "70th": 410, "80th": 454, "90th": 534, "95th": 619, "99th": 894, "100th": 1653}}

train/dolmino-math/split_437-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_468-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22895940, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8257834, "hashes": {}}}], "version": 2}
    	
train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22840688, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8225527, "hashes": {}}}], "version": 2}

train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f51b30e117d7b38ea53c0b198e53a0738ff8a4c03d7b17172ffbdee7fa24c9db
+size 22840688

train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 10852126, "total_tokens_skipped": 0, "percentiles": {"0th": 144, "10th": 248, "20th": 274, "30th": 295, "40th": 316, "50th": 337, "60th": 359, "70th": 386, "80th": 425, "90th": 498, "95th": 580, "99th": 844, "100th": 1649}}

train/dolmino-math/split_483-tokenized-chunked-8000-512-128-backfill-nodups/token_decile.json ADDED
(diff too large to render)
    	
train/dolmino-math/split_545-tokenized-chunked-8000-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22950737, "hashes": {}}, "samples": 30000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8267905, "hashes": {}}}], "version": 2}