Column summary (from the dataset viewer): params (14 distinct values), data (25), task (66), step (int64, 0–69.4k), seed (5), chinchilla (1), tokens (int64, 0–100B), compute (float64, 0–706,209,840,435B), metrics (string, 769–1.41k chars).

params | data | task | step | seed | chinchilla | tokens | compute | metrics
---|---|---|---|---|---|---|---|---
90M | FineWeb-Pro | mmlu_moral_disputes | 1,250 | small aux 3 | 5xC | 409,600,000 | 240,713,662,464,000,000 |
{'predicted_index_raw': 1.8670520231213872, 'predicted_index_per_token': 2.0115606936416186, 'predicted_index_per_char': 1.8959537572254335, 'predicted_index_per_byte': 1.8959537572254335, 'predicted_index_uncond': 1.5722543352601157, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.20520231213872833, 'acc_per_token': 0.23121387283236994, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.21098265895953758, 'acc_uncond': 0.22832369942196531, 'no_answer': 0.0, 'sum_logits_corr': -41.12196636199951, 'logits_per_token_corr': -4.719279460624197, 'logits_per_char_corr': -0.9290391098872663, 'bits_per_byte_corr': 1.3403201166272187, 'correct_prob': 5.787857544419e-06, 'correct_prob_per_token': 0.020158527546356005, 'correct_prob_per_char': 0.4109997556095731, 'margin': -1.7619249876106177e-05, 'margin_per_token': -0.019238677392593228, 'margin_per_char': -0.08216945883793374, 'total_prob': 3.127459264053807e-05, 'total_prob_per_token': 0.08441704754383081, 'total_prob_per_char': 1.6851959416632605, 'uncond_correct_prob': 4.441751894429173e-07, 'uncond_correct_prob_per_token': 0.008498393003272875, 'uncond_correct_prob_per_char': 0.3507468070001957, 'uncond_total_prob': 3.3657225135587415e-06, 'norm_correct_prob': 0.2096356424970254, 'norm_correct_prob_per_token': 0.24435879971412103, 'norm_correct_prob_per_char': 0.24338173701513516, 'primary_metric': 0.20520231213872833}
90M | FineWeb-Pro | mmlu_moral_disputes | 1,250 | default | 5xC | 409,600,000 | 240,713,662,464,000,000 |
{'predicted_index_raw': 1.9104046242774566, 'predicted_index_per_token': 2.0, 'predicted_index_per_char': 1.9104046242774566, 'predicted_index_per_byte': 1.3034682080924855, 'predicted_index_uncond': 1.3988439306358382, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.22254335260115607, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.2861271676300578, 'acc_uncond': 0.23410404624277456, 'no_answer': 0.0, 'sum_logits_corr': -41.45398229808477, 'logits_per_token_corr': -4.815897791624004, 'logits_per_char_corr': -0.9428584788474769, 'logits_per_byte_corr': 1.3602572516943436, 'correct_prob': 3.3495261637162426e-06, 'correct_prob_per_token': 0.019513797908970332, 'correct_prob_per_char': 0.4062263840105976, 'margin': -2.4164799649249653e-05, 'margin_per_token': -0.01855273730487238, 'margin_per_char': -0.08242575929981354, 'total_prob': 3.164428420915687e-05, 'total_prob_per_token': 0.08267984287897351, 'total_prob_per_char': 1.6679109492726913, 'uncond_correct_prob': 9.596666395268003e-08, 'uncond_correct_prob_per_token': 0.008617074161008278, 'uncond_correct_prob_per_char': 0.3454751780620057, 'uncond_total_prob': 9.139887453043873e-07, 'norm_correct_prob': 0.21788186059417988, 'norm_correct_prob_per_token': 0.24381201391755397, 'norm_correct_prob_per_char': 0.2430494858380258, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 2,500 | small aux 2 | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 1.8410404624277457, 'predicted_index_per_token': 1.9421965317919074, 'predicted_index_per_char': 1.777456647398844, 'predicted_index_per_byte': 1.777456647398844, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22254335260115607, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.21098265895953758, 'acc_uncond': 0.24566473988439305, 'no_answer': 0.0, 'sum_logits_corr': -38.90182776809428, 'logits_per_token_corr': -4.517818636885099, 'logits_per_char_corr': -0.8882786469791637, 'bits_per_byte_corr': 1.2815151989253182, 'correct_prob': 6.016854470485879e-06, 'correct_prob_per_token': 0.023209073835652107, 'correct_prob_per_char': 0.4276806142086126, 'margin': -3.325783148078003e-05, 'margin_per_token': -0.019341799814037556, 'margin_per_char': -0.08091591164938397, 'total_prob': 4.8052132447316196e-05, 'total_prob_per_token': 0.09714202497616899, 'total_prob_per_char': 1.7510291592368017, 'uncond_correct_prob': 3.088181066807794e-07, 'uncond_correct_prob_per_token': 0.011369026482452473, 'uncond_correct_prob_per_char': 0.368190046367718, 'uncond_total_prob': 7.1338612478851165e-06, 'norm_correct_prob': 0.2163660143274092, 'norm_correct_prob_per_token': 0.24616091083116887, 'norm_correct_prob_per_char': 0.24350867753820388, 'primary_metric': 0.22254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 2,500 | small aux 3 | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 1.8410404624277457, 'predicted_index_per_token': 1.9277456647398843, 'predicted_index_per_char': 1.8179190751445087, 'predicted_index_per_byte': 1.8179190751445087, 'predicted_index_uncond': 1.4624277456647399, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.2254335260115607, 'acc_per_char': 0.20809248554913296, 'acc_per_byte': 0.20809248554913296, 'acc_uncond': 0.2023121387283237, 'no_answer': 0.0, 'sum_logits_corr': -39.44564745191894, 'logits_per_token_corr': -4.594622537374362, 'logits_per_char_corr': -0.900728915455475, 'bits_per_byte_corr': 1.2994771395138416, 'correct_prob': 3.1456447690773816e-06, 'correct_prob_per_token': 0.021653755220527298, 'correct_prob_per_char': 0.4214639972050224, 'margin': -1.702651933930437e-05, 'margin_per_token': -0.01873733372590331, 'margin_per_char': -0.07939056462399718, 'total_prob': 2.518748893199623e-05, 'total_prob_per_token': 0.09080121067424726, 'total_prob_per_char': 1.7202300304101257, 'uncond_correct_prob': 3.6350075895581764e-07, 'uncond_correct_prob_per_token': 0.011472936794947838, 'uncond_correct_prob_per_char': 0.37129025543917304, 'uncond_total_prob': 2.557471493759085e-06, 'norm_correct_prob': 0.22382345892634695, 'norm_correct_prob_per_token': 0.24783676083524406, 'norm_correct_prob_per_char': 0.2447835806785569, 'primary_metric': 0.21676300578034682}
90M | FineWeb-Pro | mmlu_moral_disputes | 2,500 | default | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 1.8526011560693643, 'predicted_index_per_token': 1.9277456647398843, 'predicted_index_per_char': 1.76878612716763, 'predicted_index_per_byte': 1.3323699421965318, 'predicted_index_uncond': 1.4277456647398843, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2023121387283237, 'acc_per_token': 0.21098265895953758, 'acc_per_char': 0.1994219653179191, 'acc_per_byte': 0.2832369942196532, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -38.90573327389756, 'logits_per_token_corr': -4.471297361974404, 'logits_per_char_corr': -0.8806058787489334, 'logits_per_byte_corr': 1.2704457342496667, 'correct_prob': 8.535180532173453e-06, 'correct_prob_per_token': 0.023823668081014382, 'correct_prob_per_char': 0.4307477048200885, 'margin': -5.163201679458056e-05, 'margin_per_token': -0.019781694352368425, 'margin_per_char': -0.07990040090740892, 'total_prob': 7.31965681536923e-05, 'total_prob_per_token': 0.09956681036904148, 'total_prob_per_char': 1.7605815406797651, 'uncond_correct_prob': 1.4136044421817767e-07, 'uncond_correct_prob_per_token': 0.010074023510789986, 'uncond_correct_prob_per_char': 0.35973300332140595, 'uncond_total_prob': 2.320731038202708e-06, 'norm_correct_prob': 0.2056128433208585, 'norm_correct_prob_per_token': 0.2476830927952537, 'norm_correct_prob_per_char': 0.244180063366919, 'primary_metric': 0.2023121387283237}
90M | FineWeb-Pro | mmlu_moral_disputes | 3,750 | small aux 2 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 1.8526011560693643, 'predicted_index_per_token': 1.8150289017341041, 'predicted_index_per_char': 1.69364161849711, 'predicted_index_per_byte': 1.69364161849711, 'predicted_index_uncond': 1.476878612716763, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21965317919075145, 'acc_per_token': 0.2254335260115607, 'acc_per_char': 0.2254335260115607, 'acc_per_byte': 0.2254335260115607, 'acc_uncond': 0.26878612716763006, 'no_answer': 0.0, 'sum_logits_corr': -39.52123283926462, 'logits_per_token_corr': -4.701034333604718, 'logits_per_char_corr': -0.9259556343709383, 'bits_per_byte_corr': 1.335871601791107, 'correct_prob': 2.991899288704034e-06, 'correct_prob_per_token': 0.021208255570482497, 'correct_prob_per_char': 0.4158137984857175, 'margin': -1.829190153081122e-05, 'margin_per_token': -0.01532458486829366, 'margin_per_char': -0.07772185410836865, 'total_prob': 2.6449194542733245e-05, 'total_prob_per_token': 0.08621557660265322, 'total_prob_per_char': 1.6985081440573975, 'uncond_correct_prob': 8.074780845865832e-08, 'uncond_correct_prob_per_token': 0.010525980694138437, 'uncond_correct_prob_per_char': 0.35840932068634057, 'uncond_total_prob': 1.008599998226806e-06, 'norm_correct_prob': 0.2126564618341935, 'norm_correct_prob_per_token': 0.24563004810950675, 'norm_correct_prob_per_char': 0.2438471179782233, 'primary_metric': 0.21965317919075145}
90M | FineWeb-Pro | mmlu_moral_disputes | 3,750 | small aux 3 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 1.861271676300578, 'predicted_index_per_token': 1.9682080924855492, 'predicted_index_per_char': 1.8034682080924855, 'predicted_index_per_byte': 1.8034682080924855, 'predicted_index_uncond': 1.4132947976878614, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22254335260115607, 'acc_per_token': 0.22254335260115607, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.1936416184971098, 'acc_uncond': 0.2138728323699422, 'no_answer': 0.0, 'sum_logits_corr': -38.59750338648096, 'logits_per_token_corr': -4.426647862877681, 'logits_per_char_corr': -0.8711114445745951, 'bits_per_byte_corr': 1.2567481611502926, 'correct_prob': 6.67633296476283e-06, 'correct_prob_per_token': 0.023767765736386177, 'correct_prob_per_char': 0.43320488397524637, 'margin': -1.4980133907733293e-05, 'margin_per_token': -0.02048737702351883, 'margin_per_char': -0.07851976892142842, 'total_prob': 3.1895314661761e-05, 'total_prob_per_token': 0.09993579512942832, 'total_prob_per_char': 1.7668905483623962, 'uncond_correct_prob': 2.0100185032864751e-07, 'uncond_correct_prob_per_token': 0.010929289674462961, 'uncond_correct_prob_per_char': 0.3639087578556889, 'uncond_total_prob': 1.8321331325513265e-06, 'norm_correct_prob': 0.22377019220438596, 'norm_correct_prob_per_token': 0.24793884243029096, 'norm_correct_prob_per_char': 0.24478528680132977, 'primary_metric': 0.22254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 3,750 | default | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 1.8583815028901733, 'predicted_index_per_token': 1.9653179190751444, 'predicted_index_per_char': 1.8583815028901733, 'predicted_index_per_byte': 1.3208092485549132, 'predicted_index_uncond': 1.4017341040462428, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.27167630057803466, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -38.42360086661543, 'logits_per_token_corr': -4.449077489938204, 'logits_per_char_corr': -0.8719595246153435, 'logits_per_byte_corr': 1.2579716820193578, 'correct_prob': 1.298247767196169e-05, 'correct_prob_per_token': 0.026199932021411375, 'correct_prob_per_char': 0.4347041390806672, 'margin': -2.962398639554616e-05, 'margin_per_token': -0.021682846831405363, 'margin_per_char': -0.0798312572654831, 'total_prob': 6.036380181032522e-05, 'total_prob_per_token': 0.10717758223100715, 'total_prob_per_char': 1.7708856313891597, 'uncond_correct_prob': 1.2287549665058326e-07, 'uncond_correct_prob_per_token': 0.011182619905337993, 'uncond_correct_prob_per_char': 0.3613117156292967, 'uncond_total_prob': 1.1827535294458527e-06, 'norm_correct_prob': 0.22188613066354776, 'norm_correct_prob_per_token': 0.25051716772301663, 'norm_correct_prob_per_char': 0.24525545917652689, 'primary_metric': 0.2138728323699422}
90M | FineWeb-Pro | mmlu_moral_disputes | 6,250 | small aux 2 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 1.8179190751445087, 'predicted_index_per_token': 1.8872832369942196, 'predicted_index_per_char': 1.783236994219653, 'predicted_index_per_byte': 1.783236994219653, 'predicted_index_uncond': 1.416184971098266, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.2543352601156069, 'acc_per_char': 0.1907514450867052, 'acc_per_byte': 0.1907514450867052, 'acc_uncond': 0.23121387283236994, 'no_answer': 0.0, 'sum_logits_corr': -36.81398921205818, 'logits_per_token_corr': -4.2120001537278835, 'logits_per_char_corr': -0.8279927555458656, 'bits_per_byte_corr': 1.194541042318866, 'correct_prob': 1.938380594346885e-05, 'correct_prob_per_token': 0.030085588313311287, 'correct_prob_per_char': 0.45283981464013884, 'margin': -0.00014751642178586336, 'margin_per_token': -0.023115447098311412, 'margin_per_char': -0.08077168076874816, 'total_prob': 0.0002012514328271687, 'total_prob_per_token': 0.12415821981506381, 'total_prob_per_char': 1.847002790447387, 'uncond_correct_prob': 8.398859621006168e-08, 'uncond_correct_prob_per_token': 0.010961960214126421, 'uncond_correct_prob_per_char': 0.3607904138192825, 'uncond_total_prob': 7.833291998663776e-07, 'norm_correct_prob': 0.2247076948543362, 'norm_correct_prob_per_token': 0.24762749931402955, 'norm_correct_prob_per_char': 0.24450496464623075, 'primary_metric': 0.23121387283236994}
90M | FineWeb-Pro | mmlu_moral_disputes | 6,250 | small aux 3 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 1.8092485549132948, 'predicted_index_per_token': 1.7976878612716762, 'predicted_index_per_char': 1.6589595375722543, 'predicted_index_per_byte': 1.6589595375722543, 'predicted_index_uncond': 1.329479768786127, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22254335260115607, 'acc_per_token': 0.21965317919075145, 'acc_per_char': 0.18497109826589594, 'acc_per_byte': 0.18497109826589594, 'acc_uncond': 0.21676300578034682, 'no_answer': 0.0, 'sum_logits_corr': -37.69007586330348, 'logits_per_token_corr': -4.343453271665478, 'logits_per_char_corr': -0.8571379205564468, 'bits_per_byte_corr': 1.2365886273455524, 'correct_prob': 5.787210339586561e-06, 'correct_prob_per_token': 0.024878484809435032, 'correct_prob_per_char': 0.4397452802196336, 'margin': -7.788301449463771e-05, 'margin_per_token': -0.01953770636135429, 'margin_per_char': -0.07888602732527308, 'total_prob': 9.51627241108353e-05, 'total_prob_per_token': 0.1039660580620198, 'total_prob_per_char': 1.7933639068877238, 'uncond_correct_prob': 1.2472711682570924e-07, 'uncond_correct_prob_per_token': 0.010644973180337221, 'uncond_correct_prob_per_char': 0.36027130979725475, 'uncond_total_prob': 1.214188423656951e-06, 'norm_correct_prob': 0.22531193470271627, 'norm_correct_prob_per_token': 0.24723243109008008, 'norm_correct_prob_per_char': 0.2448847284279277, 'primary_metric': 0.22254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 6,250 | default | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 1.8034682080924855, 'predicted_index_per_token': 1.9104046242774566, 'predicted_index_per_char': 1.73121387283237, 'predicted_index_per_byte': 1.3901734104046244, 'predicted_index_uncond': 1.2658959537572254, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.20809248554913296, 'acc_per_token': 0.21965317919075145, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.2976878612716763, 'acc_uncond': 0.22254335260115607, 'no_answer': 0.0, 'sum_logits_corr': -38.12935202934838, 'logits_per_token_corr': -4.443322069498267, 'logits_per_char_corr': -0.8714359789952881, 'logits_per_byte_corr': 1.257216365349624, 'correct_prob': 5.290458464679514e-06, 'correct_prob_per_token': 0.024651495643760835, 'correct_prob_per_char': 0.43407343270153165, 'margin': -3.39344359700913e-05, 'margin_per_token': -0.021164263873089874, 'margin_per_char': -0.08110200337868205, 'total_prob': 5.00042241544163e-05, 'total_prob_per_token': 0.10499954785839205, 'total_prob_per_char': 1.777180253906135, 'uncond_correct_prob': 1.2055015807976366e-07, 'uncond_correct_prob_per_token': 0.010882497477490753, 'uncond_correct_prob_per_char': 0.36107903524917323, 'uncond_total_prob': 9.978303567169986e-07, 'norm_correct_prob': 0.20994668713987957, 'norm_correct_prob_per_token': 0.24494477646471943, 'norm_correct_prob_per_char': 0.24401499882144842, 'primary_metric': 0.20809248554913296}
90M | FineWeb-Pro | mmlu_moral_disputes | 7,500 | small aux 2 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 1.73121387283237, 'predicted_index_per_token': 1.7080924855491328, 'predicted_index_per_char': 1.6184971098265897, 'predicted_index_per_byte': 1.6184971098265897, 'predicted_index_uncond': 1.2919075144508672, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.1907514450867052, 'acc_per_byte': 0.1907514450867052, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -36.80621705716745, 'logits_per_token_corr': -4.247325838002604, 'logits_per_char_corr': -0.8368390517345168, 'bits_per_byte_corr': 1.2073035499604774, 'correct_prob': 1.4241265999638968e-05, 'correct_prob_per_token': 0.027096647645271456, 'correct_prob_per_char': 0.44780226358383873, 'margin': -0.00013458841176685048, 'margin_per_token': -0.02061663112748447, 'margin_per_char': -0.07821520453782459, 'total_prob': 0.0001760156781594992, 'total_prob_per_token': 0.11386591609748324, 'total_prob_per_char': 1.8268084861334866, 'uncond_correct_prob': 4.253769357587346e-08, 'uncond_correct_prob_per_token': 0.00971550637606433, 'uncond_correct_prob_per_char': 0.3474183195320039, 'uncond_total_prob': 4.004433411704602e-07, 'norm_correct_prob': 0.2243872413881873, 'norm_correct_prob_per_token': 0.2467846282685914, 'norm_correct_prob_per_char': 0.2447600572005231, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 7,500 | small aux 3 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 1.8381502890173411, 'predicted_index_per_token': 1.754335260115607, 'predicted_index_per_char': 1.6416184971098267, 'predicted_index_per_byte': 1.6416184971098267, 'predicted_index_uncond': 1.3179190751445087, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21965317919075145, 'acc_per_token': 0.21098265895953758, 'acc_per_char': 0.18786127167630057, 'acc_per_byte': 0.18786127167630057, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -36.89014691011065, 'logits_per_token_corr': -4.23050610465203, 'logits_per_char_corr': -0.8331420691210882, 'bits_per_byte_corr': 1.2019699314778276, 'correct_prob': 1.379881870546159e-05, 'correct_prob_per_token': 0.02729184114352619, 'correct_prob_per_char': 0.4486151578350775, 'margin': -9.468043960680283e-05, 'margin_per_token': -0.01913972786722519, 'margin_per_char': -0.07791315686847036, 'total_prob': 0.0001341846721110454, 'total_prob_per_token': 0.11187127515032365, 'total_prob_per_char': 1.8260360294414406, 'uncond_correct_prob': 1.1199499544657256e-07, 'uncond_correct_prob_per_token': 0.010209716638546001, 'uncond_correct_prob_per_char': 0.355179488970516, 'uncond_total_prob': 8.803076180012638e-07, 'norm_correct_prob': 0.2227926654954726, 'norm_correct_prob_per_token': 0.24958319326831518, 'norm_correct_prob_per_char': 0.24523334748729975, 'primary_metric': 0.21965317919075145}
90M | FineWeb-Pro | mmlu_moral_disputes | 7,500 | default | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 1.800578034682081, 'predicted_index_per_token': 1.8526011560693643, 'predicted_index_per_char': 1.6647398843930636, 'predicted_index_per_byte': 1.4017341040462428, 'predicted_index_uncond': 1.222543352601156, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.23121387283236994, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.26878612716763006, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -37.40975761413574, 'logits_per_token_corr': -4.317564087532429, 'logits_per_char_corr': -0.8484240085516306, 'logits_per_byte_corr': 1.2240171097094525, 'correct_prob': 1.3852180530103167e-05, 'correct_prob_per_token': 0.026825441219886843, 'correct_prob_per_char': 0.4433900930683695, 'margin': -6.244393929079414e-05, 'margin_per_token': -0.02064288407197624, 'margin_per_char': -0.07884487067618813, 'total_prob': 0.00010078172891876432, 'total_prob_per_token': 0.11183987607535825, 'total_prob_per_char': 1.8102940403148575, 'uncond_correct_prob': 1.4556001896362997e-07, 'uncond_correct_prob_per_token': 0.011613333954486375, 'uncond_correct_prob_per_char': 0.3625338764017142, 'uncond_total_prob': 1.2139804444304553e-06, 'norm_correct_prob': 0.21985026901129456, 'norm_correct_prob_per_token': 0.2471258056388605, 'norm_correct_prob_per_char': 0.24448337766080214, 'primary_metric': 0.2138728323699422}
90M | FineWeb-Pro | mmlu_moral_disputes | 8,750 | small aux 2 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 1.8092485549132948, 'predicted_index_per_token': 1.8872832369942196, 'predicted_index_per_char': 1.7196531791907514, 'predicted_index_per_byte': 1.7196531791907514, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.20520231213872833, 'acc_per_char': 0.1791907514450867, 'acc_per_byte': 0.1791907514450867, 'acc_uncond': 0.27167630057803466, 'no_answer': 0.0, 'sum_logits_corr': -36.89717053264552, 'logits_per_token_corr': -4.238944509813808, 'logits_per_char_corr': -0.8360509675501842, 'bits_per_byte_corr': 1.206166584815937, 'correct_prob': 1.9410090020151353e-05, 'correct_prob_per_token': 0.027654130620104828, 'correct_prob_per_char': 0.44845801272983044, 'margin': -0.0001097200715830916, 'margin_per_token': -0.024460800504939213, 'margin_per_char': -0.08629400486903359, 'total_prob': 0.00017082761829748263, 'total_prob_per_token': 0.12048848855102277, 'total_prob_per_char': 1.8412526190939893, 'uncond_correct_prob': 1.1578198065201066e-07, 'uncond_correct_prob_per_token': 0.010892151383696296, 'uncond_correct_prob_per_char': 0.35721273130058845, 'uncond_total_prob': 1.207475677675476e-06, 'norm_correct_prob': 0.21960212506246526, 'norm_correct_prob_per_token': 0.24198078479499524, 'norm_correct_prob_per_char': 0.2431664780977527, 'primary_metric': 0.21676300578034682}
90M | FineWeb-Pro | mmlu_moral_disputes | 8,750 | small aux 3 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 1.8034682080924855, 'predicted_index_per_token': 1.8352601156069364, 'predicted_index_per_char': 1.722543352601156, 'predicted_index_per_byte': 1.722543352601156, 'predicted_index_uncond': 1.3757225433526012, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21965317919075145, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.18497109826589594, 'acc_per_byte': 0.18497109826589594, 'acc_uncond': 0.23410404624277456, 'no_answer': 0.0, 'sum_logits_corr': -37.45612367591417, 'logits_per_token_corr': -4.302995012205346, 'logits_per_char_corr': -0.8465213106311296, 'bits_per_byte_corr': 1.2212720968552335, 'correct_prob': 1.3219533182163119e-05, 'correct_prob_per_token': 0.026931543385635213, 'correct_prob_per_char': 0.44394637182324953, 'margin': -0.00012661725403580155, 'margin_per_token': -0.01971118655312241, 'margin_per_char': -0.07878203725760106, 'total_prob': 0.00016924853818923834, 'total_prob_per_token': 0.11039821210591015, 'total_prob_per_char': 1.8113971521501802, 'uncond_correct_prob': 8.049614282562567e-08, 'uncond_correct_prob_per_token': 0.010587204005693818, 'uncond_correct_prob_per_char': 0.3571044394870524, 'uncond_total_prob': 7.696750657355382e-07, 'norm_correct_prob': 0.22128139755190307, 'norm_correct_prob_per_token': 0.24782787435116815, 'norm_correct_prob_per_char': 0.2444684076291188, 'primary_metric': 0.21965317919075145}
90M | FineWeb-Pro | mmlu_moral_disputes | 8,750 | default | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 1.8583815028901733, 'predicted_index_per_token': 1.9335260115606936, 'predicted_index_per_char': 1.7572254335260116, 'predicted_index_per_byte': 1.3815028901734103, 'predicted_index_uncond': 1.323699421965318, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.20520231213872833, 'acc_per_byte': 0.30057803468208094, 'acc_uncond': 0.2398843930635838, 'no_answer': 0.0, 'sum_logits_corr': -37.5414705235145, 'logits_per_token_corr': -4.371451603790221, 'logits_per_char_corr': -0.859002127138849, 'logits_per_byte_corr': 1.2392781089371787, 'correct_prob': 8.998004688458995e-06, 'correct_prob_per_token': 0.02666125962056703, 'correct_prob_per_char': 0.4401828612184061, 'margin': -3.191913802170488e-05, 'margin_per_token': -0.022961970704692344, 'margin_per_char': -0.08174752353187492, 'total_prob': 5.7167826965762155e-05, 'total_prob_per_token': 0.11219801215125652, 'total_prob_per_char': 1.7953366133856374, 'uncond_correct_prob': 1.3238428158033596e-07, 'uncond_correct_prob_per_token': 0.011218203499851219, 'uncond_correct_prob_per_char': 0.36138750652954493, 'uncond_total_prob': 1.006256312858076e-06, 'norm_correct_prob': 0.22303947171461863, 'norm_correct_prob_per_token': 0.24675578474998694, 'norm_correct_prob_per_char': 0.24512534892074458, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 10,000 | small aux 2 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 1.7514450867052023, 'predicted_index_per_token': 1.6445086705202312, 'predicted_index_per_char': 1.5375722543352601, 'predicted_index_per_byte': 1.5375722543352601, 'predicted_index_uncond': 1.2485549132947977, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.2254335260115607, 'acc_per_char': 0.2138728323699422, 'acc_per_byte': 0.2138728323699422, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -36.40031872732791, 'logits_per_token_corr': -4.158836140980016, 'logits_per_char_corr': -0.8232102763586898, 'bits_per_byte_corr': 1.1876413833123678, 'correct_prob': 2.2796402721696133e-05, 'correct_prob_per_token': 0.02871247384650339, 'correct_prob_per_char': 0.4539970206986702, 'margin': -0.00021205515416580057, 'margin_per_token': -0.022436558667170082, 'margin_per_char': -0.0819104928159909, 'total_prob': 0.0002776126462034763, 'total_prob_per_token': 0.12165310153671428, 'total_prob_per_char': 1.8573993324762446, 'uncond_correct_prob': 9.447626817917484e-08, 'uncond_correct_prob_per_token': 0.01024112936655741, 'uncond_correct_prob_per_char': 0.355376049790435, 'uncond_total_prob': 9.353506304109907e-07, 'norm_correct_prob': 0.22441799022751177, 'norm_correct_prob_per_token': 0.2450403418024839, 'norm_correct_prob_per_char': 0.2439541037167107, 'primary_metric': 0.23121387283236994}
90M | FineWeb-Pro | mmlu_moral_disputes | 10,000 | small aux 3 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 1.7890173410404624, 'predicted_index_per_token': 1.8150289017341041, 'predicted_index_per_char': 1.6358381502890174, 'predicted_index_per_byte': 1.6358381502890174, 'predicted_index_uncond': 1.3815028901734103, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21965317919075145, 'acc_per_token': 0.2138728323699422, 'acc_per_char': 0.18786127167630057, 'acc_per_byte': 0.18786127167630057, 'acc_uncond': 0.23410404624277456, 'no_answer': 0.0, 'sum_logits_corr': -36.74608306113006, 'logits_per_token_corr': -4.1934633500164, 'logits_per_char_corr': -0.8251435243279188, 'bits_per_byte_corr': 1.1904304705703854, 'correct_prob': 2.0066478434755613e-05, 'correct_prob_per_token': 0.028485257895246332, 'correct_prob_per_char': 0.4520659853841144, 'margin': -8.405245305740266e-05, 'margin_per_token': -0.02269229071428977, 'margin_per_char': -0.08087248867914594, 'total_prob': 0.00013545610734783594, 'total_prob_per_token': 0.1193627926550024, 'total_prob_per_char': 1.8427757683518438, 'uncond_correct_prob': 1.5629741957665886e-07, 'uncond_correct_prob_per_token': 0.011413588052597389, 'uncond_correct_prob_per_char': 0.36800392304478924, 'uncond_total_prob': 1.2670053823773368e-06, 'norm_correct_prob': 0.22434030210162753, 'norm_correct_prob_per_token': 0.24702142655003276, 'norm_correct_prob_per_char': 0.2449996812978247, 'primary_metric': 0.21965317919075145}
90M | FineWeb-Pro | mmlu_moral_disputes | 10,000 | default | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 1.846820809248555, 'predicted_index_per_token': 1.829479768786127, 'predicted_index_per_char': 1.69364161849711, 'predicted_index_per_byte': 1.4682080924855492, 'predicted_index_uncond': 1.283236994219653, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.20520231213872833, 'acc_per_token': 0.23121387283236994, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.26011560693641617, 'acc_uncond': 0.22254335260115607, 'no_answer': 0.0, 'sum_logits_corr': -36.33767948674329, 'logits_per_token_corr': -4.18227847282999, 'logits_per_char_corr': -0.8224722314508963, 'logits_per_byte_corr': 1.1865766095839405, 'correct_prob': 1.7408880511001093e-05, 'correct_prob_per_token': 0.03009308318935353, 'correct_prob_per_char': 0.4542045092569551, 'margin': -0.0001152176749811008, 'margin_per_token': -0.01957641378145625, 'margin_per_char': -0.07526859951978644, 'total_prob': 0.00016982597916149217, 'total_prob_per_token': 0.1200264193387333, 'total_prob_per_char': 1.8432609297504923, 'uncond_correct_prob': 1.0413031954463618e-07, 'uncond_correct_prob_per_token': 0.011830089901980654, 'uncond_correct_prob_per_char': 0.36362866123780174, 'uncond_total_prob': 9.377724479003889e-07, 'norm_correct_prob': 0.21859521416145405, 'norm_correct_prob_per_token': 0.25295977880256065, 'norm_correct_prob_per_char': 0.2460592545741577, 'primary_metric': 0.20520231213872833}
90M | FineWeb-Pro | mmlu_moral_disputes | 11,250 | small aux 2 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 1.7485549132947977, 'predicted_index_per_token': 1.6647398843930636, 'predicted_index_per_char': 1.569364161849711, 'predicted_index_per_byte': 1.569364161849711, 'predicted_index_uncond': 1.3352601156069364, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.24855491329479767, 'acc_per_char': 0.20809248554913296, 'acc_per_byte': 0.20809248554913296, 'acc_uncond': 0.2398843930635838, 'no_answer': 0.0, 'sum_logits_corr': -35.63602989533044, 'logits_per_token_corr': -4.062433222769074, 'logits_per_char_corr': -0.8001379687493737, 'bits_per_byte_corr': 1.154355079542519, 'correct_prob': 5.14129814771371e-05, 'correct_prob_per_token': 0.03159218901057474, 'correct_prob_per_char': 0.4630939108565494, 'margin': -0.00015977048898772918, 'margin_per_token': -0.022480539438450884, 'margin_per_char': -0.08021821837947336, 'total_prob': 0.00029888627194471224, 'total_prob_per_token': 0.13158462235031415, 'total_prob_per_char': 1.8874052037992204, 'uncond_correct_prob': 5.857751846358145e-08, 'uncond_correct_prob_per_token': 0.01124350776234785, 'uncond_correct_prob_per_char': 0.3580151913417422, 'uncond_total_prob': 4.800003915284442e-07, 'norm_correct_prob': 0.22912783808890141, 'norm_correct_prob_per_token': 0.25211083349791363, 'norm_correct_prob_per_char': 0.2452825234535345, 'primary_metric': 0.23410404624277456}
90M | FineWeb-Pro | mmlu_moral_disputes | 11,250 | small aux 3 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 1.8728323699421965, 'predicted_index_per_token': 1.8554913294797688, 'predicted_index_per_char': 1.6907514450867052, 'predicted_index_per_byte': 1.6907514450867052, 'predicted_index_uncond': 1.369942196531792, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.21965317919075145, 'acc_per_char': 0.1907514450867052, 'acc_per_byte': 0.1907514450867052, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -37.280448666886784, 'logits_per_token_corr': -4.326587170961439, 'logits_per_char_corr': -0.850260371392592, 'bits_per_byte_corr': 1.2266664212733818, 'correct_prob': 8.149778128221744e-06, 'correct_prob_per_token': 0.027351036270050387, 'correct_prob_per_char': 0.4430010884113247, 'margin': -3.807356511054951e-05, 'margin_per_token': -0.020797241159081305, 'margin_per_char': -0.08025404414408625, 'total_prob': 6.251564190890695e-05, 'total_prob_per_token': 0.11320183437003642, 'total_prob_per_char': 1.8079769896535287, 'uncond_correct_prob': 8.000133252406543e-08, 'uncond_correct_prob_per_token': 0.010664822536189165, 'uncond_correct_prob_per_char': 0.35889112780743726, 'uncond_total_prob': 7.936343694853924e-07, 'norm_correct_prob': 0.223253753950151, 'norm_correct_prob_per_token': 0.24593268613080102, 'norm_correct_prob_per_char': 0.24474161753923596, 'primary_metric': 0.21676300578034682}
90M | FineWeb-Pro | mmlu_moral_disputes | 11,250 | default | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 1.7861271676300579, 'predicted_index_per_token': 1.8092485549132948, 'predicted_index_per_char': 1.6502890173410405, 'predicted_index_per_byte': 1.4508670520231215, 'predicted_index_uncond': 1.3728323699421965, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.23699421965317918, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.26878612716763006, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -36.520774540873624, 'logits_per_token_corr': -4.198937288862946, 'logits_per_char_corr': -0.8279879710572984, 'logits_per_byte_corr': 1.194534139760937, 'correct_prob': 2.104579531192756e-05, 'correct_prob_per_token': 0.029373572006722526, 'correct_prob_per_char': 0.4527082678415116, 'margin': -0.00014457519905898927, 'margin_per_token': -0.020691147604492807, 'margin_per_char': -0.0766224496481579, 'total_prob': 0.0002024694965704232, 'total_prob_per_token': 0.11989842070028647, 'total_prob_per_char': 1.8403194810341363, 'uncond_correct_prob': 2.622737922264574e-08, 'uncond_correct_prob_per_token': 0.009783963816967072, 'uncond_correct_prob_per_char': 0.34801046802662633, 'uncond_total_prob': 2.501444328062451e-07, 'norm_correct_prob': 0.21917443559328242, 'norm_correct_prob_per_token': 0.25089189536869494, 'norm_correct_prob_per_char': 0.2457356931514354, 'primary_metric': 0.2138728323699422}
90M | FineWeb-Pro | mmlu_moral_disputes | 12,500 | small aux 2 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 1.8150289017341041, 'predicted_index_per_token': 1.7890173410404624, 'predicted_index_per_char': 1.6791907514450868, 'predicted_index_per_byte': 1.6791907514450868, 'predicted_index_uncond': 1.369942196531792, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22832369942196531, 'acc_per_token': 0.23699421965317918, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.22254335260115607, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -36.187497830804375, 'logits_per_token_corr': -4.167361487038851, 'logits_per_char_corr': -0.8193690188083205, 'bits_per_byte_corr': 1.1820996200936689, 'correct_prob': 2.0600440995088827e-05, 'correct_prob_per_token': 0.030039621373506072, 'correct_prob_per_char': 0.4554078073704002, 'margin': -7.732381167613511e-05, 'margin_per_token': -0.021628324373832733, 'margin_per_char': -0.07927399988201408, 'total_prob': 0.00013904587371522514, 'total_prob_per_token': 0.12409391195194723, 'total_prob_per_char': 1.8572924441234024, 'uncond_correct_prob': 3.9273840292005146e-08, 'uncond_correct_prob_per_token': 0.010657516889534785, 'uncond_correct_prob_per_char': 0.35569209368308813, 'uncond_total_prob': 4.243097654408649e-07, 'norm_correct_prob': 0.22432125013884446, 'norm_correct_prob_per_token': 0.24802502543457886, 'norm_correct_prob_per_char': 0.24481477037501273, 'primary_metric': 0.22832369942196531}
90M | FineWeb-Pro | mmlu_moral_disputes | 12,500 | small aux 3 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 1.7803468208092486, 'predicted_index_per_token': 1.8439306358381502, 'predicted_index_per_char': 1.6618497109826589, 'predicted_index_per_byte': 1.6618497109826589, 'predicted_index_uncond': 1.3410404624277457, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.2254335260115607, 'acc_per_char': 0.18786127167630057, 'acc_per_byte': 0.18786127167630057, 'acc_uncond': 0.27167630057803466, 'no_answer': 0.0, 'sum_logits_corr': -35.79762658631871, 'logits_per_token_corr': -4.0485452456161575, 'logits_per_char_corr': -0.7973148449235548, 'bits_per_byte_corr': 1.150282172799192, 'correct_prob': 2.9849301429191206e-05, 'correct_prob_per_token': 0.03154107325779981, 'correct_prob_per_char': 0.4639058251408204, 'margin': -0.00011504850103635188, 'margin_per_token': -0.023663952744780378, 'margin_per_char': -0.07908082876583777, 'total_prob': 0.00019608375833712495, 'total_prob_per_token': 0.1307661808239569, 'total_prob_per_char': 1.8872765202191806, 'uncond_correct_prob': 8.464886137969483e-08, 'uncond_correct_prob_per_token': 0.010815761816267094, 'uncond_correct_prob_per_char': 0.36064221232435656, 'uncond_total_prob': 6.412814273639522e-07, 'norm_correct_prob': 0.23323739118790193, 'norm_correct_prob_per_token': 0.25050692137102054, 'norm_correct_prob_per_char': 0.2455269303560973, 'primary_metric': 0.23121387283236994}
90M | FineWeb-Pro | mmlu_moral_disputes | 12,500 | default | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 1.8323699421965318, 'predicted_index_per_token': 1.7745664739884393, 'predicted_index_per_char': 1.5867052023121386, 'predicted_index_per_byte': 1.4364161849710984, 'predicted_index_uncond': 1.2572254335260116, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.20520231213872833, 'acc_per_token': 0.23121387283236994, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.28901734104046245, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -36.301014147741945, 'logits_per_token_corr': -4.2026504849479345, 'logits_per_char_corr': -0.8252110361548137, 'logits_per_byte_corr': 1.190527869548248, 'correct_prob': 6.768656003382327e-06, 'correct_prob_per_token': 0.029258638899408825, 'correct_prob_per_char': 0.45286344549392277, 'margin': -7.44375785923154e-05, 'margin_per_token': -0.02133710579728085, 'margin_per_char': -0.07805325103610673, 'total_prob': 9.75655055702182e-05, 'total_prob_per_token': 0.12087336194433465, 'total_prob_per_char': 1.843075373368691, 'uncond_correct_prob': 7.166183587884686e-08, 'uncond_correct_prob_per_token': 0.011586326744834716, 'uncond_correct_prob_per_char': 0.363280797094656, 'uncond_total_prob': 8.263041360705087e-07, 'norm_correct_prob': 0.21354238955519045, 'norm_correct_prob_per_token': 0.2501924942289516, 'norm_correct_prob_per_char': 0.24562394840370594, 'primary_metric': 0.20520231213872833}
90M | FineWeb-Pro | mmlu_moral_disputes | 13,750 | small aux 2 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 1.716763005780347, 'predicted_index_per_token': 1.6011560693641618, 'predicted_index_per_char': 1.4710982658959537, 'predicted_index_per_byte': 1.4710982658959537, 'predicted_index_uncond': 1.3901734104046244, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22832369942196531, 'acc_per_token': 0.24277456647398843, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.22254335260115607, 'acc_uncond': 0.2745664739884393, 'no_answer': 0.0, 'sum_logits_corr': -35.097717950798874, 'logits_per_token_corr': -3.974236800916797, 'logits_per_char_corr': -0.7839805442583171, 'bits_per_byte_corr': 1.1310448433557172, 'correct_prob': 5.475996408013425e-05, 'correct_prob_per_token': 0.033149494503128936, 'correct_prob_per_char': 0.46960175181079333, 'margin': -0.00021650094252567332, 'margin_per_token': -0.021829796934742792, 'margin_per_char': -0.07485994900141016, 'total_prob': 0.0003610954322645793, 'total_prob_per_token': 0.1344665590151103, 'total_prob_per_char': 1.9052111716941047, 'uncond_correct_prob': 4.4508225391950196e-08, 'uncond_correct_prob_per_token': 0.01125447418351274, 'uncond_correct_prob_per_char': 0.36232105836097156, 'uncond_total_prob': 6.252158117101032e-07, 'norm_correct_prob': 0.23119672797937338, 'norm_correct_prob_per_token': 0.2537066216886085, 'norm_correct_prob_per_char': 0.2463858164969466, 'primary_metric': 0.22832369942196531}
90M | FineWeb-Pro | mmlu_moral_disputes | 13,750 | small aux 3 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 1.8323699421965318, 'predicted_index_per_token': 1.8121387283236994, 'predicted_index_per_char': 1.6560693641618498, 'predicted_index_per_byte': 1.6560693641618498, 'predicted_index_uncond': 1.3179190751445087, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.25722543352601157, 'acc_per_char': 0.23121387283236994, 'acc_per_byte': 0.23121387283236994, 'acc_uncond': 0.2658959537572254, 'no_answer': 0.0, 'sum_logits_corr': -35.91834256139105, 'logits_per_token_corr': -4.11700225913256, 'logits_per_char_corr': -0.8097727802205966, 'bits_per_byte_corr': 1.1682551742719627, 'correct_prob': 5.1300437108118154e-05, 'correct_prob_per_token': 0.03187214511334639, 'correct_prob_per_char': 0.46034287550483555, 'margin': -0.0002496633414899281, 'margin_per_token': -0.020551601479344664, 'margin_per_char': -0.07560672006021035, 'total_prob': 0.0003780749449813547, 'total_prob_per_token': 0.12632506476510424, 'total_prob_per_char': 1.8598698040143413, 'uncond_correct_prob': 3.842309750450701e-08, 'uncond_correct_prob_per_token': 0.01028972693448187, 'uncond_correct_prob_per_char': 0.3520833390442534, 'uncond_total_prob': 3.5331816308653505e-07, 'norm_correct_prob': 0.22924532739608489, 'norm_correct_prob_per_token': 0.25487506797755877, 'norm_correct_prob_per_char': 0.24722537068898767, 'primary_metric': 0.23121387283236994}
90M | FineWeb-Pro | mmlu_moral_disputes | 13,750 | default | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 1.7890173410404624, 'predicted_index_per_token': 1.7976878612716762, 'predicted_index_per_char': 1.6618497109826589, 'predicted_index_per_byte': 1.416184971098266, 'predicted_index_uncond': 1.2803468208092486, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.23410404624277456, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.2658959537572254, 'acc_uncond': 0.2658959537572254, 'no_answer': 0.0, 'sum_logits_corr': -35.39155343640057, 'logits_per_token_corr': -4.049609062125488, 'logits_per_char_corr': -0.7944276038179774, 'logits_per_byte_corr': 1.1461167643743215, 'correct_prob': 2.1204804743080465e-05, 'correct_prob_per_token': 0.033608150486445956, 'correct_prob_per_char': 0.4658664473063895, 'margin': -0.00011163620555728513, 'margin_per_token': -0.024132607883750642, 'margin_per_char': -0.07838609062817804, 'total_prob': 0.00017542318874770534, 'total_prob_per_token': 0.13715306528754753, 'total_prob_per_char': 1.8916645275735, 'uncond_correct_prob': 9.174869208054179e-08, 'uncond_correct_prob_per_token': 0.011818444287767446, 'uncond_correct_prob_per_char': 0.3636922184226909, 'uncond_total_prob': 6.511517997336487e-07, 'norm_correct_prob': 0.22285817363563218, 'norm_correct_prob_per_token': 0.2527214175092945, 'norm_correct_prob_per_char': 0.24610816439667993, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 15,000 | small aux 2 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 1.80635838150289, 'predicted_index_per_token': 1.7514450867052023, 'predicted_index_per_char': 1.6618497109826589, 'predicted_index_per_byte': 1.6618497109826589, 'predicted_index_uncond': 1.3757225433526012, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.24566473988439305, 'acc_per_token': 0.2543352601156069, 'acc_per_char': 0.21965317919075145, 'acc_per_byte': 0.21965317919075145, 'acc_uncond': 0.27167630057803466, 'no_answer': 0.0, 'sum_logits_corr': -35.68949997907429, 'logits_per_token_corr': -4.086213914203731, 'logits_per_char_corr': -0.8073291914335934, 'bits_per_byte_corr': 1.1647298208469785, 'correct_prob': 3.988966609096437e-05, 'correct_prob_per_token': 0.031157913405903497, 'correct_prob_per_char': 0.4606606739943957, 'margin': -0.00014540529945434506, 'margin_per_token': -0.02285254581757392, 'margin_per_char': -0.07703951843592084, 'total_prob': 0.0002401450184817641, 'total_prob_per_token': 0.12968658800358152, 'total_prob_per_char': 1.8775532472157888, 'uncond_correct_prob': 1.6370976655070883e-08, 'uncond_correct_prob_per_token': 0.01063777498648137, 'uncond_correct_prob_per_char': 0.3521719511551108, 'uncond_total_prob': 4.0096551749979133e-07, 'norm_correct_prob': 0.23317369093535062, 'norm_correct_prob_per_token': 0.2489077490634598, 'norm_correct_prob_per_char': 0.24504981119644295, 'primary_metric': 0.24566473988439305}
90M | FineWeb-Pro | mmlu_moral_disputes | 15,000 | small aux 3 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 1.8179190751445087, 'predicted_index_per_token': 1.8988439306358382, 'predicted_index_per_char': 1.800578034682081, 'predicted_index_per_byte': 1.800578034682081, 'predicted_index_uncond': 1.3641618497109826, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.22832369942196531, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.1936416184971098, 'acc_uncond': 0.22254335260115607, 'no_answer': 0.0, 'sum_logits_corr': -35.84864213011858, 'logits_per_token_corr': -4.062430108560365, 'logits_per_char_corr': -0.7977928890384987, 'bits_per_byte_corr': 1.150971844673148, 'correct_prob': 3.156044019752692e-05, 'correct_prob_per_token': 0.03309033883787946, 'correct_prob_per_char': 0.46458939491285534, 'margin': -0.0002783071923606673, 'margin_per_token': -0.027048125265198456, 'margin_per_char': -0.0826969332567261, 'total_prob': 0.00038112339348388483, 'total_prob_per_token': 0.13807172224397263, 'total_prob_per_char': 1.8930463122856356, 'uncond_correct_prob': 5.149133437139929e-08, 'uncond_correct_prob_per_token': 0.01032732959639479, 'uncond_correct_prob_per_char': 0.3548817574752998, 'uncond_total_prob': 3.8877863713658475e-07, 'norm_correct_prob': 0.22952664552090554, 'norm_correct_prob_per_token': 0.247897258268901, 'norm_correct_prob_per_char': 0.24535654917887537, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 15,000 | default | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 1.8121387283236994, 'predicted_index_per_token': 1.73121387283237, 'predicted_index_per_char': 1.6589595375722543, 'predicted_index_per_byte': 1.476878612716763, 'predicted_index_uncond': 1.2976878612716762, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2023121387283237, 'acc_per_token': 0.24855491329479767, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.2658959537572254, 'acc_uncond': 0.2543352601156069, 'no_answer': 0.0, 'sum_logits_corr': -35.824262999385766, 'logits_per_token_corr': -4.126402295940216, 'logits_per_char_corr': -0.8117549601034737, 'logits_per_byte_corr': 1.171114855359141, 'correct_prob': 2.921507792437098e-05, 'correct_prob_per_token': 0.03127345006041239, 'correct_prob_per_char': 0.4590150076983632, 'margin': -0.0001445812051939957, 'margin_per_token': -0.02338779391871943, 'margin_per_char': -0.07866375537560961, 'total_prob': 0.00022939028052566094, 'total_prob_per_token': 0.12853781183061003, 'total_prob_per_char': 1.8673789049582366, 'uncond_correct_prob': 1.0942198243398112e-07, 'uncond_correct_prob_per_token': 0.011716682605932399, 'uncond_correct_prob_per_char': 0.36324390038793025, 'uncond_total_prob': 7.687842787031209e-07, 'norm_correct_prob': 0.21126338476830997, 'norm_correct_prob_per_token': 0.25031341923391187, 'norm_correct_prob_per_char': 0.24542502510003153, 'primary_metric': 0.2023121387283237}
90M | FineWeb-Pro | mmlu_moral_disputes | 17,500 | small aux 2 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 |
{'predicted_index_raw': 1.777456647398844, 'predicted_index_per_token': 1.676300578034682, 'predicted_index_per_char': 1.5780346820809248, 'predicted_index_per_byte': 1.5780346820809248, 'predicted_index_uncond': 1.3150289017341041, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.24566473988439305, 'acc_per_char': 0.20520231213872833, 'acc_per_byte': 0.20520231213872833, 'acc_uncond': 0.2543352601156069, 'no_answer': 0.0, 'sum_logits_corr': -34.56821157201866, 'logits_per_token_corr': -3.9162620527595773, 'logits_per_char_corr': -0.7711808716685842, 'bits_per_byte_corr': 1.1125788191854937, 'correct_prob': 5.225711692402834e-05, 'correct_prob_per_token': 0.03558062640777654, 'correct_prob_per_char': 0.475589811391792, 'margin': -0.00023606129376936758, 'margin_per_token': -0.023806492330090568, 'margin_per_char': -0.07672092984269713, 'total_prob': 0.00038596393065671355, 'total_prob_per_token': 0.1451160118600723, 'total_prob_per_char': 1.934303473390382, 'uncond_correct_prob': 3.522431042876396e-08, 'uncond_correct_prob_per_token': 0.011366209880129126, 'uncond_correct_prob_per_char': 0.35929237788090856, 'uncond_total_prob': 5.656668977350806e-07, 'norm_correct_prob': 0.22446518517667294, 'norm_correct_prob_per_token': 0.2526272705845066, 'norm_correct_prob_per_char': 0.2457316516706181, 'primary_metric': 0.2138728323699422}
90M | FineWeb-Pro | mmlu_moral_disputes | 17,500 | small aux 3 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 |
{'predicted_index_raw': 1.8121387283236994, 'predicted_index_per_token': 1.8988439306358382, 'predicted_index_per_char': 1.69364161849711, 'predicted_index_per_byte': 1.69364161849711, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.23699421965317918, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.1936416184971098, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -35.295085904226134, 'logits_per_token_corr': -4.002823895495065, 'logits_per_char_corr': -0.7863278018444398, 'bits_per_byte_corr': 1.1344312202349078, 'correct_prob': 3.769637598647535e-05, 'correct_prob_per_token': 0.034505705705967483, 'correct_prob_per_char': 0.4694507999808472, 'margin': -0.00024343118780546742, 'margin_per_token': -0.02698291605405481, 'margin_per_char': -0.08028619663883993, 'total_prob': 0.00033870766668775816, 'total_prob_per_token': 0.14372651142180656, 'total_prob_per_char': 1.9073930113815585, 'uncond_correct_prob': 4.542314163753243e-08, 'uncond_correct_prob_per_token': 0.010843920679735334, 'uncond_correct_prob_per_char': 0.35662661669105855, 'uncond_total_prob': 5.210400370398624e-07, 'norm_correct_prob': 0.23641396841253334, 'norm_correct_prob_per_token': 0.25027343759290743, 'norm_correct_prob_per_char': 0.24615654400311227, 'primary_metric': 0.2254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 17,500 | default | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 |
{'predicted_index_raw': 1.8034682080924855, 'predicted_index_per_token': 1.7890173410404624, 'predicted_index_per_char': 1.653179190751445, 'predicted_index_per_byte': 1.407514450867052, 'predicted_index_uncond': 1.260115606936416, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22254335260115607, 'acc_per_token': 0.24277456647398843, 'acc_per_char': 0.21965317919075145, 'acc_per_byte': 0.2976878612716763, 'acc_uncond': 0.23410404624277456, 'no_answer': 0.0, 'sum_logits_corr': -35.04367573964113, 'logits_per_token_corr': -4.004317381901896, 'logits_per_char_corr': -0.7881822020703656, 'logits_per_byte_corr': 1.1371065542446763, 'correct_prob': 5.8090055281920095e-05, 'correct_prob_per_token': 0.03427756137846335, 'correct_prob_per_char': 0.4689962777665612, 'margin': -0.0003085801521359004, 'margin_per_token': -0.02464682120205172, 'margin_per_char': -0.07857608874496498, 'total_prob': 0.00046684405242932515, 'total_prob_per_token': 0.13961902426477332, 'total_prob_per_char': 1.9009008704645178, 'uncond_correct_prob': 5.736488527692531e-08, 'uncond_correct_prob_per_token': 0.011596795625077575, 'uncond_correct_prob_per_char': 0.35994228780289206, 'uncond_total_prob': 4.7192540042671636e-07, 'norm_correct_prob': 0.22079076224041308, 'norm_correct_prob_per_token': 0.25215508142130083, 'norm_correct_prob_per_char': 0.24644201061676754, 'primary_metric': 0.22254335260115607}
90M | FineWeb-Pro | mmlu_moral_disputes | 18,750 | small aux 2 | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 |
{'predicted_index_raw': 1.7947976878612717, 'predicted_index_per_token': 1.7138728323699421, 'predicted_index_per_char': 1.6242774566473988, 'predicted_index_per_byte': 1.6242774566473988, 'predicted_index_uncond': 1.3786127167630058, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23699421965317918, 'acc_per_token': 0.25722543352601157, 'acc_per_char': 0.23121387283236994, 'acc_per_byte': 0.23121387283236994, 'acc_uncond': 0.2630057803468208, 'no_answer': 0.0, 'sum_logits_corr': -34.512225731259825, 'logits_per_token_corr': -3.9175186085408105, 'logits_per_char_corr': -0.7708804288006462, 'bits_per_byte_corr': 1.112145371749849, 'correct_prob': 9.456753349061465e-05, 'correct_prob_per_token': 0.036197751001622244, 'correct_prob_per_char': 0.4763164274147966, 'margin': -0.00014103102622159508, 'margin_per_token': -0.024026120707151592, 'margin_per_char': -0.07661892764053693, 'total_prob': 0.0004099397752348952, 'total_prob_per_token': 0.14741284905632807, 'total_prob_per_char': 1.934354300015641, 'uncond_correct_prob': 6.706262816880394e-08, 'uncond_correct_prob_per_token': 0.01231619295204537, 'uncond_correct_prob_per_char': 0.3670136799752228, 'uncond_total_prob': 9.725053684141225e-07, 'norm_correct_prob': 0.22951129589728672, 'norm_correct_prob_per_token': 0.2549236396356108, 'norm_correct_prob_per_char': 0.2460671703302637, 'primary_metric': 0.23699421965317918}
90M | FineWeb-Pro | mmlu_moral_disputes | 18,750 | small aux 3 | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 |
{'predicted_index_raw': 1.7485549132947977, 'predicted_index_per_token': 1.869942196531792, 'predicted_index_per_char': 1.6560693641618498, 'predicted_index_per_byte': 1.6560693641618498, 'predicted_index_uncond': 1.323699421965318, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.2398843930635838, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.21098265895953758, 'acc_uncond': 0.2543352601156069, 'no_answer': 0.0, 'sum_logits_corr': -34.542556783367445, 'logits_per_token_corr': -3.8783223923449537, 'logits_per_char_corr': -0.7632816236065226, 'bits_per_byte_corr': 1.1011826131795976, 'correct_prob': 6.198287094038352e-05, 'correct_prob_per_token': 0.03689177200380987, 'correct_prob_per_char': 0.47934607136409296, 'margin': -0.00021002954809540303, 'margin_per_token': -0.024314573522308654, 'margin_per_char': -0.075875551379987, 'total_prob': 0.000367786990826312, 'total_prob_per_token': 0.14800021754592357, 'total_prob_per_char': 1.9428829510839045, 'uncond_correct_prob': 6.369487547724961e-08, 'uncond_correct_prob_per_token': 0.011470428190595657, 'uncond_correct_prob_per_char': 0.3636280564797825, 'uncond_total_prob': 4.182858016777604e-07, 'norm_correct_prob': 0.22718421532290248, 'norm_correct_prob_per_token': 0.2515473063788422, 'norm_correct_prob_per_char': 0.24640015424458953, 'primary_metric': 0.21676300578034682}
90M | FineWeb-Pro | mmlu_moral_disputes | 18,750 | default | 5xC | 6,144,000,000 | 3,610,704,936,960,000,000 |
{'predicted_index_raw': 1.745664739884393, 'predicted_index_per_token': 1.6849710982658959, 'predicted_index_per_char': 1.6329479768786128, 'predicted_index_per_byte': 1.4335260115606936, 'predicted_index_uncond': 1.2803468208092486, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.24277456647398843, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.2832369942196532, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -35.26785655931241, 'logits_per_token_corr': -4.050315120196467, 'logits_per_char_corr': -0.7950007361940139, 'logits_per_byte_corr': 1.1469436196110028, 'correct_prob': 3.588307278201513e-05, 'correct_prob_per_token': 0.033086390177186134, 'correct_prob_per_char': 0.4656442353273697, 'margin': -0.00016706634274771875, 'margin_per_token': -0.023092790570940585, 'margin_per_char': -0.07761368289770999, 'total_prob': 0.00028019821757545825, 'total_prob_per_token': 0.13583782545873876, 'total_prob_per_char': 1.8930680029668003, 'uncond_correct_prob': 3.923235681677628e-08, 'uncond_correct_prob_per_token': 0.011353348080635964, 'uncond_correct_prob_per_char': 0.3563486495470566, 'uncond_total_prob': 4.818654964802548e-07, 'norm_correct_prob': 0.21998721028640736, 'norm_correct_prob_per_token': 0.25190948023440524, 'norm_correct_prob_per_char': 0.24578319358866188, 'primary_metric': 0.2138728323699422}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 20,000 | small aux 2 | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 |
{'predicted_index_raw': 1.760115606936416, 'predicted_index_per_token': 1.699421965317919, 'predicted_index_per_char': 1.5549132947976878, 'predicted_index_per_byte': 1.5549132947976878, 'predicted_index_uncond': 1.3439306358381502, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21098265895953758, 'acc_per_token': 0.2543352601156069, 'acc_per_char': 0.21965317919075145, 'acc_per_byte': 0.21965317919075145, 'acc_uncond': 0.27167630057803466, 'no_answer': 0.0, 'sum_logits_corr': -34.015401803000124, 'logits_per_token_corr': -3.84137922289931, 'logits_per_char_corr': -0.7572497853032203, 'bits_per_byte_corr': 1.092480509971973, 'correct_prob': 7.850152082453821e-05, 'correct_prob_per_token': 0.037295089985762156, 'correct_prob_per_char': 0.48180186216287757, 'margin': -0.0003536402789543251, 'margin_per_token': -0.02405384083239159, 'margin_per_char': -0.07487025878386623, 'total_prob': 0.0006021620991086071, 'total_prob_per_token': 0.15103984676322024, 'total_prob_per_char': 1.9522008251212344, 'uncond_correct_prob': 1.2035049110586204e-07, 'uncond_correct_prob_per_token': 0.01261099130619991, 'uncond_correct_prob_per_char': 0.3659888248545629, 'uncond_total_prob': 1.3488556983495194e-06, 'norm_correct_prob': 0.22678857392464905, 'norm_correct_prob_per_token': 0.2545808795483821, 'norm_correct_prob_per_char': 0.24673248193168223, 'primary_metric': 0.21098265895953758}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 20,000 | small aux 3 | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 |
{'predicted_index_raw': 1.8208092485549132, 'predicted_index_per_token': 1.907514450867052, 'predicted_index_per_char': 1.7109826589595376, 'predicted_index_per_byte': 1.7109826589595376, 'predicted_index_uncond': 1.3901734104046244, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.23121387283236994, 'acc_per_char': 0.2023121387283237, 'acc_per_byte': 0.2023121387283237, 'acc_uncond': 0.24277456647398843, 'no_answer': 0.0, 'sum_logits_corr': -34.382245932011244, 'logits_per_token_corr': -3.8847029217117517, 'logits_per_char_corr': -0.7606279545373249, 'bits_per_byte_corr': 1.0973541779733031, 'correct_prob': 4.313477111922469e-05, 'correct_prob_per_token': 0.03866265529581288, 'correct_prob_per_char': 0.48113631405673707, 'margin': -0.00020108215094942803, 'margin_per_token': -0.025953316568732458, 'margin_per_char': -0.07754394453939417, 'total_prob': 0.00031725447443039693, 'total_prob_per_token': 0.15422405722935092, 'total_prob_per_char': 1.9483121384216202, 'uncond_correct_prob': 9.776779128431715e-08, 'uncond_correct_prob_per_token': 0.012058513778277841, 'uncond_correct_prob_per_char': 0.36813618407576687, 'uncond_total_prob': 7.773617334938122e-07, 'norm_correct_prob': 0.23351040189642586, 'norm_correct_prob_per_token': 0.2520808990375154, 'norm_correct_prob_per_char': 0.2466598026135637, 'primary_metric': 0.23410404624277456}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 20,000 | default | 5xC | 6,553,600,000 | 3,851,418,599,424,000,000 |
{'predicted_index_raw': 1.7514450867052023, 'predicted_index_per_token': 1.7023121387283238, 'predicted_index_per_char': 1.5289017341040463, 'predicted_index_per_byte': 1.4682080924855492, 'predicted_index_uncond': 1.2947976878612717, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.24855491329479767, 'acc_per_char': 0.1907514450867052, 'acc_per_byte': 0.2832369942196532, 'acc_uncond': 0.23699421965317918, 'no_answer': 0.0, 'sum_logits_corr': -34.701122261885274, 'logits_per_token_corr': -3.9474423640140124, 'logits_per_char_corr': -0.77589253877149, 'logits_per_byte_corr': 1.1193763179491805, 'correct_prob': 4.833747163056563e-05, 'correct_prob_per_token': 0.03445145230305853, 'correct_prob_per_char': 0.4729496852676392, 'margin': -0.0003785375472863224, 'margin_per_token': -0.025208116528470225, 'margin_per_char': -0.07889927456127983, 'total_prob': 0.0005178904554370433, 'total_prob_per_token': 0.14411953836180313, 'total_prob_per_char': 1.9235544628650703, 'uncond_correct_prob': 8.10151416957174e-08, 'uncond_correct_prob_per_token': 0.012021755638872611, 'uncond_correct_prob_per_char': 0.3638412902410921, 'uncond_total_prob': 7.478604698143196e-07, 'norm_correct_prob': 0.22021051012406478, 'norm_correct_prob_per_token': 0.25262999413270837, 'norm_correct_prob_per_char': 0.24577230860177307, 'primary_metric': 0.21676300578034682}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 21,250 | small aux 2 | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 |
{'predicted_index_raw': 1.7803468208092486, 'predicted_index_per_token': 1.6445086705202312, 'predicted_index_per_char': 1.5867052023121386, 'predicted_index_per_byte': 1.5867052023121386, 'predicted_index_uncond': 1.3641618497109826, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23699421965317918, 'acc_per_token': 0.26011560693641617, 'acc_per_char': 0.21965317919075145, 'acc_per_byte': 0.21965317919075145, 'acc_uncond': 0.2514450867052023, 'no_answer': 0.0, 'sum_logits_corr': -34.23271054752989, 'logits_per_token_corr': -3.870310000236531, 'logits_per_char_corr': -0.7627838108120464, 'bits_per_byte_corr': 1.1004644211297152, 'correct_prob': 7.67879787343247e-05, 'correct_prob_per_token': 0.037207427010015615, 'correct_prob_per_char': 0.47975809765074023, 'margin': -0.0002525671672244021, 'margin_per_token': -0.02421628133840397, 'margin_per_char': -0.07809704812826168, 'total_prob': 0.0004628861408319451, 'total_prob_per_token': 0.15123274017145416, 'total_prob_per_char': 1.951085520600363, 'uncond_correct_prob': 4.2475017448846634e-08, 'uncond_correct_prob_per_token': 0.011736000082671942, 'uncond_correct_prob_per_char': 0.3575113071942187, 'uncond_total_prob': 5.83929266683692e-07, 'norm_correct_prob': 0.23486293684802026, 'norm_correct_prob_per_token': 0.25251971283354513, 'norm_correct_prob_per_char': 0.24576484325201917, 'primary_metric': 0.23699421965317918}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 21,250 | small aux 3 | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 |
{'predicted_index_raw': 1.8092485549132948, 'predicted_index_per_token': 1.8988439306358382, 'predicted_index_per_char': 1.7572254335260116, 'predicted_index_per_byte': 1.7572254335260116, 'predicted_index_uncond': 1.3988439306358382, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.24566473988439305, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.1936416184971098, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -34.43791994331889, 'logits_per_token_corr': -3.8666166844086742, 'logits_per_char_corr': -0.7601607884411593, 'bits_per_byte_corr': 1.0966801997630928, 'correct_prob': 9.699688811471961e-05, 'correct_prob_per_token': 0.03838156181891767, 'correct_prob_per_char': 0.48144323249404813, 'margin': -0.00037545892329269296, 'margin_per_token': -0.028864838432592854, 'margin_per_char': -0.08016741917380939, 'total_prob': 0.0006305509649174258, 'total_prob_per_token': 0.15705133792236942, 'total_prob_per_char': 1.9540323193406677, 'uncond_correct_prob': 5.428009180086629e-08, 'uncond_correct_prob_per_token': 0.011057708792466845, 'uncond_correct_prob_per_char': 0.3583351305438377, 'uncond_total_prob': 5.090732191490512e-07, 'norm_correct_prob': 0.23488930543214578, 'norm_correct_prob_per_token': 0.25053393778164934, 'norm_correct_prob_per_char': 0.24620110081070748, 'primary_metric': 0.23121387283236994}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 21,250 | default | 5xC | 6,963,200,000 | 4,092,132,261,888,000,000 |
{'predicted_index_raw': 1.777456647398844, 'predicted_index_per_token': 1.7254335260115607, 'predicted_index_per_char': 1.6502890173410405, 'predicted_index_per_byte': 1.3988439306358382, 'predicted_index_uncond': 1.2861271676300579, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.2745664739884393, 'acc_per_char': 0.22832369942196531, 'acc_per_byte': 0.2745664739884393, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -34.17411714068727, 'logits_per_token_corr': -3.8681212637816875, 'logits_per_char_corr': -0.7596257718465437, 'logits_per_byte_corr': 1.095908333975247, 'correct_prob': 8.110925399066165e-05, 'correct_prob_per_token': 0.0380579497765898, 'correct_prob_per_char': 0.4808297469553985, 'margin': -0.00017916983493206797, 'margin_per_token': -0.02337472246814667, 'margin_per_char': -0.07417183435104174, 'total_prob': 0.0003759596118065586, 'total_prob_per_token': 0.1513823344662088, 'total_prob_per_char': 1.9456774369160945, 'uncond_correct_prob': 7.91469897951521e-08, 'uncond_correct_prob_per_token': 0.011602071831067748, 'uncond_correct_prob_per_char': 0.3557896277045141, 'uncond_total_prob': 6.357057949283831e-07, 'norm_correct_prob': 0.22493562855425597, 'norm_correct_prob_per_token': 0.25723147726308826, 'norm_correct_prob_per_char': 0.24685484015712755, 'primary_metric': 0.2254335260115607}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 22,500 | small aux 2 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 |
{'predicted_index_raw': 1.760115606936416, 'predicted_index_per_token': 1.6676300578034682, 'predicted_index_per_char': 1.5751445086705202, 'predicted_index_per_byte': 1.5751445086705202, 'predicted_index_uncond': 1.3728323699421965, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.2398843930635838, 'acc_per_char': 0.20809248554913296, 'acc_per_byte': 0.20809248554913296, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -33.70398494411755, 'logits_per_token_corr': -3.794614011214309, 'logits_per_char_corr': -0.7466080021680811, 'bits_per_byte_corr': 1.077127662216681, 'correct_prob': 0.00012422384418283086, 'correct_prob_per_token': 0.04009612123247013, 'correct_prob_per_char': 0.48725106712573807, 'margin': -0.0006169933610265994, 'margin_per_token': -0.026118853347942648, 'margin_per_char': -0.07729161851376568, 'total_prob': 0.0009640459951588021, 'total_prob_per_token': 0.16242056288406953, 'total_prob_per_char': 1.9782876616751977, 'uncond_correct_prob': 5.9267933712358785e-08, 'uncond_correct_prob_per_token': 0.012487620340986142, 'uncond_correct_prob_per_char': 0.3644620342461845, 'uncond_total_prob': 6.722461323242105e-07, 'norm_correct_prob': 0.2378389836620933, 'norm_correct_prob_per_token': 0.2539807203087487, 'norm_correct_prob_per_char': 0.24606904401417076, 'primary_metric': 0.23410404624277456}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 22,500 | small aux 3 | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 |
{'predicted_index_raw': 1.76878612716763, 'predicted_index_per_token': 1.8381502890173411, 'predicted_index_per_char': 1.6878612716763006, 'predicted_index_per_byte': 1.6878612716763006, 'predicted_index_uncond': 1.3439306358381502, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2138728323699422, 'acc_per_token': 0.2398843930635838, 'acc_per_char': 0.19653179190751446, 'acc_per_byte': 0.19653179190751446, 'acc_uncond': 0.2543352601156069, 'no_answer': 0.0, 'sum_logits_corr': -34.19258905835235, 'logits_per_token_corr': -3.8395711164149655, 'logits_per_char_corr': -0.7553671868428248, 'bits_per_byte_corr': 1.0897644945091736, 'correct_prob': 9.29305856705231e-05, 'correct_prob_per_token': 0.03837087234133083, 'correct_prob_per_char': 0.4830590687693001, 'margin': -0.0007406609204127156, 'margin_per_token': -0.028059703922377933, 'margin_per_char': -0.08051121796844239, 'total_prob': 0.0010082293286925814, 'total_prob_per_token': 0.1582016759935419, 'total_prob_per_char': 1.9636249381121487, 'uncond_correct_prob': 6.347258863126956e-08, 'uncond_correct_prob_per_token': 0.011833242294641022, 'uncond_correct_prob_per_char': 0.36492205219859586, 'uncond_total_prob': 5.112389089500093e-07, 'norm_correct_prob': 0.22422576112374126, 'norm_correct_prob_per_token': 0.24918356257311644, 'norm_correct_prob_per_char': 0.2457528568383511, 'primary_metric': 0.2138728323699422}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 22,500 | default | 5xC | 7,372,800,000 | 4,332,845,924,352,000,000 |
{'predicted_index_raw': 1.7919075144508672, 'predicted_index_per_token': 1.760115606936416, 'predicted_index_per_char': 1.7023121387283238, 'predicted_index_per_byte': 1.3786127167630058, 'predicted_index_uncond': 1.2976878612716762, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.2543352601156069, 'acc_per_char': 0.20809248554913296, 'acc_per_byte': 0.2658959537572254, 'acc_uncond': 0.27167630057803466, 'no_answer': 0.0, 'sum_logits_corr': -34.37826430453041, 'logits_per_token_corr': -3.933558575299534, 'logits_per_char_corr': -0.7689917082570966, 'logits_per_byte_corr': 1.109420523988043, 'correct_prob': 2.134698127745973e-05, 'correct_prob_per_token': 0.03714604864871953, 'correct_prob_per_char': 0.47652655245015674, 'margin': -0.00020762767633759926, 'margin_per_token': -0.023810721601450667, 'margin_per_char': -0.07522652506170241, 'total_prob': 0.00027031115648730447, 'total_prob_per_token': 0.14832829940718983, 'total_prob_per_char': 1.9267841262982355, 'uncond_correct_prob': 5.705235296512191e-08, 'uncond_correct_prob_per_token': 0.011645489916139697, 'uncond_correct_prob_per_char': 0.3569694689870668, 'uncond_total_prob': 5.300393689626044e-07, 'norm_correct_prob': 0.22058730149598887, 'norm_correct_prob_per_token': 0.2556909079695549, 'norm_correct_prob_per_char': 0.2472709792628791, 'primary_metric': 0.2254335260115607}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 23,750 | small aux 2 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 |
{'predicted_index_raw': 1.745664739884393, 'predicted_index_per_token': 1.6473988439306357, 'predicted_index_per_char': 1.546242774566474, 'predicted_index_per_byte': 1.546242774566474, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.2630057803468208, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.22254335260115607, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -33.370188728233295, 'logits_per_token_corr': -3.729931646765212, 'logits_per_char_corr': -0.7348947148292445, 'bits_per_byte_corr': 1.0602289606604218, 'correct_prob': 0.00018570186035741696, 'correct_prob_per_token': 0.041770735800853394, 'correct_prob_per_char': 0.4926191532684179, 'margin': -0.0005550185874919979, 'margin_per_token': -0.02548127191435845, 'margin_per_char': -0.07489869748500232, 'total_prob': 0.001068866383668162, 'total_prob_per_token': 0.16642060542412015, 'total_prob_per_char': 1.9927277610770322, 'uncond_correct_prob': 6.40248184418048e-08, 'uncond_correct_prob_per_token': 0.012946998086963323, 'uncond_correct_prob_per_char': 0.36579682105559885, 'uncond_total_prob': 7.938492531866569e-07, 'norm_correct_prob': 0.2398853239651531, 'norm_correct_prob_per_token': 0.25616182148535066, 'norm_correct_prob_per_char': 0.24696396880107566, 'primary_metric': 0.23410404624277456}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 23,750 | small aux 3 | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 |
{'predicted_index_raw': 1.7716763005780347, 'predicted_index_per_token': 1.823699421965318, 'predicted_index_per_char': 1.6098265895953756, 'predicted_index_per_byte': 1.6098265895953756, 'predicted_index_uncond': 1.3439306358381502, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21965317919075145, 'acc_per_token': 0.23699421965317918, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.21098265895953758, 'acc_uncond': 0.2514450867052023, 'no_answer': 0.0, 'sum_logits_corr': -34.0784821675692, 'logits_per_token_corr': -3.842132496058924, 'logits_per_char_corr': -0.7558407530760771, 'bits_per_byte_corr': 1.0904477061654192, 'correct_prob': 6.644676956625428e-05, 'correct_prob_per_token': 0.038478117562915304, 'correct_prob_per_char': 0.48309471011669863, 'margin': -0.00041979810879027717, 'margin_per_token': -0.02640686923478594, 'margin_per_char': -0.07868023517990107, 'total_prob': 0.0006024303423655949, 'total_prob_per_token': 0.1558646361743139, 'total_prob_per_char': 1.959038719076578, 'uncond_correct_prob': 5.834315941842364e-08, 'uncond_correct_prob_per_token': 0.011703578827724144, 'uncond_correct_prob_per_char': 0.3642381267416412, 'uncond_total_prob': 5.542149714942993e-07, 'norm_correct_prob': 0.22983128578442186, 'norm_correct_prob_per_token': 0.2517039862902748, 'norm_correct_prob_per_char': 0.24631986578839607, 'primary_metric': 0.21965317919075145}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 23,750 | default | 5xC | 7,782,400,000 | 4,573,559,586,816,000,000 |
{'predicted_index_raw': 1.7861271676300579, 'predicted_index_per_token': 1.7427745664739884, 'predicted_index_per_char': 1.6329479768786128, 'predicted_index_per_byte': 1.3786127167630058, 'predicted_index_uncond': 1.300578034682081, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.26011560693641617, 'acc_per_char': 0.2254335260115607, 'acc_per_byte': 0.26878612716763006, 'acc_uncond': 0.2774566473988439, 'no_answer': 0.0, 'sum_logits_corr': -34.01854062080383, 'logits_per_token_corr': -3.8782054626232143, 'logits_per_char_corr': -0.7611247079352271, 'logits_per_byte_corr': 1.0980708416370017, 'correct_prob': 4.163149392489894e-05, 'correct_prob_per_token': 0.037723479657113326, 'correct_prob_per_char': 0.48023080156504844, 'margin': -0.00020394331197981852, 'margin_per_token': -0.024974597071829353, 'margin_per_char': -0.07566166414380894, 'total_prob': 0.00031656049130695694, 'total_prob_per_token': 0.15206617861504682, 'total_prob_per_char': 1.9432946183407949, 'uncond_correct_prob': 6.677073606838062e-08, 'uncond_correct_prob_per_token': 0.011589337950050942, 'uncond_correct_prob_per_char': 0.35852367659118606, 'uncond_total_prob': 4.885038430009156e-07, 'norm_correct_prob': 0.22880551266902566, 'norm_correct_prob_per_token': 0.2558892532667065, 'norm_correct_prob_per_char': 0.2469817219482921, 'primary_metric': 0.2254335260115607}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 25,000 | small aux 2 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 |
{'predicted_index_raw': 1.7427745664739884, 'predicted_index_per_token': 1.739884393063584, 'predicted_index_per_char': 1.5895953757225434, 'predicted_index_per_byte': 1.5895953757225434, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.2514450867052023, 'acc_per_char': 0.2254335260115607, 'acc_per_byte': 0.2254335260115607, 'acc_uncond': 0.26011560693641617, 'no_answer': 0.0, 'sum_logits_corr': -33.55586449810535, 'logits_per_token_corr': -3.78911394460348, 'logits_per_char_corr': -0.7462755332714804, 'bits_per_byte_corr': 1.076648010988305, 'correct_prob': 9.108823470651957e-05, 'correct_prob_per_token': 0.0400928677617007, 'correct_prob_per_char': 0.4876771401127192, 'margin': -0.0003854542900244558, 'margin_per_token': -0.024806228392517123, 'margin_per_char': -0.07424609568846585, 'total_prob': 0.0006418942109857375, 'total_prob_per_token': 0.16043135922711987, 'total_prob_per_char': 1.9727418394076013, 'uncond_correct_prob': 6.478108502884434e-08, 'uncond_correct_prob_per_token': 0.013520545588186366, 'uncond_correct_prob_per_char': 0.37167804781631375, 'uncond_total_prob': 8.263709320216663e-07, 'norm_correct_prob': 0.23891732647933414, 'norm_correct_prob_per_token': 0.2575613614310299, 'norm_correct_prob_per_char': 0.24704949591874925, 'primary_metric': 0.23410404624277456}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 25,000 | small aux 3 | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 |
{'predicted_index_raw': 1.8034682080924855, 'predicted_index_per_token': 1.8323699421965318, 'predicted_index_per_char': 1.6358381502890174, 'predicted_index_per_byte': 1.6358381502890174, 'predicted_index_uncond': 1.3323699421965318, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.21676300578034682, 'acc_per_token': 0.2514450867052023, 'acc_per_char': 0.18208092485549132, 'acc_per_byte': 0.18208092485549132, 'acc_uncond': 0.24566473988439305, 'no_answer': 0.0, 'sum_logits_corr': -34.13200585966165, 'logits_per_token_corr': -3.8660297953810345, 'logits_per_char_corr': -0.7600620725014413, 'bits_per_byte_corr': 1.096537782766405, 'correct_prob': 8.499790527378886e-05, 'correct_prob_per_token': 0.0383462097167929, 'correct_prob_per_char': 0.48176640103527735, 'margin': -0.000267463058275152, 'margin_per_token': -0.02678351513175913, 'margin_per_char': -0.07941743940668108, 'total_prob': 0.000488898373946386, 'total_prob_per_token': 0.1568020372354882, 'total_prob_per_char': 1.959201091400211, 'uncond_correct_prob': 6.150232318924729e-08, 'uncond_correct_prob_per_token': 0.011949070313595316, 'uncond_correct_prob_per_char': 0.3654178869253884, 'uncond_total_prob': 5.641830834528844e-07, 'norm_correct_prob': 0.22320538396534448, 'norm_correct_prob_per_token': 0.24899211876359711, 'norm_correct_prob_per_char': 0.24550728002222227, 'primary_metric': 0.21676300578034682}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 25,000 | default | 5xC | 8,192,000,000 | 4,814,273,249,280,000,000 |
{'predicted_index_raw': 1.754335260115607, 'predicted_index_per_token': 1.7109826589595376, 'predicted_index_per_char': 1.6011560693641618, 'predicted_index_per_byte': 1.416184971098266, 'predicted_index_uncond': 1.2745664739884393, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23699421965317918, 'acc_per_token': 0.25722543352601157, 'acc_per_char': 0.21676300578034682, 'acc_per_byte': 0.2658959537572254, 'acc_uncond': 0.2630057803468208, 'no_answer': 0.0, 'sum_logits_corr': -33.97566960588356, 'logits_per_token_corr': -3.88152009851122, 'logits_per_char_corr': -0.7622607386918641, 'logits_per_byte_corr': 1.0997097875759003, 'correct_prob': 3.7983645591979695e-05, 'correct_prob_per_token': 0.03715810057103422, 'correct_prob_per_char': 0.47944988973995534, 'margin': -0.00021721091867934458, 'margin_per_token': -0.024149778368622015, 'margin_per_char': -0.07466116867428886, 'total_prob': 0.00032247992864874197, 'total_prob_per_token': 0.15014324466032095, 'total_prob_per_char': 1.9406807886406818, 'uncond_correct_prob': 1.0367606034719097e-07, 'uncond_correct_prob_per_token': 0.012977733372789802, 'uncond_correct_prob_per_char': 0.36695819038310246, 'uncond_total_prob': 8.262074391558455e-07, 'norm_correct_prob': 0.22496579407374184, 'norm_correct_prob_per_token': 0.2551889551358888, 'norm_correct_prob_per_char': 0.24695077985892763, 'primary_metric': 0.23699421965317918}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 27,500 | small aux 2 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 |
{'predicted_index_raw': 1.7369942196531791, 'predicted_index_per_token': 1.6560693641618498, 'predicted_index_per_char': 1.5549132947976878, 'predicted_index_per_byte': 1.5549132947976878, 'predicted_index_uncond': 1.3150289017341041, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2398843930635838, 'acc_per_token': 0.2630057803468208, 'acc_per_char': 0.2254335260115607, 'acc_per_byte': 0.2254335260115607, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -33.081951842831735, 'logits_per_token_corr': -3.7147551697071832, 'logits_per_char_corr': -0.7314459218932948, 'bits_per_byte_corr': 1.0552534041946706, 'correct_prob': 0.00011908941063226286, 'correct_prob_per_token': 0.04283173027596152, 'correct_prob_per_char': 0.4944620437573158, 'margin': -0.00045507220729648634, 'margin_per_token': -0.024887462477582785, 'margin_per_char': -0.07314822598865055, 'total_prob': 0.0007837800828900161, 'total_prob_per_token': 0.16873749371482838, 'total_prob_per_char': 1.996117021176188, 'uncond_correct_prob': 1.1295483953283727e-07, 'uncond_correct_prob_per_token': 0.01418355176552624, 'uncond_correct_prob_per_char': 0.37437881218403357, 'uncond_total_prob': 1.801615815134321e-06, 'norm_correct_prob': 0.24493797556427382, 'norm_correct_prob_per_token': 0.25902046324785055, 'norm_correct_prob_per_char': 0.24753645394109847, 'primary_metric': 0.2398843930635838}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 27,500 | small aux 3 | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 |
{'predicted_index_raw': 1.7745664739884393, 'predicted_index_per_token': 1.7745664739884393, 'predicted_index_per_char': 1.546242774566474, 'predicted_index_per_byte': 1.546242774566474, 'predicted_index_uncond': 1.3208092485549132, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.2254335260115607, 'acc_per_token': 0.24277456647398843, 'acc_per_char': 0.19653179190751446, 'acc_per_byte': 0.19653179190751446, 'acc_uncond': 0.24855491329479767, 'no_answer': 0.0, 'sum_logits_corr': -33.73677499583691, 'logits_per_token_corr': -3.809410934027737, 'logits_per_char_corr': -0.7483586020039847, 'bits_per_byte_corr': 1.0796532439185218, 'correct_prob': 8.972862699755127e-05, 'correct_prob_per_token': 0.0397352750940625, 'correct_prob_per_char': 0.4864876328619482, 'margin': -0.0003929638350804745, 'margin_per_token': -0.02708346998245124, 'margin_per_char': -0.07804978708433248, 'total_prob': 0.0006198511370815745, 'total_prob_per_token': 0.1609799583805506, 'total_prob_per_char': 1.9723263909162423, 'uncond_correct_prob': 7.93247445781454e-08, 'uncond_correct_prob_per_token': 0.012663343913807022, 'uncond_correct_prob_per_char': 0.37088421236572877, 'uncond_total_prob': 7.086366657026562e-07, 'norm_correct_prob': 0.2280031421250389, 'norm_correct_prob_per_token': 0.25202565793195825, 'norm_correct_prob_per_char': 0.24643361661296898, 'primary_metric': 0.2254335260115607}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 27,500 | default | 5xC | 9,011,200,000 | 5,295,700,574,208,000,000 |
{'predicted_index_raw': 1.7803468208092486, 'predicted_index_per_token': 1.7341040462427746, 'predicted_index_per_char': 1.6184971098265897, 'predicted_index_per_byte': 1.4190751445086704, 'predicted_index_uncond': 1.2861271676300579, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.22832369942196531, 'acc_per_token': 0.2543352601156069, 'acc_per_char': 0.21098265895953758, 'acc_per_byte': 0.26011560693641617, 'acc_uncond': 0.2543352601156069, 'no_answer': 0.0, 'sum_logits_corr': -33.69865976868337, 'logits_per_token_corr': -3.8352633227490345, 'logits_per_char_corr': -0.7519486173175379, 'logits_per_byte_corr': 1.0848325412081041, 'correct_prob': 4.8586249055369046e-05, 'correct_prob_per_token': 0.03897045831557201, 'correct_prob_per_char': 0.4840642398958331, 'margin': -0.00024762859504294803, 'margin_per_token': -0.025095728107670107, 'margin_per_char': -0.07424040259794852, 'total_prob': 0.00038621739610948474, 'total_prob_per_token': 0.15702085752695616, 'total_prob_per_char': 1.9597714795354209, 'uncond_correct_prob': 1.1677021165975214e-07, 'uncond_correct_prob_per_token': 0.013112757662445065, 'uncond_correct_prob_per_char': 0.3681560202046783, 'uncond_total_prob': 8.190769683423936e-07, 'norm_correct_prob': 0.22847688075049483, 'norm_correct_prob_per_token': 0.25477622520114074, 'norm_correct_prob_per_char': 0.24687582715853235, 'primary_metric': 0.22832369942196531}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 28,750 | small aux 2 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 |
{'predicted_index_raw': 1.745664739884393, 'predicted_index_per_token': 1.6329479768786128, 'predicted_index_per_char': 1.5635838150289016, 'predicted_index_per_byte': 1.5635838150289016, 'predicted_index_uncond': 1.3381502890173411, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.24277456647398843, 'acc_per_token': 0.25722543352601157, 'acc_per_char': 0.22254335260115607, 'acc_per_byte': 0.22254335260115607, 'acc_uncond': 0.26011560693641617, 'no_answer': 0.0, 'sum_logits_corr': -33.040118917564435, 'logits_per_token_corr': -3.7081675702987753, 'logits_per_char_corr': -0.7307581838547923, 'bits_per_byte_corr': 1.0542612079370914, 'correct_prob': 0.0001289673540049721, 'correct_prob_per_token': 0.042759503538062765, 'correct_prob_per_char': 0.49473847715927244, 'margin': -0.0005866451728015564, 'margin_per_token': -0.02583127598729332, 'margin_per_char': -0.07449712266167313, 'total_prob': 0.0009467760990869259, 'total_prob_per_token': 0.17049971598713845, 'total_prob_per_char': 2.0012150034645595, 'uncond_correct_prob': 8.26971703347254e-08, 'uncond_correct_prob_per_token': 0.01380845901789067, 'uncond_correct_prob_per_char': 0.3724321560257989, 'uncond_total_prob': 1.5808914651298809e-06, 'norm_correct_prob': 0.24218688524202087, 'norm_correct_prob_per_token': 0.2572529035280044, 'norm_correct_prob_per_char': 0.2470681332634633, 'primary_metric': 0.24277456647398843}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 28,750 | small aux 3 | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 |
{'predicted_index_raw': 1.777456647398844, 'predicted_index_per_token': 1.760115606936416, 'predicted_index_per_char': 1.569364161849711, 'predicted_index_per_byte': 1.569364161849711, 'predicted_index_uncond': 1.323699421965318, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.24566473988439305, 'acc_per_char': 0.20520231213872833, 'acc_per_byte': 0.20520231213872833, 'acc_uncond': 0.25722543352601157, 'no_answer': 0.0, 'sum_logits_corr': -33.618062748385306, 'logits_per_token_corr': -3.7918474618507223, 'logits_per_char_corr': -0.7449628459754704, 'bits_per_byte_corr': 1.074754203536112, 'correct_prob': 8.192093614890994e-05, 'correct_prob_per_token': 0.04015936557277183, 'correct_prob_per_char': 0.487919529788383, 'margin': -0.0004045338093793195, 'margin_per_token': -0.02673539997844883, 'margin_per_char': -0.07750925420472454, 'total_prob': 0.000625768457948604, 'total_prob_per_token': 0.16217476313600762, 'total_prob_per_char': 1.9779255833405718, 'uncond_correct_prob': 9.234376863532815e-08, 'uncond_correct_prob_per_token': 0.013130697105728914, 'uncond_correct_prob_per_char': 0.3733535451552472, 'uncond_total_prob': 8.162146299007932e-07, 'norm_correct_prob': 0.22801153831738413, 'norm_correct_prob_per_token': 0.25222961495587753, 'norm_correct_prob_per_char': 0.2464617742318126, 'primary_metric': 0.23121387283236994}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 28,750 | default | 5xC | 9,420,800,000 | 5,536,414,236,672,000,000 |
{'predicted_index_raw': 1.777456647398844, 'predicted_index_per_token': 1.7138728323699421, 'predicted_index_per_char': 1.5809248554913296, 'predicted_index_per_byte': 1.430635838150289, 'predicted_index_uncond': 1.3034682080924855, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23121387283236994, 'acc_per_token': 0.24566473988439305, 'acc_per_char': 0.2138728323699422, 'acc_per_byte': 0.2658959537572254, 'acc_uncond': 0.2745664739884393, 'no_answer': 0.0, 'sum_logits_corr': -33.63545357285207, 'logits_per_token_corr': -3.8388036404768973, 'logits_per_char_corr': -0.7529677456836763, 'logits_per_byte_corr': 1.0863028326479622, 'correct_prob': 4.6334887927217755e-05, 'correct_prob_per_token': 0.03898559749598249, 'correct_prob_per_char': 0.48377927294343714, 'margin': -0.00022026400580552237, 'margin_per_token': -0.02530233887734046, 'margin_per_char': -0.07450942973954412, 'total_prob': 0.00034909875697232217, 'total_prob_per_token': 0.15732051880686931, 'total_prob_per_char': 1.9585277488627508, 'uncond_correct_prob': 9.881356373206887e-08, 'uncond_correct_prob_per_token': 0.012993571311038088, 'uncond_correct_prob_per_char': 0.3665972292948974, 'uncond_total_prob': 7.461795391633822e-07, 'norm_correct_prob': 0.2300657607356479, 'norm_correct_prob_per_token': 0.25525801564685435, 'norm_correct_prob_per_char': 0.2469203430604396, 'primary_metric': 0.23121387283236994}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 29,901 | small aux 2 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 |
{'predicted_index_raw': 1.7630057803468209, 'predicted_index_per_token': 1.6502890173410405, 'predicted_index_per_char': 1.5433526011560694, 'predicted_index_per_byte': 1.5433526011560694, 'predicted_index_uncond': 1.3410404624277457, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.24277456647398843, 'acc_per_token': 0.27167630057803466, 'acc_per_char': 0.22832369942196531, 'acc_per_byte': 0.22832369942196531, 'acc_uncond': 0.26011560693641617, 'no_answer': 0.0, 'sum_logits_corr': -32.9977566727324, 'logits_per_token_corr': -3.7039830547984525, 'logits_per_char_corr': -0.7299634564692963, 'bits_per_byte_corr': 1.0531146586791769, 'correct_prob': 0.0001312091429664023, 'correct_prob_per_token': 0.04273909100103787, 'correct_prob_per_char': 0.49495655515229237, 'margin': -0.0005842988267023341, 'margin_per_token': -0.02546148368857955, 'margin_per_char': -0.07394616660885457, 'total_prob': 0.0009498938588571057, 'total_prob_per_token': 0.16980869671036358, 'total_prob_per_char': 2.0006607337922087, 'uncond_correct_prob': 7.533216877689179e-08, 'uncond_correct_prob_per_token': 0.013591015001934648, 'uncond_correct_prob_per_char': 0.3710382421818976, 'uncond_total_prob': 1.1972440064789837e-06, 'norm_correct_prob': 0.24346383248605624, 'norm_correct_prob_per_token': 0.25802614236143623, 'norm_correct_prob_per_char': 0.24728179624003577, 'primary_metric': 0.24277456647398843}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 29,901 | small aux 3 | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 |
{'predicted_index_raw': 1.7803468208092486, 'predicted_index_per_token': 1.745664739884393, 'predicted_index_per_char': 1.5635838150289016, 'predicted_index_per_byte': 1.5635838150289016, 'predicted_index_uncond': 1.329479768786127, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23410404624277456, 'acc_per_token': 0.2514450867052023, 'acc_per_char': 0.1936416184971098, 'acc_per_byte': 0.1936416184971098, 'acc_uncond': 0.2630057803468208, 'no_answer': 0.0, 'sum_logits_corr': -33.51787652583481, 'logits_per_token_corr': -3.7756798641615643, 'logits_per_char_corr': -0.7420530985465975, 'bits_per_byte_corr': 1.0705563253502346, 'correct_prob': 8.88486088551572e-05, 'correct_prob_per_token': 0.04041660792946479, 'correct_prob_per_char': 0.4891505960598214, 'margin': -0.00036412237507164966, 'margin_per_token': -0.027140262306582136, 'margin_per_char': -0.0778109872727916, 'total_prob': 0.0006000719455598323, 'total_prob_per_token': 0.1636965583216187, 'total_prob_per_char': 1.9830533750896309, 'uncond_correct_prob': 7.628622110498699e-08, 'uncond_correct_prob_per_token': 0.012730523047580993, 'uncond_correct_prob_per_char': 0.3700876978776558, 'uncond_total_prob': 6.730352733598383e-07, 'norm_correct_prob': 0.22674159074841296, 'norm_correct_prob_per_token': 0.2518436894875307, 'norm_correct_prob_per_char': 0.24645243913973827, 'primary_metric': 0.23410404624277456}
| 90M | FineWeb-Pro | mmlu_moral_disputes | 29,901 | default | 5xC | 9,797,959,680 | 5,758,063,377,068,851,000 |
{'predicted_index_raw': 1.7716763005780347, 'predicted_index_per_token': 1.7052023121387283, 'predicted_index_per_char': 1.6098265895953756, 'predicted_index_per_byte': 1.4104046242774566, 'predicted_index_uncond': 1.2630057803468209, 'correct_choice': 1.4710982658959537, 'acc_raw': 0.23699421965317918, 'acc_per_token': 0.24566473988439305, 'acc_per_char': 0.20520231213872833, 'acc_per_byte': 0.2745664739884393, 'acc_uncond': 0.2745664739884393, 'no_answer': 0.0, 'sum_logits_corr': -33.55856073522843, 'logits_per_token_corr': -3.8225956556482052, 'logits_per_char_corr': -0.7499744786060178, 'logits_per_byte_corr': 1.0819844610789653, 'correct_prob': 4.883412143394837e-05, 'correct_prob_per_token': 0.039443288767388555, 'correct_prob_per_char': 0.48521854289020844, 'margin': -0.0002302572825196133, 'margin_per_token': -0.025812027204591508, 'margin_per_char': -0.07535411429664547, 'total_prob': 0.0003653547439989859, 'total_prob_per_token': 0.15958143476118994, 'total_prob_per_char': 1.965695533534674, 'uncond_correct_prob': 1.0270760277449958e-07, 'uncond_correct_prob_per_token': 0.013014491086827359, 'uncond_correct_prob_per_char': 0.366649064943023, 'uncond_total_prob': 7.317374046988386e-07, 'norm_correct_prob': 0.22891775668582176, 'norm_correct_prob_per_token': 0.2546548387063855, 'norm_correct_prob_per_char': 0.24673231074930663, 'primary_metric': 0.23699421965317918}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 0 | small aux 2 | 5xC | 0 | 0 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 0.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.23798882681564246, 'no_answer': 0.0, 'sum_logits_corr': -43.51998098575869, 'logits_per_token_corr': -10.747847213745116, 'logits_per_char_corr': -2.5261230041429372, 'bits_per_byte_corr': 3.6444251307551636, 'correct_prob': 6.8899304035117534e-15, 'correct_prob_per_token': 2.2054125981129872e-05, 'correct_prob_per_char': 0.08023048005354394, 'margin': -1.5287806207879346e-14, 'margin_per_token': -6.515241295633252e-06, 'margin_per_char': -0.008060152226702519, 'total_prob': 2.906790600728042e-14, 'total_prob_per_token': 8.910935968621762e-05, 'total_prob_per_char': 0.3220897151807853, 'uncond_correct_prob': 1.9925678023729882e-15, 'uncond_correct_prob_per_token': 1.97330012106676e-05, 'uncond_correct_prob_per_char': 0.07845111734947696, 'uncond_total_prob': 8.372518180592928e-15, 'norm_correct_prob': 0.23798891584355822, 'norm_correct_prob_per_token': 0.24751886607312348, 'norm_correct_prob_per_char': 0.24909521314828767, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 0 | small aux 3 | 5xC | 0 | 0 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 3.0, 'predicted_index_per_char': 1.0, 'predicted_index_per_byte': 1.0, 'predicted_index_uncond': 0.6089385474860335, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.27262569832402234, 'acc_per_char': 0.2424581005586592, 'acc_per_byte': 0.2424581005586592, 'acc_uncond': 0.2324022346368715, 'no_answer': 0.0, 'sum_logits_corr': -43.85612644216868, 'logits_per_token_corr': -10.895434158296778, 'logits_per_char_corr': -2.55983399657907, 'bits_per_byte_corr': 3.693059812366254, 'correct_prob': 7.497993495323644e-16, 'correct_prob_per_token': 1.8758127165600775e-05, 'correct_prob_per_char': 0.07732357157138828, 'margin': -1.6509985113940224e-15, 'margin_per_token': -2.779316154901512e-06, 'margin_per_char': -0.001096967185821151, 'total_prob': 3.1507576793444526e-15, 'total_prob_per_token': 7.458489315309901e-05, 'total_prob_per_char': 0.3092539052873513, 'uncond_correct_prob': 2.235693683734287e-16, 'uncond_correct_prob_per_token': 1.4806702209585267e-05, 'uncond_correct_prob_per_char': 0.07288434737373901, 'uncond_total_prob': 9.394086553380118e-16, 'norm_correct_prob': 0.2379892947014349, 'norm_correct_prob_per_token': 0.2515008069263571, 'norm_correct_prob_per_char': 0.2500326368178394, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 0 | default | 5xC | 0 | 0 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 2.0972067039106146, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 3.0, 'predicted_index_uncond': 3.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.2424581005586592, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.27262569832402234, 'acc_uncond': 0.27262569832402234, 'no_answer': 0.0, 'sum_logits_corr': -44.62595889555009, 'logits_per_token_corr': -11.06654662183765, 'logits_per_char_corr': -2.600338807291033, 'logits_per_byte_corr': 3.7514959019125897, 'correct_prob': 7.713911737832807e-16, 'correct_prob_per_token': 1.5642096188799354e-05, 'correct_prob_per_char': 0.07426705569331746, 'margin': -1.7034195454055866e-15, 'margin_per_token': -6.863901533481361e-07, 'margin_per_char': -0.00206901730873933, 'total_prob': 3.2462778611622015e-15, 'total_prob_per_token': 6.247366251464457e-05, 'total_prob_per_char': 0.2973497014842234, 'uncond_correct_prob': 3.0797513681839276e-15, 'uncond_correct_prob_per_token': 1.5672690490150613e-05, 'uncond_correct_prob_per_char': 0.07387659768755109, 'uncond_total_prob': 1.2940736894958522e-14, 'norm_correct_prob': 0.23798907520215368, 'norm_correct_prob_per_token': 0.25037769886179373, 'norm_correct_prob_per_char': 0.2497633457050085, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 1,250 | small aux 2 | 5xC | 409,600,000 | 240,713,662,464,000,000 |
{'predicted_index_raw': 1.4905027932960895, 'predicted_index_per_token': 2.700558659217877, 'predicted_index_per_char': 2.617877094972067, 'predicted_index_per_byte': 2.617877094972067, 'predicted_index_uncond': 3.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.26033519553072626, 'acc_per_char': 0.25139664804469275, 'acc_per_byte': 0.25139664804469275, 'acc_uncond': 0.27262569832402234, 'no_answer': 0.0, 'sum_logits_corr': -7.984460805647866, 'logits_per_token_corr': -2.0217810252436488, 'logits_per_char_corr': -0.47446737099230873, 'bits_per_byte_corr': 0.6845117231947195, 'correct_prob': 0.0006066707946856224, 'correct_prob_per_token': 0.14139928531387613, 'correct_prob_per_char': 0.6243782279346103, 'margin': -0.0005399052252687334, 'margin_per_token': -0.04880947827821937, 'margin_per_char': -0.050205584207427434, 'total_prob': 0.0024688872928639903, 'total_prob_per_token': 0.5590399800997417, 'total_prob_per_char': 2.490848826022159, 'uncond_correct_prob': 2.4645565058752076e-09, 'uncond_correct_prob_per_token': 0.0026453993873784363, 'uncond_correct_prob_per_char': 0.2441882808457978, 'uncond_total_prob': 1.0354295954994744e-08, 'norm_correct_prob': 0.24510823950067973, 'norm_correct_prob_per_token': 0.25302649384349507, 'norm_correct_prob_per_char': 0.2506735633082764, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 1,250 | small aux 3 | 5xC | 409,600,000 | 240,713,662,464,000,000 |
{'predicted_index_raw': 0.41675977653631285, 'predicted_index_per_token': 3.0, 'predicted_index_per_char': 2.9966480446927375, 'predicted_index_per_byte': 2.9966480446927375, 'predicted_index_uncond': 2.2458100558659218, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24692737430167597, 'acc_per_token': 0.27262569832402234, 'acc_per_char': 0.27150837988826815, 'acc_per_byte': 0.27150837988826815, 'acc_uncond': 0.2324022346368715, 'no_answer': 0.0, 'sum_logits_corr': -6.208080083431478, 'logits_per_token_corr': -1.5755686792714634, 'logits_per_char_corr': -0.36969553558210144, 'bits_per_byte_corr': 0.5333579158234701, 'correct_prob': 0.0024826499338370034, 'correct_prob_per_token': 0.21330003593111238, 'correct_prob_per_char': 0.6920383320756835, 'margin': -0.0020435449073540573, 'margin_per_token': -0.05092300530628516, 'margin_per_char': -0.0362889591616153, 'total_prob': 0.010319814850017853, 'total_prob_per_token': 0.8476830773214202, 'total_prob_per_char': 2.763833406444113, 'uncond_correct_prob': 4.817291549462198e-10, 'uncond_correct_prob_per_token': 0.0025542153394937086, 'uncond_correct_prob_per_char': 0.23664056171981837, 'uncond_total_prob': 2.023139363680199e-09, 'norm_correct_prob': 0.24419763909164777, 'norm_correct_prob_per_token': 0.25185143804263227, 'norm_correct_prob_per_char': 0.25040466559729113, 'primary_metric': 0.24692737430167597}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 1,250 | default | 5xC | 409,600,000 | 240,713,662,464,000,000 |
{'predicted_index_raw': 2.036871508379888, 'predicted_index_per_token': 3.0, 'predicted_index_per_char': 3.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24804469273743016, 'acc_per_token': 0.27262569832402234, 'acc_per_char': 0.27262569832402234, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -6.882875072223515, 'logits_per_token_corr': -1.7797863658358042, 'logits_per_char_corr': -0.4170684375382888, 'logits_per_byte_corr': 0.60170256654823, 'correct_prob': 0.001190677614816911, 'correct_prob_per_token': 0.18425242871722672, 'correct_prob_per_char': 0.6622472442217693, 'margin': -0.0006859019441714873, 'margin_per_token': -0.06162073556215722, 'margin_per_char': -0.053989060979678954, 'total_prob': 0.004715727382726092, 'total_prob_per_token': 0.7247858904742361, 'total_prob_per_char': 2.638740980265192, 'uncond_correct_prob': 3.3937576226524136e-09, 'uncond_correct_prob_per_token': 0.0023709186688148673, 'uncond_correct_prob_per_char': 0.23432664363348027, 'uncond_total_prob': 1.4259448663686169e-08, 'norm_correct_prob': 0.25200984262032, 'norm_correct_prob_per_token': 0.25411025832948625, 'norm_correct_prob_per_char': 0.2509626281498025, 'primary_metric': 0.24804469273743016}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 2,500 | small aux 2 | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 1.5106145251396648, 'predicted_index_per_token': 2.0670391061452515, 'predicted_index_per_char': 2.0569832402234636, 'predicted_index_per_byte': 2.0569832402234636, 'predicted_index_uncond': 1.08268156424581, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.26033519553072626, 'acc_per_token': 0.24804469273743016, 'acc_per_char': 0.24804469273743016, 'acc_per_byte': 0.24804469273743016, 'acc_uncond': 0.24804469273743016, 'no_answer': 0.0, 'sum_logits_corr': -4.851321365846602, 'logits_per_token_corr': -1.2123942070167157, 'logits_per_char_corr': -0.2848002671411649, 'bits_per_byte_corr': 0.4108799330487058, 'correct_prob': 0.01976445407218653, 'correct_prob_per_token': 0.3205161274108902, 'correct_prob_per_char': 0.7555853579557987, 'margin': -0.018390947936855476, 'margin_per_token': -0.1015399094651423, 'margin_per_char': -0.05946802617603057, 'total_prob': 0.07959216106131492, 'total_prob_per_token': 1.2731732427363462, 'total_prob_per_char': 3.0172816571695753, 'uncond_correct_prob': 2.977919028562854e-10, 'uncond_correct_prob_per_token': 0.001896962953827696, 'uncond_correct_prob_per_char': 0.21820202181899068, 'uncond_total_prob': 1.2504981044976308e-09, 'norm_correct_prob': 0.2464145299937782, 'norm_correct_prob_per_token': 0.25167570643713366, 'norm_correct_prob_per_char': 0.25041714865531023, 'primary_metric': 0.26033519553072626}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 2,500 | small aux 3 | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.21564245810055865, 'predicted_index_per_char': 0.11620111731843576, 'predicted_index_per_byte': 0.11620111731843576, 'predicted_index_uncond': 2.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.24022346368715083, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.24692737430167597, 'no_answer': 0.0, 'sum_logits_corr': -5.6327202770297085, 'logits_per_token_corr': -1.3804144872189459, 'logits_per_char_corr': -0.32460539864214977, 'bits_per_byte_corr': 0.468306598867151, 'correct_prob': 0.00805042631489102, 'correct_prob_per_token': 0.25321525940618494, 'correct_prob_per_char': 0.723145252302897, 'margin': -0.012039772916268348, 'margin_per_token': -0.03235124421763227, 'margin_per_char': -0.024121771032233452, 'total_prob': 0.033579249319597285, 'total_prob_per_token': 1.0169335533779476, 'total_prob_per_char': 2.895682655811134, 'uncond_correct_prob': 2.8601622178862774e-09, 'uncond_correct_prob_per_token': 0.0026677912833648503, 'uncond_correct_prob_per_char': 0.2436633936603138, 'uncond_total_prob': 1.2017168313924391e-08, 'norm_correct_prob': 0.240412319152521, 'norm_correct_prob_per_token': 0.24902498154967898, 'norm_correct_prob_per_char': 0.24973341326214404, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 2,500 | default | 5xC | 819,200,000 | 481,427,324,928,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 2.0, 'predicted_index_uncond': 1.6458100558659219, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.24692737430167597, 'acc_uncond': 0.264804469273743, 'no_answer': 0.0, 'sum_logits_corr': -5.537389528351789, 'logits_per_token_corr': -1.2987576387536812, 'logits_per_char_corr': -0.30636259964364027, 'logits_per_byte_corr': 0.44198780322004827, 'correct_prob': 0.06790467612785353, 'correct_prob_per_token': 0.3282736043904987, 'correct_prob_per_char': 0.7439984003706128, 'margin': -0.13638341522621214, 'margin_per_token': -0.24589535418362132, 'margin_per_char': -0.13108988011729591, 'total_prob': 0.28375945436362154, 'total_prob_per_token': 1.3363801511042928, 'total_prob_per_char': 2.987879098720501, 'uncond_correct_prob': 1.3313273378105211e-07, 'uncond_correct_prob_per_token': 0.004228995214793355, 'uncond_correct_prob_per_char': 0.2695804765623729, 'uncond_total_prob': 5.594055115234212e-07, 'norm_correct_prob': 0.2388991402114045, 'norm_correct_prob_per_token': 0.24565433567335837, 'norm_correct_prob_per_char': 0.24900961411543449, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 3,750 | small aux 2 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.987709497206704, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.24581005586592178, 'no_answer': 0.0, 'sum_logits_corr': -7.871431203128239, 'logits_per_token_corr': -1.9023733412620074, 'logits_per_char_corr': -0.44782746377915356, 'bits_per_byte_corr': 0.6460784611685308, 'correct_prob': 0.005505793682668126, 'correct_prob_per_token': 0.16234793101657424, 'correct_prob_per_char': 0.6420872679792053, 'margin': -0.011276972474457846, 'margin_per_token': -0.08636517975486505, 'margin_per_char': -0.07819715327749409, 'total_prob': 0.022681339960526264, 'total_prob_per_token': 0.6535348270093144, 'total_prob_per_char': 2.572217664573628, 'uncond_correct_prob': 3.42803514273356e-10, 'uncond_correct_prob_per_token': 0.0020216331844174974, 'uncond_correct_prob_per_char': 0.21978635242392133, 'uncond_total_prob': 1.4399853929041733e-09, 'norm_correct_prob': 0.23859880861046537, 'norm_correct_prob_per_token': 0.24827606641603903, 'norm_correct_prob_per_char': 0.2496178645672285, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 3,750 | small aux 3 | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 0.0022346368715083797, 'predicted_index_per_token': 0.17430167597765364, 'predicted_index_per_char': 0.10949720670391061, 'predicted_index_per_byte': 0.10949720670391061, 'predicted_index_uncond': 2.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23910614525139665, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.2324022346368715, 'acc_per_byte': 0.2324022346368715, 'acc_uncond': 0.24692737430167597, 'no_answer': 0.0, 'sum_logits_corr': -6.021717243620803, 'logits_per_token_corr': -1.453536776785966, 'logits_per_char_corr': -0.342142561167173, 'bits_per_byte_corr': 0.49360737627328405, 'correct_prob': 0.012376966982689563, 'correct_prob_per_token': 0.24499501262117987, 'correct_prob_per_char': 0.7122048792758089, 'margin': -0.02392897000830053, 'margin_per_token': -0.0871844608030782, 'margin_per_char': -0.0598260641011396, 'total_prob': 0.05322815226544786, 'total_prob_per_token': 0.991309210322727, 'total_prob_per_char': 2.856323847347166, 'uncond_correct_prob': 1.4335118985324372e-09, 'uncond_correct_prob_per_token': 0.0024236012966126234, 'uncond_correct_prob_per_char': 0.2364044924604478, 'uncond_total_prob': 6.022837367811979e-09, 'norm_correct_prob': 0.2387533679911906, 'norm_correct_prob_per_token': 0.24722137031978678, 'norm_correct_prob_per_char': 0.2493438527389073, 'primary_metric': 0.23910614525139665}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 3,750 | default | 5xC | 1,228,800,000 | 722,140,987,392,000,000 |
{'predicted_index_raw': 0.6357541899441341, 'predicted_index_per_token': 1.1977653631284917, 'predicted_index_per_char': 1.1385474860335196, 'predicted_index_per_byte': 2.0, 'predicted_index_uncond': 2.6581005586592177, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24692737430167597, 'acc_per_token': 0.25251396648044694, 'acc_per_char': 0.2435754189944134, 'acc_per_byte': 0.24692737430167597, 'acc_uncond': 0.2636871508379888, 'no_answer': 0.0, 'sum_logits_corr': -3.913086677396764, 'logits_per_token_corr': -0.9798291004614235, 'logits_per_char_corr': -0.23017960542911797, 'logits_per_byte_corr': 0.3320789752666054, 'correct_prob': 0.04862161287039049, 'correct_prob_per_token': 0.4092088607544564, 'correct_prob_per_char': 0.7987294071607169, 'margin': -0.042978018834438275, 'margin_per_token': -0.12343505559738821, 'margin_per_char': -0.06295396210829092, 'total_prob': 0.19721926461782074, 'total_prob_per_token': 1.6289742849899156, 'total_prob_per_char': 3.1911089716142316, 'uncond_correct_prob': 1.2441286220034836e-08, 'uncond_correct_prob_per_token': 0.003733760148508792, 'uncond_correct_prob_per_char': 0.26225459440087956, 'uncond_total_prob': 5.227365184497418e-08, 'norm_correct_prob': 0.24676546502246857, 'norm_correct_prob_per_token': 0.2511474575153172, 'norm_correct_prob_per_char': 0.25029447265360477, 'primary_metric': 0.24692737430167597}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 6,250 | small aux 2 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 2.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.24692737430167597, 'no_answer': 0.0, 'sum_logits_corr': -4.961497603714799, 'logits_per_token_corr': -1.181232100452125, 'logits_per_char_corr': -0.2783707586206229, 'bits_per_byte_corr': 0.4016041129907598, 'correct_prob': 0.049576296041344295, 'correct_prob_per_token': 0.33632090735896886, 'correct_prob_per_char': 0.7609484396218252, 'margin': -0.10116498854881886, 'margin_per_token': -0.18636892918164757, 'margin_per_char': -0.09589533941637642, 'total_prob': 0.20995467841458584, 'total_prob_per_token': 1.363051029411457, 'total_prob_per_char': 3.052135680941513, 'uncond_correct_prob': 7.198374795465783e-10, 'uncond_correct_prob_per_token': 0.0017741520580156819, 'uncond_correct_prob_per_char': 0.21786729398613483, 'uncond_total_prob': 3.02452282217037e-09, 'norm_correct_prob': 0.23804820986093436, 'norm_correct_prob_per_token': 0.24679959100157098, 'norm_correct_prob_per_char': 0.2493167526010313, 'primary_metric': 0.23798882681564246}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 6,250 | small aux 3 | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 0.9430167597765363, 'predicted_index_per_token': 0.9988826815642458, 'predicted_index_per_char': 0.9977653631284916, 'predicted_index_per_byte': 0.9977653631284916, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24804469273743016, 'acc_per_token': 0.2435754189944134, 'acc_per_char': 0.2424581005586592, 'acc_per_byte': 0.2424581005586592, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.126095052537972, 'logits_per_token_corr': -1.0114957840327223, 'logits_per_char_corr': -0.23785366498754948, 'bits_per_byte_corr': 0.34315030293504906, 'correct_prob': 0.05089324362521685, 'correct_prob_per_token': 0.38916648083859323, 'correct_prob_per_char': 0.7913740397648331, 'margin': -0.06770890419868598, 'margin_per_token': -0.16597661065317404, 'margin_per_char': -0.07773004571525281, 'total_prob': 0.21132781672196582, 'total_prob_per_token': 1.56499104749082, 'total_prob_per_char': 3.168749459290853, 'uncond_correct_prob': 7.766052237110055e-09, 'uncond_correct_prob_per_token': 0.002949075457392641, 'uncond_correct_prob_per_char': 0.2487098104552916, 'uncond_total_prob': 3.263107195794468e-08, 'norm_correct_prob': 0.24055294373602168, 'norm_correct_prob_per_token': 0.24862856333752892, 'norm_correct_prob_per_char': 0.24974127367595214, 'primary_metric': 0.24804469273743016}
| 90M | FineWeb-Pro | mmlu_moral_scenarios | 6,250 | default | 5xC | 2,048,000,000 | 1,203,568,312,320,000,000 |
{'predicted_index_raw': 2.993296089385475, 'predicted_index_per_token': 3.0, 'predicted_index_per_char': 3.0, 'predicted_index_per_byte': 1.0491620111731843, 'predicted_index_uncond': 3.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.27150837988826815, 'acc_per_token': 0.27262569832402234, 'acc_per_char': 0.27262569832402234, 'acc_per_byte': 0.24134078212290502, 'acc_uncond': 0.27262569832402234, 'no_answer': 0.0, 'sum_logits_corr': -2.8608823425942957, 'logits_per_token_corr': -0.7570093786427873, 'logits_per_char_corr': -0.17723028598569918, 'logits_per_byte_corr': 0.25568925468708464, 'correct_prob': 0.09710831871541896, 'correct_prob_per_token': 0.49629349903213027, 'correct_prob_per_char': 0.8400879928900933, 'margin': -0.09533826960758823, 'margin_per_token': -0.18252831060297559, 'margin_per_char': -0.0693252855026303, 'total_prob': 0.36523880726976843, 'total_prob_per_token': 1.9466480344618, 'total_prob_per_char': 3.345602043959695, 'uncond_correct_prob': 1.3926824933832708e-08, 'uncond_correct_prob_per_token': 0.0037071628998784105, 'uncond_correct_prob_per_char': 0.26199365704450794, 'uncond_total_prob': 5.851571374467662e-08, 'norm_correct_prob': 0.26588778001626334, 'norm_correct_prob_per_token': 0.2552460834400703, 'norm_correct_prob_per_char': 0.25112392608793155, 'primary_metric': 0.27150837988826815}
90M | FineWeb-Pro | mmlu_moral_scenarios | 7,500 | small aux 2 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.9474860335195532, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.25251396648044694, 'no_answer': 0.0, 'sum_logits_corr': -5.271446807424449, 'logits_per_token_corr': -1.235344006939751, 'logits_per_char_corr': -0.29144044448544965, 'bits_per_byte_corr': 0.4204596839739355, 'correct_prob': 0.08158949986432429, 'correct_prob_per_token': 0.3386832440047616, 'correct_prob_per_char': 0.7532887461708839, 'margin': -0.16923265298705129, 'margin_per_token': -0.25542403716671047, 'margin_per_char': -0.1241181101566835, 'total_prob': 0.3379092426582562, 'total_prob_per_token': 1.3750718300536382, 'total_prob_per_char': 3.023277290683195, 'uncond_correct_prob': 1.1561396540649193e-08, 'uncond_correct_prob_per_token': 0.002581632692816636, 'uncond_correct_prob_per_char': 0.24059897361912724, 'uncond_total_prob': 4.857919157915474e-08, 'norm_correct_prob': 0.23835275734132402, 'norm_correct_prob_per_token': 0.2462762921514728, 'norm_correct_prob_per_char': 0.24916765102328153, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 7,500 | small aux 3 | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0905027932960893, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.23575418994413408, 'no_answer': 0.0, 'sum_logits_corr': -4.65339543856056, 'logits_per_token_corr': -1.0591980251853026, 'logits_per_char_corr': -0.2503191173359177, 'bits_per_byte_corr': 0.3611341492204905, 'correct_prob': 0.15492817690971802, 'correct_prob_per_token': 0.41812180720247194, 'correct_prob_per_char': 0.7867729806665819, 'margin': -0.31599027411284825, 'margin_per_token': -0.3339597011337685, 'margin_per_char': -0.14375439765098155, 'total_prob': 0.65340558718159, 'total_prob_per_token': 1.7155754968816903, 'total_prob_per_char': 3.1676196214831562, 'uncond_correct_prob': 6.916950828112209e-09, 'uncond_correct_prob_per_token': 0.002498884190034933, 'uncond_correct_prob_per_char': 0.2391007156704146, 'uncond_total_prob': 2.9063812053570792e-08, 'norm_correct_prob': 0.23772809228260278, 'norm_correct_prob_per_token': 0.24369965333189667, 'norm_correct_prob_per_char': 0.2483736235024606, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 7,500 | default | 5xC | 2,457,600,000 | 1,444,281,974,784,000,000 |
{'predicted_index_raw': 1.1675977653631284, 'predicted_index_per_token': 2.5094972067039105, 'predicted_index_per_char': 2.4424581005586594, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 2.5195530726256985, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23575418994413408, 'acc_per_token': 0.26033519553072626, 'acc_per_char': 0.2558659217877095, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.26256983240223464, 'no_answer': 0.0, 'sum_logits_corr': -3.4136249149311855, 'logits_per_token_corr': -0.9147498660314016, 'logits_per_char_corr': -0.21384821734733123, 'logits_per_byte_corr': 0.30851776267016157, 'correct_prob': 0.046667938274322265, 'correct_prob_per_token': 0.4361797186388059, 'correct_prob_per_char': 0.8116020521326329, 'margin': -0.029223409916645182, 'margin_per_token': -0.11567200278416545, 'margin_per_char': -0.056839319738064116, 'total_prob': 0.18461363048513244, 'total_prob_per_token': 1.7220370990268605, 'total_prob_per_char': 3.234331914225546, 'uncond_correct_prob': 1.041741699385445e-09, 'uncond_correct_prob_per_token': 0.002020577765176535, 'uncond_correct_prob_per_char': 0.22801763087470953, 'uncond_total_prob': 4.3769080047503904e-09, 'norm_correct_prob': 0.25271900466324315, 'norm_correct_prob_per_token': 0.25334196857590724, 'norm_correct_prob_per_char': 0.25094182651720653, 'primary_metric': 0.23575418994413408}
90M | FineWeb-Pro | mmlu_moral_scenarios | 8,750 | small aux 2 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 0.00558659217877095, 'predicted_index_per_token': 1.6201117318435754, 'predicted_index_per_char': 1.3631284916201116, 'predicted_index_per_byte': 1.3631284916201116, 'predicted_index_uncond': 2.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23687150837988827, 'acc_per_token': 0.27039106145251396, 'acc_per_char': 0.27150837988826815, 'acc_per_byte': 0.27150837988826815, 'acc_uncond': 0.24692737430167597, 'no_answer': 0.0, 'sum_logits_corr': -3.136825576974027, 'logits_per_token_corr': -0.7796498243657793, 'logits_per_char_corr': -0.1832775994706518, 'bits_per_byte_corr': 0.264413683862533, 'correct_prob': 0.09486623162446724, 'correct_prob_per_token': 0.49381344900949237, 'correct_prob_per_char': 0.8364907235418093, 'margin': -0.08427854880468126, 'margin_per_token': -0.10471938085171818, 'margin_per_char': -0.05005995678699451, 'total_prob': 0.38246798381621294, 'total_prob_per_token': 1.9620277644509068, 'total_prob_per_char': 3.339841435281396, 'uncond_correct_prob': 3.122569891117856e-09, 'uncond_correct_prob_per_token': 0.002726019932925964, 'uncond_correct_prob_per_char': 0.23831603331303036, 'uncond_total_prob': 1.3119519184468e-08, 'norm_correct_prob': 0.24841232079522088, 'norm_correct_prob_per_token': 0.25181209828661, 'norm_correct_prob_per_char': 0.2504665434430895, 'primary_metric': 0.23687150837988827}
90M | FineWeb-Pro | mmlu_moral_scenarios | 8,750 | small aux 3 | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 2.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.24692737430167597, 'no_answer': 0.0, 'sum_logits_corr': -5.141502273815304, 'logits_per_token_corr': -1.1900232924740186, 'logits_per_char_corr': -0.2809099160773923, 'bits_per_byte_corr': 0.4052673428616799, 'correct_prob': 0.07715282414573174, 'correct_prob_per_token': 0.34784564354772485, 'correct_prob_per_char': 0.7606978843818042, 'margin': -0.15735727876309918, 'margin_per_token': -0.2405066656480876, 'margin_per_char': -0.11668091663884902, 'total_prob': 0.3208258526783562, 'total_prob_per_token': 1.4177202910635511, 'total_prob_per_char': 3.057699445261446, 'uncond_correct_prob': 2.844711066971211e-09, 'uncond_correct_prob_per_token': 0.0019558184470044263, 'uncond_correct_prob_per_char': 0.226703843085656, 'uncond_total_prob': 1.1952968733704397e-08, 'norm_correct_prob': 0.23858880847708758, 'norm_correct_prob_per_token': 0.24538676022770875, 'norm_correct_prob_per_char': 0.24878573054097008, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 8,750 | default | 5xC | 2,867,200,000 | 1,684,995,637,248,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0011173184357541898, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 2.7441340782122907, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23910614525139665, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.24804469273743016, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -3.4793150880150288, 'logits_per_token_corr': -0.811393869435987, 'logits_per_char_corr': -0.19145807855335353, 'logits_per_byte_corr': 0.2762156204672512, 'correct_prob': 0.12283484925160099, 'correct_prob_per_token': 0.47151306826104905, 'correct_prob_per_char': 0.8284428614707497, 'margin': -0.2256457247170641, 'margin_per_token': -0.21844475741958663, 'margin_per_char': -0.0853628324446032, 'total_prob': 0.5127137217297902, 'total_prob_per_token': 1.9115800566560999, 'total_prob_per_char': 3.324441409713717, 'uncond_correct_prob': 4.6894647835846766e-08, 'uncond_correct_prob_per_token': 0.0033713318899520226, 'uncond_correct_prob_per_char': 0.2547184098595455, 'uncond_total_prob': 1.9704461137638313e-07, 'norm_correct_prob': 0.23855784301997446, 'norm_correct_prob_per_token': 0.24664118380124697, 'norm_correct_prob_per_char': 0.24919696609190217, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 10,000 | small aux 2 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 2.9966480446927375, 'predicted_index_per_token': 3.0, 'predicted_index_per_char': 3.0, 'predicted_index_per_byte': 3.0, 'predicted_index_uncond': 2.463687150837989, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.27262569832402234, 'acc_per_token': 0.27262569832402234, 'acc_per_char': 0.27262569832402234, 'acc_per_byte': 0.27262569832402234, 'acc_uncond': 0.25139664804469275, 'no_answer': 0.0, 'sum_logits_corr': -3.574226753138963, 'logits_per_token_corr': -0.9368666759238785, 'logits_per_char_corr': -0.21941430839369386, 'bits_per_byte_corr': 0.3165479346198913, 'correct_prob': 0.03774608882814242, 'correct_prob_per_token': 0.41217150456725316, 'correct_prob_per_char': 0.8051202039165289, 'margin': -0.02938646344419077, 'margin_per_token': -0.1421985976444143, 'margin_per_char': -0.06262973885503649, 'total_prob': 0.145745011441637, 'total_prob_per_token': 1.6219452781328623, 'total_prob_per_char': 3.208579784860095, 'uncond_correct_prob': 6.048390338091706e-09, 'uncond_correct_prob_per_token': 0.0030134460515952415, 'uncond_correct_prob_per_char': 0.2453690100319778, 'uncond_total_prob': 2.541309502937773e-08, 'norm_correct_prob': 0.2601431871017722, 'norm_correct_prob_per_token': 0.2543752168680943, 'norm_correct_prob_per_char': 0.2509441981901869, 'primary_metric': 0.27262569832402234}
90M | FineWeb-Pro | mmlu_moral_scenarios | 10,000 | small aux 3 | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 0.7430167597765364, 'predicted_index_per_token': 1.3217877094972068, 'predicted_index_per_char': 1.2603351955307263, 'predicted_index_per_byte': 1.2603351955307263, 'predicted_index_uncond': 1.0089385474860335, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24692737430167597, 'acc_per_token': 0.2547486033519553, 'acc_per_char': 0.25027932960893856, 'acc_per_byte': 0.25027932960893856, 'acc_uncond': 0.24134078212290502, 'no_answer': 0.0, 'sum_logits_corr': -4.44987362616555, 'logits_per_token_corr': -1.1240610818401173, 'logits_per_char_corr': -0.26383088267840127, 'bits_per_byte_corr': 0.3806275060737609, 'correct_prob': 0.0170958002364196, 'correct_prob_per_token': 0.3351542506685331, 'correct_prob_per_char': 0.7694057670889444, 'margin': -0.014282665256633537, 'margin_per_token': -0.07749223913133112, 'margin_per_char': -0.04146340945039094, 'total_prob': 0.06922282663415516, 'total_prob_per_token': 1.3331130465303258, 'total_prob_per_char': 3.074053045984129, 'uncond_correct_prob': 2.8225276853083784e-09, 'uncond_correct_prob_per_token': 0.002583935667133557, 'uncond_correct_prob_per_char': 0.24077364181413083, 'uncond_total_prob': 1.1859061724722141e-08, 'norm_correct_prob': 0.2455271024979863, 'norm_correct_prob_per_token': 0.25132823590123615, 'norm_correct_prob_per_char': 0.25028471970522526, 'primary_metric': 0.24692737430167597}
90M | FineWeb-Pro | mmlu_moral_scenarios | 10,000 | default | 5xC | 3,276,800,000 | 1,925,709,299,712,000,000 |
{'predicted_index_raw': 1.0, 'predicted_index_per_token': 1.0, 'predicted_index_per_char': 1.0, 'predicted_index_per_byte': 0.24916201117318434, 'predicted_index_uncond': 1.0156424581005588, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.2424581005586592, 'acc_per_token': 0.2424581005586592, 'acc_per_char': 0.2424581005586592, 'acc_per_byte': 0.23128491620111732, 'acc_uncond': 0.2446927374301676, 'no_answer': 0.0, 'sum_logits_corr': -2.6033097989066354, 'logits_per_token_corr': -0.6623636961227467, 'logits_per_char_corr': -0.15533001283376308, 'logits_per_byte_corr': 0.22409383921665, 'correct_prob': 0.1199319860349657, 'correct_prob_per_token': 0.5333626336364098, 'correct_prob_per_char': 0.8576523535467607, 'margin': -0.15794181360953036, 'margin_per_token': -0.1726572646701784, 'margin_per_char': -0.06122732315698133, 'total_prob': 0.4921397123437561, 'total_prob_per_token': 2.134532528930684, 'total_prob_per_char': 3.430401101669186, 'uncond_correct_prob': 7.029526402005845e-10, 'uncond_correct_prob_per_token': 0.002037283948790138, 'uncond_correct_prob_per_char': 0.22726685039659636, 'uncond_total_prob': 2.9534439106464705e-09, 'norm_correct_prob': 0.24382778573932068, 'norm_correct_prob_per_token': 0.2498496605085471, 'norm_correct_prob_per_char': 0.25001333346742843, 'primary_metric': 0.2424581005586592}
90M | FineWeb-Pro | mmlu_moral_scenarios | 11,250 | small aux 2 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 0.0033519553072625698, 'predicted_index_per_token': 1.7698324022346368, 'predicted_index_per_char': 1.481564245810056, 'predicted_index_per_byte': 1.481564245810056, 'predicted_index_uncond': 1.7664804469273743, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23910614525139665, 'acc_per_token': 0.24692737430167597, 'acc_per_char': 0.25139664804469275, 'acc_per_byte': 0.25139664804469275, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -5.1208879835778776, 'logits_per_token_corr': -1.2730297289125523, 'logits_per_char_corr': -0.2991631518418334, 'bits_per_byte_corr': 0.43160119557923504, 'correct_prob': 0.01212700471160152, 'correct_prob_per_token': 0.28664495645948834, 'correct_prob_per_char': 0.7424219288207013, 'margin': -0.018200767489622665, 'margin_per_token': -0.05964832673833602, 'margin_per_char': -0.036850128479573224, 'total_prob': 0.04972693420340126, 'total_prob_per_token': 1.1422472485650237, 'total_prob_per_char': 2.967791463098054, 'uncond_correct_prob': 3.279852403822828e-10, 'uncond_correct_prob_per_token': 0.002065224310612789, 'uncond_correct_prob_per_char': 0.22359712296175416, 'uncond_total_prob': 1.3777536453524248e-09, 'norm_correct_prob': 0.2425173733100683, 'norm_correct_prob_per_token': 0.25085422085338394, 'norm_correct_prob_per_char': 0.25015447848257566, 'primary_metric': 0.23910614525139665}
90M | FineWeb-Pro | mmlu_moral_scenarios | 11,250 | small aux 3 | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 0.21564245810055865, 'predicted_index_per_token': 0.6994413407821229, 'predicted_index_per_char': 0.6715083798882682, 'predicted_index_per_byte': 0.6715083798882682, 'predicted_index_uncond': 1.2145251396648045, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.2324022346368715, 'acc_per_token': 0.24804469273743016, 'acc_per_char': 0.25139664804469275, 'acc_per_byte': 0.25139664804469275, 'acc_uncond': 0.24916201117318434, 'no_answer': 0.0, 'sum_logits_corr': -4.166433506438186, 'logits_per_token_corr': -1.0127851632298481, 'logits_per_char_corr': -0.23832644554468868, 'bits_per_byte_corr': 0.343832381100263, 'correct_prob': 0.04975714320275277, 'correct_prob_per_token': 0.3883063370181609, 'correct_prob_per_char': 0.7911123744387372, 'margin': -0.06470293044512221, 'margin_per_token': -0.13811082269378808, 'margin_per_char': -0.06915869864022806, 'total_prob': 0.2087847696318494, 'total_prob_per_token': 1.5672168319851971, 'total_prob_per_char': 3.1708558345820017, 'uncond_correct_prob': 7.814021109186771e-10, 'uncond_correct_prob_per_token': 0.0011630425637950362, 'uncond_correct_prob_per_char': 0.20025857329675462, 'uncond_total_prob': 3.2833447319376463e-09, 'norm_correct_prob': 0.23954719557598736, 'norm_correct_prob_per_token': 0.247768559600236, 'norm_correct_prob_per_char': 0.24949279450059594, 'primary_metric': 0.2324022346368715}
90M | FineWeb-Pro | mmlu_moral_scenarios | 11,250 | default | 5xC | 3,686,400,000 | 2,166,422,962,176,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0011173184357541898, 'predicted_index_per_char': 0.0011173184357541898, 'predicted_index_per_byte': 2.029050279329609, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.24581005586592178, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.997507275482795, 'logits_per_token_corr': -1.1728851889010028, 'logits_per_char_corr': -0.27665391211685125, 'logits_per_byte_corr': 0.39912722705379916, 'correct_prob': 0.07217207224757533, 'correct_prob_per_token': 0.3506436163332702, 'correct_prob_per_char': 0.7635515176374903, 'margin': -0.148808365755274, 'margin_per_token': -0.2354230411269248, 'margin_per_char': -0.11499151465866758, 'total_prob': 0.30444435294501826, 'total_prob_per_token': 1.4300726282019665, 'total_prob_per_char': 3.0676347590236475, 'uncond_correct_prob': 3.84664364788692e-08, 'uncond_correct_prob_per_token': 0.004453660311203864, 'uncond_correct_prob_per_char': 0.27603320563536327, 'uncond_total_prob': 1.6162597361067635e-07, 'norm_correct_prob': 0.23773543614784262, 'norm_correct_prob_per_token': 0.24537543837878112, 'norm_correct_prob_per_char': 0.2489169205055804, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 12,500 | small aux 2 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0011173184357542, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2435754189944134, 'no_answer': 0.0, 'sum_logits_corr': -5.425622630252518, 'logits_per_token_corr': -1.249649927425207, 'logits_per_char_corr': -0.2951472671276883, 'bits_per_byte_corr': 0.425807498617352, 'correct_prob': 0.13073404798801125, 'correct_prob_per_token': 0.35342982817983565, 'correct_prob_per_char': 0.7523891715120099, 'margin': -0.2858965237866393, 'margin_per_token': -0.32601924290351425, 'margin_per_char': -0.14498542142387738, 'total_prob': 0.5510264819200594, 'total_prob_per_token': 1.4494731994972574, 'total_prob_per_char': 3.0272773914426123, 'uncond_correct_prob': 5.157451553928877e-08, 'uncond_correct_prob_per_token': 0.004795673348563654, 'uncond_correct_prob_per_char': 0.28157662000783895, 'uncond_total_prob': 2.1670037068392105e-07, 'norm_correct_prob': 0.23795772093262466, 'norm_correct_prob_per_token': 0.24376457216067893, 'norm_correct_prob_per_char': 0.24852869845519052, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 12,500 | small aux 3 | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.729608938547486, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.21899441340782122, 'no_answer': 0.0, 'sum_logits_corr': -5.799823513310715, 'logits_per_token_corr': -1.3817957405759858, 'logits_per_char_corr': -0.3257507237807337, 'bits_per_byte_corr': 0.4699589537647927, 'correct_prob': 0.08731859568482794, 'correct_prob_per_token': 0.31556323074850434, 'correct_prob_per_char': 0.730810534151508, 'margin': -0.18728801494211722, 'margin_per_token': -0.2885306961099858, 'margin_per_char': -0.14692718980604993, 'total_prob': 0.36357874716252075, 'total_prob_per_token': 1.2771796532550497, 'total_prob_per_char': 2.9288804361816263, 'uncond_correct_prob': 2.422165019366926e-09, 'uncond_correct_prob_per_token': 0.0019423540594683327, 'uncond_correct_prob_per_char': 0.21963556900018147, 'uncond_total_prob': 1.0177483552170383e-08, 'norm_correct_prob': 0.23828340349444854, 'norm_correct_prob_per_token': 0.24708529627620532, 'norm_correct_prob_per_char': 0.24952823066969843, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 12,500 | default | 5xC | 4,096,000,000 | 2,407,136,624,640,000,000 |
{'predicted_index_raw': 0.0033519553072625698, 'predicted_index_per_token': 0.2122905027932961, 'predicted_index_per_char': 0.17988826815642459, 'predicted_index_per_byte': 2.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.2424581005586592, 'acc_per_char': 0.23910614525139665, 'acc_per_byte': 0.24692737430167597, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.2253445855732075, 'logits_per_token_corr': -1.0008221694100057, 'logits_per_char_corr': -0.2358954277570653, 'logits_per_byte_corr': 0.34032516379374333, 'correct_prob': 0.07557845787605166, 'correct_prob_per_token': 0.40488720388934885, 'correct_prob_per_char': 0.7943618499831344, 'margin': -0.1094624757125373, 'margin_per_token': -0.18435006282979077, 'margin_per_char': -0.08973029301918513, 'total_prob': 0.3154407663987649, 'total_prob_per_token': 1.6417119793277162, 'total_prob_per_char': 3.1875104498215827, 'uncond_correct_prob': 5.765570185169472e-10, 'uncond_correct_prob_per_token': 0.002673858971320439, 'uncond_correct_prob_per_char': 0.23610683725745585, 'uncond_total_prob': 2.420991737668732e-09, 'norm_correct_prob': 0.23968920030137095, 'norm_correct_prob_per_token': 0.24665374825239106, 'norm_correct_prob_per_char': 0.2492118031156158, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 13,750 | small aux 2 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.076324250245228, 'logits_per_token_corr': -0.9773663113814272, 'logits_per_char_corr': -0.23027106188635652, 'bits_per_byte_corr': 0.3322109190439208, 'correct_prob': 0.07725761986160942, 'correct_prob_per_token': 0.40184121432383046, 'correct_prob_per_char': 0.7971079093775854, 'margin': -0.1538355621966155, 'margin_per_token': -0.19854194793510316, 'margin_per_char': -0.08639265601282535, 'total_prob': 0.32234621712593, 'total_prob_per_token': 1.6181793029243827, 'total_prob_per_char': 3.192724577643423, 'uncond_correct_prob': 1.4360117349827573e-09, 'uncond_correct_prob_per_token': 0.0022129225643125585, 'uncond_correct_prob_per_char': 0.22802276578558897, 'uncond_total_prob': 6.033527989490803e-09, 'norm_correct_prob': 0.2391251876986048, 'norm_correct_prob_per_token': 0.2482938191083569, 'norm_correct_prob_per_char': 0.2496620858498951, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 13,750 | small aux 3 | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 0.01675977653631285, 'predicted_index_per_token': 2.1966480446927372, 'predicted_index_per_char': 1.8826815642458101, 'predicted_index_per_byte': 1.8826815642458101, 'predicted_index_uncond': 1.853631284916201, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23910614525139665, 'acc_per_token': 0.2782122905027933, 'acc_per_char': 0.27932960893854747, 'acc_per_byte': 0.27932960893854747, 'acc_uncond': 0.264804469273743, 'no_answer': 0.0, 'sum_logits_corr': -6.221449176159651, 'logits_per_token_corr': -1.5515818799496586, 'logits_per_char_corr': -0.3644984003606805, 'bits_per_byte_corr': 0.5258600346126915, 'correct_prob': 0.004091591806445321, 'correct_prob_per_token': 0.21671475628681047, 'correct_prob_per_char': 0.6954246363053376, 'margin': -0.006279493142314287, 'margin_per_token': -0.03339679884104945, 'margin_per_char': -0.025450268796800454, 'total_prob': 0.01724059283955332, 'total_prob_per_token': 0.8649839330583634, 'total_prob_per_char': 2.7810379799050327, 'uncond_correct_prob': 1.9712184446690292e-10, 'uncond_correct_prob_per_token': 0.0013364301079152293, 'uncond_correct_prob_per_char': 0.20472417535040707, 'uncond_total_prob': 8.282420415813908e-10, 'norm_correct_prob': 0.24057639842308864, 'norm_correct_prob_per_token': 0.25062749326987127, 'norm_correct_prob_per_char': 0.2500644044042136, 'primary_metric': 0.23910614525139665}
90M | FineWeb-Pro | mmlu_moral_scenarios | 13,750 | default | 5xC | 4,505,600,000 | 2,647,850,287,104,000,000 |
{'predicted_index_raw': 0.9575418994413408, 'predicted_index_per_token': 0.9955307262569832, 'predicted_index_per_char': 0.994413407821229, 'predicted_index_per_byte': 2.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.24134078212290502, 'acc_per_token': 0.2424581005586592, 'acc_per_char': 0.2424581005586592, 'acc_per_byte': 0.24692737430167597, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -3.5933488460892407, 'logits_per_token_corr': -0.8865924820797846, 'logits_per_char_corr': -0.2084354462767915, 'logits_per_byte_corr': 0.300708784689221, 'correct_prob': 0.09518484545035327, 'correct_prob_per_token': 0.45501277269850693, 'correct_prob_per_char': 0.8167409185322306, 'margin': -0.13318900324856833, 'margin_per_token': -0.20576320716866434, 'margin_per_char': -0.08854996553034274, 'total_prob': 0.3952799319955906, 'total_prob_per_token': 1.8266434182595606, 'total_prob_per_char': 3.2687781516745087, 'uncond_correct_prob': 3.7367655889601536e-10, 'uncond_correct_prob_per_token': 0.0015045085918140506, 'uncond_correct_prob_per_char': 0.21179247536129597, 'uncond_total_prob': 1.570058310977743e-09, 'norm_correct_prob': 0.2417725672381, 'norm_correct_prob_per_token': 0.24908111913612338, 'norm_correct_prob_per_char': 0.24985836488341606, 'primary_metric': 0.24134078212290502}
90M | FineWeb-Pro | mmlu_moral_scenarios | 15,000 | small aux 2 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.1340782122905029, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2446927374301676, 'no_answer': 0.0, 'sum_logits_corr': -3.875452856511377, 'logits_per_token_corr': -0.9261638157372146, 'logits_per_char_corr': -0.21825310635058642, 'bits_per_byte_corr': 0.31487267419082876, 'correct_prob': 0.09077177287504823, 'correct_prob_per_token': 0.425716386259101, 'correct_prob_per_char': 0.807159296902064, 'margin': -0.17638788965822613, 'margin_per_token': -0.20141518099376152, 'margin_per_char': -0.08535529664810862, 'total_prob': 0.37824409726082286, 'total_prob_per_token': 1.7121525053515627, 'total_prob_per_char': 3.2316676198829577, 'uncond_correct_prob': 7.251046210825365e-10, 'uncond_correct_prob_per_token': 0.0030778669483173997, 'uncond_correct_prob_per_char': 0.24303454522923218, 'uncond_total_prob': 3.0433038779197176e-09, 'norm_correct_prob': 0.23976405441897497, 'norm_correct_prob_per_token': 0.24862361290730944, 'norm_correct_prob_per_char': 0.24976395329961046, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 15,000 | small aux 3 | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.512847010583185, 'logits_per_token_corr': -1.0256883601059017, 'logits_per_char_corr': -0.24242536216540642, 'bits_per_byte_corr': 0.34974586778199407, 'correct_prob': 0.16878225926457555, 'correct_prob_per_token': 0.4344171921217875, 'correct_prob_per_char': 0.7933025990388942, 'margin': -0.33269013376683526, 'margin_per_token': -0.3415928836957688, 'margin_per_char': -0.14538492142309778, 'total_prob': 0.7073018114731442, 'total_prob_per_token': 1.7796469627810607, 'total_prob_per_char': 3.1930765448831235, 'uncond_correct_prob': 1.4988216329750242e-09, 'uncond_correct_prob_per_token': 0.001725846119526236, 'uncond_correct_prob_per_char': 0.22067147200176793, 'uncond_total_prob': 6.297761857528863e-09, 'norm_correct_prob': 0.2386989287151603, 'norm_correct_prob_per_token': 0.24406371258064122, 'norm_correct_prob_per_char': 0.2484381665173282, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 15,000 | default | 5xC | 4,915,200,000 | 2,888,563,949,568,000,000 |
{'predicted_index_raw': 0.0022346368715083797, 'predicted_index_per_token': 0.07039106145251396, 'predicted_index_per_char': 0.05921787709497207, 'predicted_index_per_byte': 2.176536312849162, 'predicted_index_uncond': 2.4793296089385475, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23687150837988827, 'acc_per_token': 0.2324022346368715, 'acc_per_char': 0.2324022346368715, 'acc_per_byte': 0.2547486033519553, 'acc_uncond': 0.2547486033519553, 'no_answer': 0.0, 'sum_logits_corr': -2.514910785246162, 'logits_per_token_corr': -0.6033591550932695, 'logits_per_char_corr': -0.1421125004224145, 'logits_per_byte_corr': 0.20502499960789541, 'correct_prob': 0.13307687839634236, 'correct_prob_per_token': 0.5562338908460762, 'correct_prob_per_char': 0.8683423551496541, 'margin': -0.17297557994136953, 'margin_per_token': -0.12160167067107293, 'margin_per_char': -0.04378457827835042, 'total_prob': 0.5503148708253639, 'total_prob_per_token': 2.2349815490688867, 'total_prob_per_char': 3.4771300820619357, 'uncond_correct_prob': 8.080132028074675e-09, 'uncond_correct_prob_per_token': 0.002863691874969631, 'uncond_correct_prob_per_char': 0.24887100146959168, 'uncond_total_prob': 3.3950700976544286e-08, 'norm_correct_prob': 0.24120509168429471, 'norm_correct_prob_per_token': 0.2488186604489495, 'norm_correct_prob_per_char': 0.24972586704371455, 'primary_metric': 0.23687150837988827}
90M | FineWeb-Pro | mmlu_moral_scenarios | 17,500 | small aux 2 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.017877094972067, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.24581005586592178, 'no_answer': 0.0, 'sum_logits_corr': -4.207223537347836, 'logits_per_token_corr': -0.9715318376103371, 'logits_per_char_corr': -0.22941491597175082, 'bits_per_byte_corr': 0.33097576157864084, 'correct_prob': 0.13866129860976725, 'correct_prob_per_token': 0.42408930163075165, 'correct_prob_per_char': 0.7997016214256903, 'margin': -0.28928062883484895, 'margin_per_token': -0.2838767077412711, 'margin_per_char': -0.11335165146592491, 'total_prob': 0.5817206630613972, 'total_prob_per_token': 1.7281049490439178, 'total_prob_per_char': 3.212734505898085, 'uncond_correct_prob': 4.683223267868936e-10, 'uncond_correct_prob_per_token': 0.0029535054394859758, 'uncond_correct_prob_per_char': 0.23927427009336266, 'uncond_total_prob': 1.9646879232446408e-09, 'norm_correct_prob': 0.238300031710966, 'norm_correct_prob_per_token': 0.24548437841018272, 'norm_correct_prob_per_char': 0.24892158473974685, 'primary_metric': 0.23798882681564246}
90M | FineWeb-Pro | mmlu_moral_scenarios | 17,500 | small aux 3 | 5xC | 5,734,400,000 | 3,369,991,274,496,000,000 |
{'predicted_index_raw': 0.0, 'predicted_index_per_token': 0.0, 'predicted_index_per_char': 0.0, 'predicted_index_per_byte': 0.0, 'predicted_index_uncond': 1.0, 'correct_choice': 1.5541899441340783, 'acc_raw': 0.23798882681564246, 'acc_per_token': 0.23798882681564246, 'acc_per_char': 0.23798882681564246, 'acc_per_byte': 0.23798882681564246, 'acc_uncond': 0.2424581005586592, 'no_answer': 0.0, 'sum_logits_corr': -4.47563811860271, 'logits_per_token_corr': -1.0294396668883454, 'logits_per_char_corr': -0.2431260220322633, 'bits_per_byte_corr': 0.3507567062972591, 'correct_prob': 0.12706681586546092, 'correct_prob_per_token': 0.4114966911213554, 'correct_prob_per_char': 0.7903023450465053, 'margin': -0.25698626227192095, 'margin_per_token': -0.29298284279085773, 'margin_per_char': -0.12654017337867776, 'total_prob': 0.5328524036069637, 'total_prob_per_token': 1.6804038679057869, 'total_prob_per_char': 3.177902175624898, 'uncond_correct_prob': 2.238772774836036e-10, 'uncond_correct_prob_per_token': 0.0011948649549980421, 'uncond_correct_prob_per_char': 0.2012160644591828, 'uncond_total_prob': 9.406804494118161e-10, 'norm_correct_prob': 0.23866040533221858, 'norm_correct_prob_per_token': 0.2448499373699553, 'norm_correct_prob_per_char': 0.24868360605950365, 'primary_metric': 0.23798882681564246}
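Each record above is a pipe-delimited field line followed by a metrics cell stored as a stringified Python dict. Below is a minimal parsing sketch under that assumption; the helper name `parse_record` and the positional handling of the fields are illustrative only, not part of the dataset itself, and the example row is shortened for readability.

```python
import ast

def parse_record(field_line: str, metrics_line: str) -> tuple[list[str], dict]:
    """Split one pipe-delimited field line and parse the metrics dict on the next line."""
    # Drop the trailing delimiter, then split the remaining cells on '|'.
    fields = [f.strip() for f in field_line.strip().strip("|").split("|")]
    # The metrics cell is a Python-literal dict, so ast.literal_eval can parse it safely.
    metrics = ast.literal_eval(metrics_line.strip())
    return fields, metrics

# Example using one row from above (metrics shortened to two keys for illustration):
fields, metrics = parse_record(
    "90M | FineWeb-Pro | mmlu_moral_scenarios | 17,500 | small aux 3 | 5xC "
    "| 5,734,400,000 | 3,369,991,274,496,000,000 |",
    "{'acc_raw': 0.23798882681564246, 'primary_metric': 0.23798882681564246}",
)
print(fields[2], fields[3], metrics["primary_metric"])
# -> mmlu_moral_scenarios 17,500 0.23798882681564246
```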