{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Defaulting to user installation because normal site-packages is not writeable\n",
      "Requirement already satisfied: pandas in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (2.2.0)\n",
      "Requirement already satisfied: pytz>=2020.1 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
      "Requirement already satisfied: numpy<2,>=1.22.4 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (1.26.1)\n",
      "Requirement already satisfied: tzdata>=2022.7 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n",
      "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2.8.2)\n",
      "Requirement already satisfied: six>=1.5 in /Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/site-packages (from python-dateutil>=2.8.2->pandas) (1.15.0)\n",
      "\u001b[33mWARNING: You are using pip version 21.2.4; however, version 24.1.2 is available.\n",
      "You should consider upgrading via the '/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "!pip3 install pandas"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Get the filelist\n",
    "\n",
    "For the full results.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 6042 results.json files\n"
     ]
    }
   ],
   "source": [
    "import glob\n",
    "\n",
    "# Specify the path to the folder containing the results.json files\n",
    "folder_path = \"lm-eval-output\"\n",
    "\n",
    "# Use glob to find all the results.json files\n",
    "results_json_files = glob.glob(f\"{folder_path}/**/results.json\", recursive=True)\n",
    "\n",
    "# Show total number of results.json files found\n",
    "print(f\"Found {len(results_json_files)} results.json files\")\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Process all the results.json\n",
    "\n",
    "One file at a time"
   ]
  },
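  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of what the per-file parsing could look like, assuming each results.json follows the usual lm-eval-harness layout with top-level `config` and `results` keys. The `parse_results_file` helper is illustrative only; the notebook's actual processing (whose output is shown below) builds a richer object combining the model name with per-config results."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import json\n",
    "\n",
    "def parse_results_file(file_path):\n",
    "    # Illustrative helper, not the notebook's real implementation:\n",
    "    # load one results.json and keep the model args string (assumed\n",
    "    # to live under config.model_args) and the per-task results dict.\n",
    "    with open(file_path, \"r\") as f:\n",
    "        data = json.load(f)\n",
    "    return {\n",
    "        \"model_args\": data.get(\"config\", {}).get(\"model_args\", \"\"),\n",
    "        \"results\": data.get(\"results\", {}),\n",
    "    }\n",
    "\n",
    "# Quick check against the first discovered file\n",
    "if results_json_files:\n",
    "    example = parse_results_file(results_json_files[0])\n",
    "    print(\"Tasks found:\", len(example[\"results\"]))"
   ]
  },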
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Processed example:  {'name': 'mistralai/Mistral-7B-Instruct-v0.2', 'config': {'dtype=bfloat16,trust_remote_code=True': {'confStr': 'dtype=bfloat16,trust_remote_code=True', 'confObj': {'dtype': 'bfloat16', 'trust_remote_code': 'True'}, 'results': {'cola': {'mcc,none': 0.12965656914783247, 'mcc_stderr,none': 0.032433640730190394, 'alias': 'cola'}, 'anli': {'acc,none': 0.470625, 'acc_stderr,none': 0.016323170842139138, 'alias': 'anli'}, 'anli_r1': {'acc,none': 0.482, 'acc_stderr,none': 0.015809045699406728, 'alias': ' - anli_r1'}, 'anli_r2': {'acc,none': 0.46, 'acc_stderr,none': 0.015768596914394382, 'alias': ' - anli_r2'}, 'anli_r3': {'acc,none': 0.47, 'acc_stderr,none': 0.01441375952760986, 'alias': ' - anli_r3'}, 'wsc': {'acc,none': 0.6346153846153846, 'acc_stderr,none': 0.047447333932779195, 'alias': 'wsc'}, 'lambada_cloze': {'perplexity,none': 79.69768925561641, 'perplexity_stderr,none': 21.41458436331952, 'acc,none': 0.2092955559868038, 'acc_stderr,none': 0.048106457496816746, 'alias': 'lambada_cloze'}, 'lambada_openai_cloze_yaml': {'perplexity,none': 37.327498045923896, 'perplexity_stderr,none': 1.1669176082652388, 'acc,none': 0.30487094896176986, 'acc_stderr,none': 0.006413613926848414, 'alias': ' - lambada_openai_cloze_yaml'}, 'lambada_standard_cloze_yaml': {'perplexity,none': 122.06788046530893, 'perplexity_stderr,none': 4.255042040526601, 'acc,none': 0.11372016301183777, 'acc_stderr,none': 0.004422992919917964, 'alias': ' - lambada_standard_cloze_yaml'}, 'glue': {'mcc,none': 0.1275893167139847, 'mcc_stderr,none': 0.0010517992291010554, 'acc,none': 0.5822142909581908, 'acc_stderr,none': 0.032790502714334625, 'f1,none': 0.4411327873267142, 'f1_stderr,none': 0.0012831556331226797, 'alias': 'glue'}, 'mnli': {'acc,none': 0.5529291900152827, 'acc_stderr,none': 0.005018800001869641, 'alias': 'mnli'}, 'mnli_mismatch': {'acc,none': 0.5537021969080553, 'acc_stderr,none': 0.0050136227202211975, 'alias': 'mnli_mismatch'}, 'mrpc': {'acc,none': 0.7328431372549019, 'acc_stderr,none': 0.021932668544150206, 'f1,none': 0.8310077519379845, 'f1_stderr,none': 0.01593988577529229, 'alias': 'mrpc'}, 'qnli': {'acc,none': 0.5842943437671609, 'acc_stderr,none': 0.0066685737728695215, 'alias': 'qnli'}, 'qqp': {'acc,none': 0.5877813504823152, 'acc_stderr,none': 0.00244807782265664, 'f1,none': 0.4384014018061733, 'f1_stderr,none': 0.0036014830728846657, 'alias': 'qqp'}, 'rte': {'acc,none': 0.7003610108303249, 'acc_stderr,none': 0.02757437014529261, 'alias': 'rte'}, 'sst2': {'acc,none': 0.8555045871559633, 'acc_stderr,none': 0.01191321895589123, 'alias': 'sst2'}, 'wnli': {'acc,none': 0.6056338028169014, 'acc_stderr,none': 0.05841251085444426, 'alias': 'wnli'}, 'blimp': {'acc,none': 0.8340597014925373, 'acc_stderr,none': 0.13897696485795538, 'alias': 'blimp'}, 'blimp_adjunct_island': {'acc,none': 0.907, 'acc_stderr,none': 0.009188875634996688, 'alias': ' - blimp_adjunct_island'}, 'blimp_anaphor_gender_agreement': {'acc,none': 0.988, 'acc_stderr,none': 0.0034449771940998383, 'alias': ' - blimp_anaphor_gender_agreement'}, 'blimp_anaphor_number_agreement': {'acc,none': 0.995, 'acc_stderr,none': 0.0022315868748448812, 'alias': ' - blimp_anaphor_number_agreement'}, 'blimp_animate_subject_passive': {'acc,none': 0.793, 'acc_stderr,none': 0.012818553557843983, 'alias': ' - blimp_animate_subject_passive'}, 'blimp_animate_subject_trans': {'acc,none': 0.911, 'acc_stderr,none': 0.009008893392651514, 'alias': ' - blimp_animate_subject_trans'}, 'blimp_causative': {'acc,none': 0.779, 'acc_stderr,none': 
0.013127502859696232, 'alias': ' - blimp_causative'}, 'blimp_complex_NP_island': {'acc,none': 0.614, 'acc_stderr,none': 0.015402637476784376, 'alias': ' - blimp_complex_NP_island'}, 'blimp_coordinate_structure_constraint_complex_left_branch': {'acc,none': 0.805, 'acc_stderr,none': 0.012535235623319325, 'alias': ' - blimp_coordinate_structure_constraint_complex_left_branch'}, 'blimp_coordinate_structure_constraint_object_extraction': {'acc,none': 0.892, 'acc_stderr,none': 0.00982000165134572, 'alias': ' - blimp_coordinate_structure_constraint_object_extraction'}, 'blimp_determiner_noun_agreement_1': {'acc,none': 0.985, 'acc_stderr,none': 0.003845749574503001, 'alias': ' - blimp_determiner_noun_agreement_1'}, 'blimp_determiner_noun_agreement_2': {'acc,none': 0.975, 'acc_stderr,none': 0.0049395748196984675, 'alias': ' - blimp_determiner_noun_agreement_2'}, 'blimp_determiner_noun_agreement_irregular_1': {'acc,none': 0.926, 'acc_stderr,none': 0.008282064512704159, 'alias': ' - blimp_determiner_noun_agreement_irregular_1'}, 'blimp_determiner_noun_agreement_irregular_2': {'acc,none': 0.959, 'acc_stderr,none': 0.006273624021118755, 'alias': ' - blimp_determiner_noun_agreement_irregular_2'}, 'blimp_determiner_noun_agreement_with_adj_2': {'acc,none': 0.953, 'acc_stderr,none': 0.00669595667816304, 'alias': ' - blimp_determiner_noun_agreement_with_adj_2'}, 'blimp_determiner_noun_agreement_with_adj_irregular_1': {'acc,none': 0.909, 'acc_stderr,none': 0.009099549538400241, 'alias': ' - blimp_determiner_noun_agreement_with_adj_irregular_1'}, 'blimp_determiner_noun_agreement_with_adj_irregular_2': {'acc,none': 0.932, 'acc_stderr,none': 0.007964887911291603, 'alias': ' - blimp_determiner_noun_agreement_with_adj_irregular_2'}, 'blimp_determiner_noun_agreement_with_adjective_1': {'acc,none': 0.958, 'acc_stderr,none': 0.006346359293033844, 'alias': ' - blimp_determiner_noun_agreement_with_adjective_1'}, 'blimp_distractor_agreement_relational_noun': {'acc,none': 0.922, 'acc_stderr,none': 0.008484573530118581, 'alias': ' - blimp_distractor_agreement_relational_noun'}, 'blimp_distractor_agreement_relative_clause': {'acc,none': 0.835, 'acc_stderr,none': 0.01174363286691615, 'alias': ' - blimp_distractor_agreement_relative_clause'}, 'blimp_drop_argument': {'acc,none': 0.821, 'acc_stderr,none': 0.012128730605719092, 'alias': ' - blimp_drop_argument'}, 'blimp_ellipsis_n_bar_1': {'acc,none': 0.88, 'acc_stderr,none': 0.010281328012747377, 'alias': ' - blimp_ellipsis_n_bar_1'}, 'blimp_ellipsis_n_bar_2': {'acc,none': 0.91, 'acc_stderr,none': 0.009054390204866435, 'alias': ' - blimp_ellipsis_n_bar_2'}, 'blimp_existential_there_object_raising': {'acc,none': 0.832, 'acc_stderr,none': 0.01182860583145427, 'alias': ' - blimp_existential_there_object_raising'}, 'blimp_existential_there_quantifiers_1': {'acc,none': 0.986, 'acc_stderr,none': 0.0037172325482565786, 'alias': ' - blimp_existential_there_quantifiers_1'}, 'blimp_existential_there_quantifiers_2': {'acc,none': 0.541, 'acc_stderr,none': 0.015766025737882165, 'alias': ' - blimp_existential_there_quantifiers_2'}, 'blimp_existential_there_subject_raising': {'acc,none': 0.889, 'acc_stderr,none': 0.009938701010583726, 'alias': ' - blimp_existential_there_subject_raising'}, 'blimp_expletive_it_object_raising': {'acc,none': 0.832, 'acc_stderr,none': 0.01182860583145426, 'alias': ' - blimp_expletive_it_object_raising'}, 'blimp_inchoative': {'acc,none': 0.69, 'acc_stderr,none': 0.0146326386586329, 'alias': ' - blimp_inchoative'}, 'blimp_intransitive': {'acc,none': 0.849, 
'acc_stderr,none': 0.011328165223341681, 'alias': ' - blimp_intransitive'}, 'blimp_irregular_past_participle_adjectives': {'acc,none': 0.935, 'acc_stderr,none': 0.007799733061832009, 'alias': ' - blimp_irregular_past_participle_adjectives'}, 'blimp_irregular_past_participle_verbs': {'acc,none': 0.906, 'acc_stderr,none': 0.009233052000787726, 'alias': ' - blimp_irregular_past_participle_verbs'}, 'blimp_irregular_plural_subject_verb_agreement_1': {'acc,none': 0.927, 'acc_stderr,none': 0.008230354715244068, 'alias': ' - blimp_irregular_plural_subject_verb_agreement_1'}, 'blimp_irregular_plural_subject_verb_agreement_2': {'acc,none': 0.919, 'acc_stderr,none': 0.008632121032139973, 'alias': ' - blimp_irregular_plural_subject_verb_agreement_2'}, 'blimp_left_branch_island_echo_question': {'acc,none': 0.584, 'acc_stderr,none': 0.015594460144140601, 'alias': ' - blimp_left_branch_island_echo_question'}, 'blimp_left_branch_island_simple_question': {'acc,none': 0.923, 'acc_stderr,none': 0.008434580140240656, 'alias': ' - blimp_left_branch_island_simple_question'}, 'blimp_matrix_question_npi_licensor_present': {'acc,none': 0.595, 'acc_stderr,none': 0.015531136990453042, 'alias': ' - blimp_matrix_question_npi_licensor_present'}, 'blimp_npi_present_1': {'acc,none': 0.608, 'acc_stderr,none': 0.015445859463771304, 'alias': ' - blimp_npi_present_1'}, 'blimp_npi_present_2': {'acc,none': 0.681, 'acc_stderr,none': 0.014746404865473487, 'alias': ' - blimp_npi_present_2'}, 'blimp_only_npi_licensor_present': {'acc,none': 0.942, 'acc_stderr,none': 0.007395315455792925, 'alias': ' - blimp_only_npi_licensor_present'}, 'blimp_only_npi_scope': {'acc,none': 0.728, 'acc_stderr,none': 0.014078856992462621, 'alias': ' - blimp_only_npi_scope'}, 'blimp_passive_1': {'acc,none': 0.888, 'acc_stderr,none': 0.00997775303139724, 'alias': ' - blimp_passive_1'}, 'blimp_passive_2': {'acc,none': 0.902, 'acc_stderr,none': 0.00940661918462123, 'alias': ' - blimp_passive_2'}, 'blimp_principle_A_c_command': {'acc,none': 0.771, 'acc_stderr,none': 0.013294199326613621, 'alias': ' - blimp_principle_A_c_command'}, 'blimp_principle_A_case_1': {'acc,none': 1.0, 'acc_stderr,none': 0.0, 'alias': ' - blimp_principle_A_case_1'}, 'blimp_principle_A_case_2': {'acc,none': 0.951, 'acc_stderr,none': 0.006829761756140911, 'alias': ' - blimp_principle_A_case_2'}, 'blimp_principle_A_domain_1': {'acc,none': 0.992, 'acc_stderr,none': 0.0028185003005045074, 'alias': ' - blimp_principle_A_domain_1'}, 'blimp_principle_A_domain_2': {'acc,none': 0.808, 'acc_stderr,none': 0.012461592646659988, 'alias': ' - blimp_principle_A_domain_2'}, 'blimp_principle_A_domain_3': {'acc,none': 0.702, 'acc_stderr,none': 0.0144708467411347, 'alias': ' - blimp_principle_A_domain_3'}, 'blimp_principle_A_reconstruction': {'acc,none': 0.449, 'acc_stderr,none': 0.01573679276875202, 'alias': ' - blimp_principle_A_reconstruction'}, 'blimp_regular_plural_subject_verb_agreement_1': {'acc,none': 0.949, 'acc_stderr,none': 0.00696042006257141, 'alias': ' - blimp_regular_plural_subject_verb_agreement_1'}, 'blimp_regular_plural_subject_verb_agreement_2': {'acc,none': 0.908, 'acc_stderr,none': 0.00914437639315109, 'alias': ' - blimp_regular_plural_subject_verb_agreement_2'}, 'blimp_sentential_negation_npi_licensor_present': {'acc,none': 0.994, 'acc_stderr,none': 0.0024433521993298163, 'alias': ' - blimp_sentential_negation_npi_licensor_present'}, 'blimp_sentential_negation_npi_scope': {'acc,none': 0.785, 'acc_stderr,none': 0.012997843819031808, 'alias': ' - 
blimp_sentential_negation_npi_scope'}, 'blimp_sentential_subject_island': {'acc,none': 0.472, 'acc_stderr,none': 0.015794475789511476, 'alias': ' - blimp_sentential_subject_island'}, 'blimp_superlative_quantifiers_1': {'acc,none': 0.905, 'acc_stderr,none': 0.009276910103103327, 'alias': ' - blimp_superlative_quantifiers_1'}, 'blimp_superlative_quantifiers_2': {'acc,none': 0.779, 'acc_stderr,none': 0.01312750285969624, 'alias': ' - blimp_superlative_quantifiers_2'}, 'blimp_tough_vs_raising_1': {'acc,none': 0.657, 'acc_stderr,none': 0.015019206922356951, 'alias': ' - blimp_tough_vs_raising_1'}, 'blimp_tough_vs_raising_2': {'acc,none': 0.899, 'acc_stderr,none': 0.009533618929340988, 'alias': ' - blimp_tough_vs_raising_2'}, 'blimp_transitive': {'acc,none': 0.879, 'acc_stderr,none': 0.010318210380946087, 'alias': ' - blimp_transitive'}, 'blimp_wh_island': {'acc,none': 0.794, 'acc_stderr,none': 0.012795613612786543, 'alias': ' - blimp_wh_island'}, 'blimp_wh_questions_object_gap': {'acc,none': 0.868, 'acc_stderr,none': 0.01070937396352803, 'alias': ' - blimp_wh_questions_object_gap'}, 'blimp_wh_questions_subject_gap': {'acc,none': 0.925, 'acc_stderr,none': 0.008333333333333337, 'alias': ' - blimp_wh_questions_subject_gap'}, 'blimp_wh_questions_subject_gap_long_distance': {'acc,none': 0.91, 'acc_stderr,none': 0.009054390204866442, 'alias': ' - blimp_wh_questions_subject_gap_long_distance'}, 'blimp_wh_vs_that_no_gap': {'acc,none': 0.972, 'acc_stderr,none': 0.005219506034410054, 'alias': ' - blimp_wh_vs_that_no_gap'}, 'blimp_wh_vs_that_no_gap_long_distance': {'acc,none': 0.97, 'acc_stderr,none': 0.005397140829099197, 'alias': ' - blimp_wh_vs_that_no_gap_long_distance'}, 'blimp_wh_vs_that_with_gap': {'acc,none': 0.394, 'acc_stderr,none': 0.01545972195749338, 'alias': ' - blimp_wh_vs_that_with_gap'}, 'blimp_wh_vs_that_with_gap_long_distance': {'acc,none': 0.343, 'acc_stderr,none': 0.015019206922356951, 'alias': ' - blimp_wh_vs_that_with_gap_long_distance'}, 'sciq': {'acc,none': 0.954, 'acc_stderr,none': 0.006627814717380709, 'acc_norm,none': 0.905, 'acc_norm_stderr,none': 0.009276910103103298, 'alias': 'sciq'}, 'crows_pairs': {'likelihood_diff,none': 4.708786246175203, 'likelihood_diff_stderr,none': 0.4977051425278663, 'pct_stereotype,none': 0.586463923673226, 'pct_stereotype_stderr,none': 0.08249358792815063, 'alias': 'crows_pairs'}, 'crows_pairs_english': {'likelihood_diff,none': 4.7318490926325145, 'likelihood_diff_stderr,none': 0.11342322831385568, 'pct_stereotype,none': 0.654144305307096, 'pct_stereotype_stderr,none': 0.011618424517571955, 'alias': ' - crows_pairs_english'}, 'crows_pairs_english_age': {'likelihood_diff,none': 4.537944248744419, 'likelihood_diff_stderr,none': 0.4369966519695147, 'pct_stereotype,none': 0.6703296703296703, 'pct_stereotype_stderr,none': 0.04955219508596587, 'alias': ' - crows_pairs_english_age'}, 'crows_pairs_english_autre': {'likelihood_diff,none': 7.3852669108997695, 'likelihood_diff_stderr,none': 3.1266330530899724, 'pct_stereotype,none': 0.7272727272727273, 'pct_stereotype_stderr,none': 0.14083575804390605, 'alias': ' - crows_pairs_english_autre'}, 'crows_pairs_english_disability': {'likelihood_diff,none': 7.576054441011869, 'likelihood_diff_stderr,none': 0.8834566291940967, 'pct_stereotype,none': 0.7384615384615385, 'pct_stereotype_stderr,none': 0.05493406483494501, 'alias': ' - crows_pairs_english_disability'}, 'crows_pairs_english_gender': {'likelihood_diff,none': 4.158607739210129, 'likelihood_diff_stderr,none': 0.21464841768933213, 'pct_stereotype,none': 
0.553125, 'pct_stereotype_stderr,none': 0.027836160509246814, 'alias': ' - crows_pairs_english_gender'}, 'crows_pairs_english_nationality': {'likelihood_diff,none': 4.096699096538402, 'likelihood_diff_stderr,none': 0.3010726414449652, 'pct_stereotype,none': 0.6342592592592593, 'pct_stereotype_stderr,none': 0.032847388576472056, 'alias': ' - crows_pairs_english_nationality'}, 'crows_pairs_english_physical_appearance': {'likelihood_diff,none': 4.959399302800496, 'likelihood_diff_stderr,none': 0.5163626886218743, 'pct_stereotype,none': 0.6944444444444444, 'pct_stereotype_stderr,none': 0.05466818705978919, 'alias': ' - crows_pairs_english_physical_appearance'}, 'crows_pairs_english_race_color': {'likelihood_diff,none': 4.81732547189307, 'likelihood_diff_stderr,none': 0.2159233883820022, 'pct_stereotype,none': 0.6338582677165354, 'pct_stereotype_stderr,none': 0.021395218002640975, 'alias': ' - crows_pairs_english_race_color'}, 'crows_pairs_english_religion': {'likelihood_diff,none': 4.467977901836774, 'likelihood_diff_stderr,none': 0.41756093801938654, 'pct_stereotype,none': 0.7117117117117117, 'pct_stereotype_stderr,none': 0.04318860867532052, 'alias': ' - crows_pairs_english_religion'}, 'crows_pairs_english_sexual_orientation': {'likelihood_diff,none': 5.250740379415532, 'likelihood_diff_stderr,none': 0.4624374274101435, 'pct_stereotype,none': 0.8494623655913979, 'pct_stereotype_stderr,none': 0.03728212869390004, 'alias': ' - crows_pairs_english_sexual_orientation'}, 'crows_pairs_english_socioeconomic': {'likelihood_diff,none': 4.964149414865594, 'likelihood_diff_stderr,none': 0.2888250878536635, 'pct_stereotype,none': 0.7263157894736842, 'pct_stereotype_stderr,none': 0.03243072906189839, 'alias': ' - crows_pairs_english_socioeconomic'}, 'crows_pairs_french': {'likelihood_diff,none': 4.685419388590786, 'likelihood_diff_stderr,none': 0.1197603523248747, 'pct_stereotype,none': 0.5205724508050089, 'pct_stereotype_stderr,none': 0.012202956874643714, 'alias': ' - crows_pairs_french'}, 'crows_pairs_french_age': {'likelihood_diff,none': 4.185000652737088, 'likelihood_diff_stderr,none': 0.5570851429106586, 'pct_stereotype,none': 0.5111111111111111, 'pct_stereotype_stderr,none': 0.05298680599073449, 'alias': ' - crows_pairs_french_age'}, 'crows_pairs_french_autre': {'likelihood_diff,none': 3.604593717134916, 'likelihood_diff_stderr,none': 1.010792669095801, 'pct_stereotype,none': 0.5384615384615384, 'pct_stereotype_stderr,none': 0.14390989949130545, 'alias': ' - crows_pairs_french_autre'}, 'crows_pairs_french_disability': {'likelihood_diff,none': 5.590537735910127, 'likelihood_diff_stderr,none': 0.6032956432563803, 'pct_stereotype,none': 0.6818181818181818, 'pct_stereotype_stderr,none': 0.057771719027476576, 'alias': ' - crows_pairs_french_disability'}, 'crows_pairs_french_gender': {'likelihood_diff,none': 4.326776760018132, 'likelihood_diff_stderr,none': 0.24224297198072423, 'pct_stereotype,none': 0.5077881619937694, 'pct_stereotype_stderr,none': 0.02794745876935634, 'alias': ' - crows_pairs_french_gender'}, 'crows_pairs_french_nationality': {'likelihood_diff,none': 5.112244511781474, 'likelihood_diff_stderr,none': 0.3158921225152337, 'pct_stereotype,none': 0.383399209486166, 'pct_stereotype_stderr,none': 0.030628616122857784, 'alias': ' - crows_pairs_french_nationality'}, 'crows_pairs_french_physical_appearance': {'likelihood_diff,none': 3.9224590725368924, 'likelihood_diff_stderr,none': 0.457093252573342, 'pct_stereotype,none': 0.5694444444444444, 'pct_stereotype_stderr,none': 0.05876396677084613, 
'alias': ' - crows_pairs_french_physical_appearance'}, 'crows_pairs_french_race_color': {'likelihood_diff,none': 4.7455005562823755, 'likelihood_diff_stderr,none': 0.24087972130069762, 'pct_stereotype,none': 0.4782608695652174, 'pct_stereotype_stderr,none': 0.023315932363473738, 'alias': ' - crows_pairs_french_race_color'}, 'crows_pairs_french_religion': {'likelihood_diff,none': 3.60599773241126, 'likelihood_diff_stderr,none': 0.3299566111671841, 'pct_stereotype,none': 0.5043478260869565, 'pct_stereotype_stderr,none': 0.04682752006203916, 'alias': ' - crows_pairs_french_religion'}, 'crows_pairs_french_sexual_orientation': {'likelihood_diff,none': 5.098599989335615, 'likelihood_diff_stderr,none': 0.4065199601405249, 'pct_stereotype,none': 0.7582417582417582, 'pct_stereotype_stderr,none': 0.04513082148355001, 'alias': ' - crows_pairs_french_sexual_orientation'}, 'crows_pairs_french_socioeconomic': {'likelihood_diff,none': 5.311142940910495, 'likelihood_diff_stderr,none': 0.43270114350722544, 'pct_stereotype,none': 0.6071428571428571, 'pct_stereotype_stderr,none': 0.03497401292852224, 'alias': ' - crows_pairs_french_socioeconomic'}, 'multirc': {'acc,none': 0.33745874587458746, 'acc_stderr,none': 0.006791728192424027, 'alias': 'multirc'}, 'ceval-valid': {'acc,none': 0.43684992570579495, 'acc_stderr,none': 0.14261591689704498, 'acc_norm,none': 0.43684992570579495, 'acc_norm_stderr,none': 0.14261591689704498, 'alias': 'ceval-valid'}, 'ceval-valid_accountant': {'acc,none': 0.3877551020408163, 'acc_stderr,none': 0.07032677934739909, 'acc_norm,none': 0.3877551020408163, 'acc_norm_stderr,none': 0.07032677934739909, 'alias': ' - ceval-valid_accountant'}, 'ceval-valid_advanced_mathematics': {'acc,none': 0.2631578947368421, 'acc_stderr,none': 0.10379087338771256, 'acc_norm,none': 0.2631578947368421, 'acc_norm_stderr,none': 0.10379087338771256, 'alias': ' - ceval-valid_advanced_mathematics'}, 'ceval-valid_art_studies': {'acc,none': 0.30303030303030304, 'acc_stderr,none': 0.08124094920275461, 'acc_norm,none': 0.30303030303030304, 'acc_norm_stderr,none': 0.08124094920275461, 'alias': ' - ceval-valid_art_studies'}, 'ceval-valid_basic_medicine': {'acc,none': 0.3684210526315789, 'acc_stderr,none': 0.11369720523522558, 'acc_norm,none': 0.3684210526315789, 'acc_norm_stderr,none': 0.11369720523522558, 'alias': ' - ceval-valid_basic_medicine'}, 'ceval-valid_business_administration': {'acc,none': 0.3939393939393939, 'acc_stderr,none': 0.08637692614387409, 'acc_norm,none': 0.3939393939393939, 'acc_norm_stderr,none': 0.08637692614387409, 'alias': ' - ceval-valid_business_administration'}, 'ceval-valid_chinese_language_and_literature': {'acc,none': 0.30434782608695654, 'acc_stderr,none': 0.09810018692482896, 'acc_norm,none': 0.30434782608695654, 'acc_norm_stderr,none': 0.09810018692482896, 'alias': ' - ceval-valid_chinese_language_and_literature'}, 'ceval-valid_civil_servant': {'acc,none': 0.3404255319148936, 'acc_stderr,none': 0.06986570800554745, 'acc_norm,none': 0.3404255319148936, 'acc_norm_stderr,none': 0.06986570800554745, 'alias': ' - ceval-valid_civil_servant'}, 'ceval-valid_clinical_medicine': {'acc,none': 0.45454545454545453, 'acc_stderr,none': 0.10865714630312667, 'acc_norm,none': 0.45454545454545453, 'acc_norm_stderr,none': 0.10865714630312667, 'alias': ' - ceval-valid_clinical_medicine'}, 'ceval-valid_college_chemistry': {'acc,none': 0.375, 'acc_stderr,none': 0.10094660663590604, 'acc_norm,none': 0.375, 'acc_norm_stderr,none': 0.10094660663590604, 'alias': ' - ceval-valid_college_chemistry'}, 
'ceval-valid_college_economics': {'acc,none': 0.41818181818181815, 'acc_stderr,none': 0.0671242332357016, 'acc_norm,none': 0.41818181818181815, 'acc_norm_stderr,none': 0.0671242332357016, 'alias': ' - ceval-valid_college_economics'}, 'ceval-valid_college_physics': {'acc,none': 0.2631578947368421, 'acc_stderr,none': 0.10379087338771256, 'acc_norm,none': 0.2631578947368421, 'acc_norm_stderr,none': 0.10379087338771256, 'alias': ' - ceval-valid_college_physics'}, 'ceval-valid_college_programming': {'acc,none': 0.5675675675675675, 'acc_stderr,none': 0.08256893144064577, 'acc_norm,none': 0.5675675675675675, 'acc_norm_stderr,none': 0.08256893144064577, 'alias': ' - ceval-valid_college_programming'}, 'ceval-valid_computer_architecture': {'acc,none': 0.5714285714285714, 'acc_stderr,none': 0.11065666703449763, 'acc_norm,none': 0.5714285714285714, 'acc_norm_stderr,none': 0.11065666703449763, 'alias': ' - ceval-valid_computer_architecture'}, 'ceval-valid_computer_network': {'acc,none': 0.5789473684210527, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.5789473684210527, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_computer_network'}, 'ceval-valid_discrete_mathematics': {'acc,none': 0.3125, 'acc_stderr,none': 0.11967838846954226, 'acc_norm,none': 0.3125, 'acc_norm_stderr,none': 0.11967838846954226, 'alias': ' - ceval-valid_discrete_mathematics'}, 'ceval-valid_education_science': {'acc,none': 0.4482758620689655, 'acc_stderr,none': 0.09398415777506855, 'acc_norm,none': 0.4482758620689655, 'acc_norm_stderr,none': 0.09398415777506855, 'alias': ' - ceval-valid_education_science'}, 'ceval-valid_electrical_engineer': {'acc,none': 0.4594594594594595, 'acc_stderr,none': 0.08305895907471073, 'acc_norm,none': 0.4594594594594595, 'acc_norm_stderr,none': 0.08305895907471073, 'alias': ' - ceval-valid_electrical_engineer'}, 'ceval-valid_environmental_impact_assessment_engineer': {'acc,none': 0.45161290322580644, 'acc_stderr,none': 0.09085862440549508, 'acc_norm,none': 0.45161290322580644, 'acc_norm_stderr,none': 0.09085862440549508, 'alias': ' - ceval-valid_environmental_impact_assessment_engineer'}, 'ceval-valid_fire_engineer': {'acc,none': 0.3870967741935484, 'acc_stderr,none': 0.08892934678767887, 'acc_norm,none': 0.3870967741935484, 'acc_norm_stderr,none': 0.08892934678767887, 'alias': ' - ceval-valid_fire_engineer'}, 'ceval-valid_high_school_biology': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_high_school_biology'}, 'ceval-valid_high_school_chemistry': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_high_school_chemistry'}, 'ceval-valid_high_school_chinese': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_high_school_chinese'}, 'ceval-valid_high_school_geography': {'acc,none': 0.5263157894736842, 'acc_stderr,none': 0.11768778828946262, 'acc_norm,none': 0.5263157894736842, 'acc_norm_stderr,none': 0.11768778828946262, 'alias': ' - ceval-valid_high_school_geography'}, 'ceval-valid_high_school_history': {'acc,none': 0.65, 'acc_stderr,none': 0.10942433098048311, 'acc_norm,none': 0.65, 'acc_norm_stderr,none': 0.10942433098048311, 'alias': ' - ceval-valid_high_school_history'}, 
'ceval-valid_high_school_mathematics': {'acc,none': 0.2222222222222222, 'acc_stderr,none': 0.10083169033033672, 'acc_norm,none': 0.2222222222222222, 'acc_norm_stderr,none': 0.10083169033033672, 'alias': ' - ceval-valid_high_school_mathematics'}, 'ceval-valid_high_school_physics': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_high_school_physics'}, 'ceval-valid_high_school_politics': {'acc,none': 0.631578947368421, 'acc_stderr,none': 0.11369720523522561, 'acc_norm,none': 0.631578947368421, 'acc_norm_stderr,none': 0.11369720523522561, 'alias': ' - ceval-valid_high_school_politics'}, 'ceval-valid_ideological_and_moral_cultivation': {'acc,none': 0.6842105263157895, 'acc_stderr,none': 0.10956136839295434, 'acc_norm,none': 0.6842105263157895, 'acc_norm_stderr,none': 0.10956136839295434, 'alias': ' - ceval-valid_ideological_and_moral_cultivation'}, 'ceval-valid_law': {'acc,none': 0.3333333333333333, 'acc_stderr,none': 0.0982946374365981, 'acc_norm,none': 0.3333333333333333, 'acc_norm_stderr,none': 0.0982946374365981, 'alias': ' - ceval-valid_law'}, 'ceval-valid_legal_professional': {'acc,none': 0.6956521739130435, 'acc_stderr,none': 0.09810018692482893, 'acc_norm,none': 0.6956521739130435, 'acc_norm_stderr,none': 0.09810018692482893, 'alias': ' - ceval-valid_legal_professional'}, 'ceval-valid_logic': {'acc,none': 0.5, 'acc_stderr,none': 0.10910894511799618, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.10910894511799618, 'alias': ' - ceval-valid_logic'}, 'ceval-valid_mao_zedong_thought': {'acc,none': 0.5, 'acc_stderr,none': 0.1042572070285374, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.1042572070285374, 'alias': ' - ceval-valid_mao_zedong_thought'}, 'ceval-valid_marxism': {'acc,none': 0.42105263157894735, 'acc_stderr,none': 0.11637279966159299, 'acc_norm,none': 0.42105263157894735, 'acc_norm_stderr,none': 0.11637279966159299, 'alias': ' - ceval-valid_marxism'}, 'ceval-valid_metrology_engineer': {'acc,none': 0.4583333333333333, 'acc_stderr,none': 0.10389457216622948, 'acc_norm,none': 0.4583333333333333, 'acc_norm_stderr,none': 0.10389457216622948, 'alias': ' - ceval-valid_metrology_engineer'}, 'ceval-valid_middle_school_biology': {'acc,none': 0.5714285714285714, 'acc_stderr,none': 0.11065666703449763, 'acc_norm,none': 0.5714285714285714, 'acc_norm_stderr,none': 0.11065666703449763, 'alias': ' - ceval-valid_middle_school_biology'}, 'ceval-valid_middle_school_chemistry': {'acc,none': 0.3, 'acc_stderr,none': 0.10513149660756933, 'acc_norm,none': 0.3, 'acc_norm_stderr,none': 0.10513149660756933, 'alias': ' - ceval-valid_middle_school_chemistry'}, 'ceval-valid_middle_school_geography': {'acc,none': 0.5, 'acc_stderr,none': 0.15075567228888181, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.15075567228888181, 'alias': ' - ceval-valid_middle_school_geography'}, 'ceval-valid_middle_school_history': {'acc,none': 0.5909090909090909, 'acc_stderr,none': 0.10729033533674225, 'acc_norm,none': 0.5909090909090909, 'acc_norm_stderr,none': 0.10729033533674225, 'alias': ' - ceval-valid_middle_school_history'}, 'ceval-valid_middle_school_mathematics': {'acc,none': 0.15789473684210525, 'acc_stderr,none': 0.08594700851870798, 'acc_norm,none': 0.15789473684210525, 'acc_norm_stderr,none': 0.08594700851870798, 'alias': ' - ceval-valid_middle_school_mathematics'}, 'ceval-valid_middle_school_physics': {'acc,none': 0.5263157894736842, 'acc_stderr,none': 0.1176877882894626, 
'acc_norm,none': 0.5263157894736842, 'acc_norm_stderr,none': 0.1176877882894626, 'alias': ' - ceval-valid_middle_school_physics'}, 'ceval-valid_middle_school_politics': {'acc,none': 0.5714285714285714, 'acc_stderr,none': 0.11065666703449763, 'acc_norm,none': 0.5714285714285714, 'acc_norm_stderr,none': 0.11065666703449763, 'alias': ' - ceval-valid_middle_school_politics'}, 'ceval-valid_modern_chinese_history': {'acc,none': 0.391304347826087, 'acc_stderr,none': 0.10405096111532161, 'acc_norm,none': 0.391304347826087, 'acc_norm_stderr,none': 0.10405096111532161, 'alias': ' - ceval-valid_modern_chinese_history'}, 'ceval-valid_operating_system': {'acc,none': 0.47368421052631576, 'acc_stderr,none': 0.11768778828946262, 'acc_norm,none': 0.47368421052631576, 'acc_norm_stderr,none': 0.11768778828946262, 'alias': ' - ceval-valid_operating_system'}, 'ceval-valid_physician': {'acc,none': 0.32653061224489793, 'acc_stderr,none': 0.06768622021133469, 'acc_norm,none': 0.32653061224489793, 'acc_norm_stderr,none': 0.06768622021133469, 'alias': ' - ceval-valid_physician'}, 'ceval-valid_plant_protection': {'acc,none': 0.5, 'acc_stderr,none': 0.10910894511799618, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.10910894511799618, 'alias': ' - ceval-valid_plant_protection'}, 'ceval-valid_probability_and_statistics': {'acc,none': 0.2222222222222222, 'acc_stderr,none': 0.10083169033033672, 'acc_norm,none': 0.2222222222222222, 'acc_norm_stderr,none': 0.10083169033033672, 'alias': ' - ceval-valid_probability_and_statistics'}, 'ceval-valid_professional_tour_guide': {'acc,none': 0.4827586206896552, 'acc_stderr,none': 0.09443492370778725, 'acc_norm,none': 0.4827586206896552, 'acc_norm_stderr,none': 0.09443492370778725, 'alias': ' - ceval-valid_professional_tour_guide'}, 'ceval-valid_sports_science': {'acc,none': 0.3157894736842105, 'acc_stderr,none': 0.10956136839295434, 'acc_norm,none': 0.3157894736842105, 'acc_norm_stderr,none': 0.10956136839295434, 'alias': ' - ceval-valid_sports_science'}, 'ceval-valid_tax_accountant': {'acc,none': 0.3673469387755102, 'acc_stderr,none': 0.06958255967849925, 'acc_norm,none': 0.3673469387755102, 'acc_norm_stderr,none': 0.06958255967849925, 'alias': ' - ceval-valid_tax_accountant'}, 'ceval-valid_teacher_qualification': {'acc,none': 0.5909090909090909, 'acc_stderr,none': 0.07497837474124878, 'acc_norm,none': 0.5909090909090909, 'acc_norm_stderr,none': 0.07497837474124878, 'alias': ' - ceval-valid_teacher_qualification'}, 'ceval-valid_urban_and_rural_planner': {'acc,none': 0.5652173913043478, 'acc_stderr,none': 0.07389883353033021, 'acc_norm,none': 0.5652173913043478, 'acc_norm_stderr,none': 0.07389883353033021, 'alias': ' - ceval-valid_urban_and_rural_planner'}, 'ceval-valid_veterinary_medicine': {'acc,none': 0.34782608695652173, 'acc_stderr,none': 0.10154334054280735, 'acc_norm,none': 0.34782608695652173, 'acc_norm_stderr,none': 0.10154334054280735, 'alias': ' - ceval-valid_veterinary_medicine'}, 'copa': {'acc,none': 0.92, 'acc_stderr,none': 0.027265992434429086, 'alias': 'copa'}, 'freebase': {'exact_match,none': 0.061515748031496065, 'exact_match_stderr,none': 0.005331527918306684, 'alias': 'freebase'}, 'webqs': {'exact_match,none': 0.061515748031496065, 'exact_match_stderr,none': 0.005331527918306684, 'alias': 'webqs'}, 'cmmlu': {'acc,none': 0.42220687273355206, 'acc_stderr,none': 0.10478605809778388, 'acc_norm,none': 0.42220687273355206, 'acc_norm_stderr,none': 0.10478605809778388, 'alias': 'cmmlu'}, 'cmmlu_agronomy': {'acc,none': 0.3905325443786982, 'acc_stderr,none': 
0.03763996705629265, 'acc_norm,none': 0.3905325443786982, 'acc_norm_stderr,none': 0.03763996705629265, 'alias': ' - cmmlu_agronomy'}, 'cmmlu_anatomy': {'acc,none': 0.25675675675675674, 'acc_stderr,none': 0.036030290036472144, 'acc_norm,none': 0.25675675675675674, 'acc_norm_stderr,none': 0.036030290036472144, 'alias': ' - cmmlu_anatomy'}, 'cmmlu_ancient_chinese': {'acc,none': 0.25, 'acc_stderr,none': 0.03391617237346009, 'acc_norm,none': 0.25, 'acc_norm_stderr,none': 0.03391617237346009, 'alias': ' - cmmlu_ancient_chinese'}, 'cmmlu_arts': {'acc,none': 0.36875, 'acc_stderr,none': 0.03826204233503226, 'acc_norm,none': 0.36875, 'acc_norm_stderr,none': 0.03826204233503226, 'alias': ' - cmmlu_arts'}, 'cmmlu_astronomy': {'acc,none': 0.36363636363636365, 'acc_stderr,none': 0.03756335775187897, 'acc_norm,none': 0.36363636363636365, 'acc_norm_stderr,none': 0.03756335775187897, 'alias': ' - cmmlu_astronomy'}, 'cmmlu_business_ethics': {'acc,none': 0.49760765550239233, 'acc_stderr,none': 0.03466836542150577, 'acc_norm,none': 0.49760765550239233, 'acc_norm_stderr,none': 0.03466836542150577, 'alias': ' - cmmlu_business_ethics'}, 'cmmlu_chinese_civil_service_exam': {'acc,none': 0.35, 'acc_stderr,none': 0.03782614981812041, 'acc_norm,none': 0.35, 'acc_norm_stderr,none': 0.03782614981812041, 'alias': ' - cmmlu_chinese_civil_service_exam'}, 'cmmlu_chinese_driving_rule': {'acc,none': 0.6335877862595419, 'acc_stderr,none': 0.04225875451969636, 'acc_norm,none': 0.6335877862595419, 'acc_norm_stderr,none': 0.04225875451969636, 'alias': ' - cmmlu_chinese_driving_rule'}, 'cmmlu_chinese_food_culture': {'acc,none': 0.3161764705882353, 'acc_stderr,none': 0.040019338846834944, 'acc_norm,none': 0.3161764705882353, 'acc_norm_stderr,none': 0.040019338846834944, 'alias': ' - cmmlu_chinese_food_culture'}, 'cmmlu_chinese_foreign_policy': {'acc,none': 0.42990654205607476, 'acc_stderr,none': 0.048084723494299535, 'acc_norm,none': 0.42990654205607476, 'acc_norm_stderr,none': 0.048084723494299535, 'alias': ' - cmmlu_chinese_foreign_policy'}, 'cmmlu_chinese_history': {'acc,none': 0.4674922600619195, 'acc_stderr,none': 0.027804957713129835, 'acc_norm,none': 0.4674922600619195, 'acc_norm_stderr,none': 0.027804957713129835, 'alias': ' - cmmlu_chinese_history'}, 'cmmlu_chinese_literature': {'acc,none': 0.3627450980392157, 'acc_stderr,none': 0.033744993563193555, 'acc_norm,none': 0.3627450980392157, 'acc_norm_stderr,none': 0.033744993563193555, 'alias': ' - cmmlu_chinese_literature'}, 'cmmlu_chinese_teacher_qualification': {'acc,none': 0.5195530726256983, 'acc_stderr,none': 0.037447917191364796, 'acc_norm,none': 0.5195530726256983, 'acc_norm_stderr,none': 0.037447917191364796, 'alias': ' - cmmlu_chinese_teacher_qualification'}, 'cmmlu_clinical_knowledge': {'acc,none': 0.3333333333333333, 'acc_stderr,none': 0.0306858205966108, 'acc_norm,none': 0.3333333333333333, 'acc_norm_stderr,none': 0.0306858205966108, 'alias': ' - cmmlu_clinical_knowledge'}, 'cmmlu_college_actuarial_science': {'acc,none': 0.3018867924528302, 'acc_stderr,none': 0.044801270921106716, 'acc_norm,none': 0.3018867924528302, 'acc_norm_stderr,none': 0.044801270921106716, 'alias': ' - cmmlu_college_actuarial_science'}, 'cmmlu_college_education': {'acc,none': 0.4485981308411215, 'acc_stderr,none': 0.04830698295619321, 'acc_norm,none': 0.4485981308411215, 'acc_norm_stderr,none': 0.04830698295619321, 'alias': ' - cmmlu_college_education'}, 'cmmlu_college_engineering_hydrology': {'acc,none': 0.4716981132075472, 'acc_stderr,none': 0.04871677165040775, 'acc_norm,none': 
0.4716981132075472, 'acc_norm_stderr,none': 0.04871677165040775, 'alias': ' - cmmlu_college_engineering_hydrology'}, 'cmmlu_college_law': {'acc,none': 0.28703703703703703, 'acc_stderr,none': 0.043733130409147614, 'acc_norm,none': 0.28703703703703703, 'acc_norm_stderr,none': 0.043733130409147614, 'alias': ' - cmmlu_college_law'}, 'cmmlu_college_mathematics': {'acc,none': 0.26666666666666666, 'acc_stderr,none': 0.04336290903919942, 'acc_norm,none': 0.26666666666666666, 'acc_norm_stderr,none': 0.04336290903919942, 'alias': ' - cmmlu_college_mathematics'}, 'cmmlu_college_medical_statistics': {'acc,none': 0.46226415094339623, 'acc_stderr,none': 0.04865583757821749, 'acc_norm,none': 0.46226415094339623, 'acc_norm_stderr,none': 0.04865583757821749, 'alias': ' - cmmlu_college_medical_statistics'}, 'cmmlu_college_medicine': {'acc,none': 0.37362637362637363, 'acc_stderr,none': 0.02933263256052554, 'acc_norm,none': 0.37362637362637363, 'acc_norm_stderr,none': 0.02933263256052554, 'alias': ' - cmmlu_college_medicine'}, 'cmmlu_computer_science': {'acc,none': 0.5147058823529411, 'acc_stderr,none': 0.03507793834791324, 'acc_norm,none': 0.5147058823529411, 'acc_norm_stderr,none': 0.03507793834791324, 'alias': ' - cmmlu_computer_science'}, 'cmmlu_computer_security': {'acc,none': 0.6023391812865497, 'acc_stderr,none': 0.0375363895576169, 'acc_norm,none': 0.6023391812865497, 'acc_norm_stderr,none': 0.0375363895576169, 'alias': ' - cmmlu_computer_security'}, 'cmmlu_conceptual_physics': {'acc,none': 0.41496598639455784, 'acc_stderr,none': 0.040777479727739804, 'acc_norm,none': 0.41496598639455784, 'acc_norm_stderr,none': 0.040777479727739804, 'alias': ' - cmmlu_conceptual_physics'}, 'cmmlu_construction_project_management': {'acc,none': 0.381294964028777, 'acc_stderr,none': 0.041345934945119074, 'acc_norm,none': 0.381294964028777, 'acc_norm_stderr,none': 0.041345934945119074, 'alias': ' - cmmlu_construction_project_management'}, 'cmmlu_economics': {'acc,none': 0.5031446540880503, 'acc_stderr,none': 0.03977707748639468, 'acc_norm,none': 0.5031446540880503, 'acc_norm_stderr,none': 0.03977707748639468, 'alias': ' - cmmlu_economics'}, 'cmmlu_education': {'acc,none': 0.4785276073619632, 'acc_stderr,none': 0.0392474687675113, 'acc_norm,none': 0.4785276073619632, 'acc_norm_stderr,none': 0.0392474687675113, 'alias': ' - cmmlu_education'}, 'cmmlu_electrical_engineering': {'acc,none': 0.43023255813953487, 'acc_stderr,none': 0.03786189925946143, 'acc_norm,none': 0.43023255813953487, 'acc_norm_stderr,none': 0.03786189925946143, 'alias': ' - cmmlu_electrical_engineering'}, 'cmmlu_elementary_chinese': {'acc,none': 0.2619047619047619, 'acc_stderr,none': 0.027751792418790923, 'acc_norm,none': 0.2619047619047619, 'acc_norm_stderr,none': 0.027751792418790923, 'alias': ' - cmmlu_elementary_chinese'}, 'cmmlu_elementary_commonsense': {'acc,none': 0.40404040404040403, 'acc_stderr,none': 0.034961309720561266, 'acc_norm,none': 0.40404040404040403, 'acc_norm_stderr,none': 0.034961309720561266, 'alias': ' - cmmlu_elementary_commonsense'}, 'cmmlu_elementary_information_and_technology': {'acc,none': 0.6764705882352942, 'acc_stderr,none': 0.030388353551886793, 'acc_norm,none': 0.6764705882352942, 'acc_norm_stderr,none': 0.030388353551886793, 'alias': ' - cmmlu_elementary_information_and_technology'}, 'cmmlu_elementary_mathematics': {'acc,none': 0.24347826086956523, 'acc_stderr,none': 0.028361099300075063, 'acc_norm,none': 0.24347826086956523, 'acc_norm_stderr,none': 0.028361099300075063, 'alias': ' - cmmlu_elementary_mathematics'}, 
'cmmlu_ethnology': {'acc,none': 0.34814814814814815, 'acc_stderr,none': 0.041153246103369526, 'acc_norm,none': 0.34814814814814815, 'acc_norm_stderr,none': 0.041153246103369526, 'alias': ' - cmmlu_ethnology'}, 'cmmlu_food_science': {'acc,none': 0.4125874125874126, 'acc_stderr,none': 0.04131287692392344, 'acc_norm,none': 0.4125874125874126, 'acc_norm_stderr,none': 0.04131287692392344, 'alias': ' - cmmlu_food_science'}, 'cmmlu_genetics': {'acc,none': 0.36363636363636365, 'acc_stderr,none': 0.036363636363636376, 'acc_norm,none': 0.36363636363636365, 'acc_norm_stderr,none': 0.036363636363636376, 'alias': ' - cmmlu_genetics'}, 'cmmlu_global_facts': {'acc,none': 0.42953020134228187, 'acc_stderr,none': 0.04068949724015223, 'acc_norm,none': 0.42953020134228187, 'acc_norm_stderr,none': 0.04068949724015223, 'alias': ' - cmmlu_global_facts'}, 'cmmlu_high_school_biology': {'acc,none': 0.39644970414201186, 'acc_stderr,none': 0.03773949997679294, 'acc_norm,none': 0.39644970414201186, 'acc_norm_stderr,none': 0.03773949997679294, 'alias': ' - cmmlu_high_school_biology'}, 'cmmlu_high_school_chemistry': {'acc,none': 0.3409090909090909, 'acc_stderr,none': 0.04141487016241484, 'acc_norm,none': 0.3409090909090909, 'acc_norm_stderr,none': 0.04141487016241484, 'alias': ' - cmmlu_high_school_chemistry'}, 'cmmlu_high_school_geography': {'acc,none': 0.423728813559322, 'acc_stderr,none': 0.04568404181144862, 'acc_norm,none': 0.423728813559322, 'acc_norm_stderr,none': 0.04568404181144862, 'alias': ' - cmmlu_high_school_geography'}, 'cmmlu_high_school_mathematics': {'acc,none': 0.23780487804878048, 'acc_stderr,none': 0.03334645408665339, 'acc_norm,none': 0.23780487804878048, 'acc_norm_stderr,none': 0.03334645408665339, 'alias': ' - cmmlu_high_school_mathematics'}, 'cmmlu_high_school_physics': {'acc,none': 0.4090909090909091, 'acc_stderr,none': 0.047093069786618966, 'acc_norm,none': 0.4090909090909091, 'acc_norm_stderr,none': 0.047093069786618966, 'alias': ' - cmmlu_high_school_physics'}, 'cmmlu_high_school_politics': {'acc,none': 0.4195804195804196, 'acc_stderr,none': 0.041412787292137106, 'acc_norm,none': 0.4195804195804196, 'acc_norm_stderr,none': 0.041412787292137106, 'alias': ' - cmmlu_high_school_politics'}, 'cmmlu_human_sexuality': {'acc,none': 0.5, 'acc_stderr,none': 0.04472135954999579, 'acc_norm,none': 0.5, 'acc_norm_stderr,none': 0.04472135954999579, 'alias': ' - cmmlu_human_sexuality'}, 'cmmlu_international_law': {'acc,none': 0.3945945945945946, 'acc_stderr,none': 0.0360321188626959, 'acc_norm,none': 0.3945945945945946, 'acc_norm_stderr,none': 0.0360321188626959, 'alias': ' - cmmlu_international_law'}, 'cmmlu_journalism': {'acc,none': 0.5058139534883721, 'acc_stderr,none': 0.03823337064994852, 'acc_norm,none': 0.5058139534883721, 'acc_norm_stderr,none': 0.03823337064994852, 'alias': ' - cmmlu_journalism'}, 'cmmlu_jurisprudence': {'acc,none': 0.44525547445255476, 'acc_stderr,none': 0.02454478420191345, 'acc_norm,none': 0.44525547445255476, 'acc_norm_stderr,none': 0.02454478420191345, 'alias': ' - cmmlu_jurisprudence'}, 'cmmlu_legal_and_moral_basis': {'acc,none': 0.7336448598130841, 'acc_stderr,none': 0.030288912386133213, 'acc_norm,none': 0.7336448598130841, 'acc_norm_stderr,none': 0.030288912386133213, 'alias': ' - cmmlu_legal_and_moral_basis'}, 'cmmlu_logical': {'acc,none': 0.4715447154471545, 'acc_stderr,none': 0.04519450648295478, 'acc_norm,none': 0.4715447154471545, 'acc_norm_stderr,none': 0.04519450648295478, 'alias': ' - cmmlu_logical'}, 'cmmlu_machine_learning': {'acc,none': 0.4426229508196721, 
'acc_stderr,none': 0.04515426947106744, 'acc_norm,none': 0.4426229508196721, 'acc_norm_stderr,none': 0.04515426947106744, 'alias': ' - cmmlu_machine_learning'}, 'cmmlu_management': {'acc,none': 0.5285714285714286, 'acc_stderr,none': 0.03452921053595503, 'acc_norm,none': 0.5285714285714286, 'acc_norm_stderr,none': 0.03452921053595503, 'alias': ' - cmmlu_management'}, 'cmmlu_marketing': {'acc,none': 0.5611111111111111, 'acc_stderr,none': 0.0370915696198558, 'acc_norm,none': 0.5611111111111111, 'acc_norm_stderr,none': 0.0370915696198558, 'alias': ' - cmmlu_marketing'}, 'cmmlu_marxist_theory': {'acc,none': 0.4708994708994709, 'acc_stderr,none': 0.036404433270336836, 'acc_norm,none': 0.4708994708994709, 'acc_norm_stderr,none': 0.036404433270336836, 'alias': ' - cmmlu_marxist_theory'}, 'cmmlu_modern_chinese': {'acc,none': 0.3275862068965517, 'acc_stderr,none': 0.04376552980994349, 'acc_norm,none': 0.3275862068965517, 'acc_norm_stderr,none': 0.04376552980994349, 'alias': ' - cmmlu_modern_chinese'}, 'cmmlu_nutrition': {'acc,none': 0.3724137931034483, 'acc_stderr,none': 0.04028731532947558, 'acc_norm,none': 0.3724137931034483, 'acc_norm_stderr,none': 0.04028731532947558, 'alias': ' - cmmlu_nutrition'}, 'cmmlu_philosophy': {'acc,none': 0.4380952380952381, 'acc_stderr,none': 0.048651804501824956, 'acc_norm,none': 0.4380952380952381, 'acc_norm_stderr,none': 0.048651804501824956, 'alias': ' - cmmlu_philosophy'}, 'cmmlu_professional_accounting': {'acc,none': 0.46285714285714286, 'acc_stderr,none': 0.03780017090541436, 'acc_norm,none': 0.46285714285714286, 'acc_norm_stderr,none': 0.03780017090541436, 'alias': ' - cmmlu_professional_accounting'}, 'cmmlu_professional_law': {'acc,none': 0.3127962085308057, 'acc_stderr,none': 0.031993655655275954, 'acc_norm,none': 0.3127962085308057, 'acc_norm_stderr,none': 0.031993655655275954, 'alias': ' - cmmlu_professional_law'}, 'cmmlu_professional_medicine': {'acc,none': 0.28191489361702127, 'acc_stderr,none': 0.023234393263661224, 'acc_norm,none': 0.28191489361702127, 'acc_norm_stderr,none': 0.023234393263661224, 'alias': ' - cmmlu_professional_medicine'}, 'cmmlu_professional_psychology': {'acc,none': 0.43103448275862066, 'acc_stderr,none': 0.03258314422493334, 'acc_norm,none': 0.43103448275862066, 'acc_norm_stderr,none': 0.03258314422493334, 'alias': ' - cmmlu_professional_psychology'}, 'cmmlu_public_relations': {'acc,none': 0.45977011494252873, 'acc_stderr,none': 0.03789104827773084, 'acc_norm,none': 0.45977011494252873, 'acc_norm_stderr,none': 0.03789104827773084, 'alias': ' - cmmlu_public_relations'}, 'cmmlu_security_study': {'acc,none': 0.45925925925925926, 'acc_stderr,none': 0.04304979692464243, 'acc_norm,none': 0.45925925925925926, 'acc_norm_stderr,none': 0.04304979692464243, 'alias': ' - cmmlu_security_study'}, 'cmmlu_sociology': {'acc,none': 0.4911504424778761, 'acc_stderr,none': 0.03332811194650095, 'acc_norm,none': 0.4911504424778761, 'acc_norm_stderr,none': 0.03332811194650095, 'alias': ' - cmmlu_sociology'}, 'cmmlu_sports_science': {'acc,none': 0.46060606060606063, 'acc_stderr,none': 0.03892207016552013, 'acc_norm,none': 0.46060606060606063, 'acc_norm_stderr,none': 0.03892207016552013, 'alias': ' - cmmlu_sports_science'}, 'cmmlu_traditional_chinese_medicine': {'acc,none': 0.3783783783783784, 'acc_stderr,none': 0.03575339609546739, 'acc_norm,none': 0.3783783783783784, 'acc_norm_stderr,none': 0.03575339609546739, 'alias': ' - cmmlu_traditional_chinese_medicine'}, 'cmmlu_virology': {'acc,none': 0.48520710059171596, 'acc_stderr,none': 0.03855895070315001, 
'acc_norm,none': 0.48520710059171596, 'acc_norm_stderr,none': 0.03855895070315001, 'alias': ' - cmmlu_virology'}, 'cmmlu_world_history': {'acc,none': 0.4968944099378882, 'acc_stderr,none': 0.039527708265086496, 'acc_norm,none': 0.4968944099378882, 'acc_norm_stderr,none': 0.039527708265086496, 'alias': ' - cmmlu_world_history'}, 'cmmlu_world_religions': {'acc,none': 0.45, 'acc_stderr,none': 0.03945381823835186, 'acc_norm,none': 0.45, 'acc_norm_stderr,none': 0.03945381823835186, 'alias': ' - cmmlu_world_religions'}, 'lambada': {'perplexity,none': 3.786399904636776, 'perplexity_stderr,none': 0.21254011171654186, 'acc,none': 0.6832912866291481, 'acc_stderr,none': 0.01690798528858505, 'alias': 'lambada'}, 'lambada_openai': {'perplexity,none': 3.3977925600261814, 'perplexity_stderr,none': 0.07203596942011029, 'acc,none': 0.7145352222006598, 'acc_stderr,none': 0.006292165813769896, 'alias': ' - lambada_openai'}, 'lambada_standard': {'perplexity,none': 4.17500724924737, 'perplexity_stderr,none': 0.09820231784231818, 'acc,none': 0.6520473510576363, 'acc_stderr,none': 0.006636081541776578, 'alias': ' - lambada_standard'}, 'piqa': {'acc,none': 0.8025027203482046, 'acc_stderr,none': 0.009288578108523272, 'acc_norm,none': 0.8068552774755169, 'acc_norm_stderr,none': 0.009210530962579788, 'alias': 'piqa'}, 'mutual': {'r@1,none': 0.22573363431151242, 'r@1_stderr,none': 0.014053085820407473, 'r@2,none': 0.39954853273137697, 'r@2_stderr,none': 0.01646463433752643, 'mrr,none': 0.7563017318130347, 'mrr_stderr,none': 0.010009184048616275, 'alias': 'mutual'}, 'prost': {'acc,none': 0.4664282664389411, 'acc_stderr,none': 0.003644701699456615, 'acc_norm,none': 0.43824722459436377, 'acc_norm_stderr,none': 0.0036249778054749677, 'alias': 'prost'}, 'wsc273': {'acc,none': 0.8827838827838828, 'acc_stderr,none': 0.01950457139863538, 'alias': 'wsc273'}, 'sycophancy': {'acc,none': 0.8909853249475891, 'acc_stderr,none': 0.06497187134187173, 'alias': 'sycophancy'}, 'sycophancy_on_nlp_survey': {'acc,none': 0.9704527243589743, 'acc_stderr,none': 0.0016947879911929757, 'alias': ' - sycophancy_on_nlp_survey'}, 'sycophancy_on_philpapers2020': {'acc,none': 0.9812506334245464, 'acc_stderr,none': 0.001365566076894862, 'alias': ' - sycophancy_on_philpapers2020'}, 'sycophancy_on_political_typology_quiz': {'acc,none': 0.7258823529411764, 'acc_stderr,none': 0.00441695343620367, 'alias': ' - sycophancy_on_political_typology_quiz'}, 'qa4mre': {'acc,none': 0.5460992907801419, 'acc_stderr,none': 0.06008175299623157, 'acc_norm,none': 0.5815602836879432, 'acc_norm_stderr,none': 0.07866017109546872, 'alias': 'qa4mre'}, 'qa4mre_2011': {'acc,none': 0.6583333333333333, 'acc_stderr,none': 0.04347611684317006, 'acc_norm,none': 0.7333333333333333, 'acc_norm_stderr,none': 0.040537932807004046, 'alias': ' - qa4mre_2011'}, 'qa4mre_2012': {'acc,none': 0.55, 'acc_stderr,none': 0.03945381823835186, 'acc_norm,none': 0.61875, 'acc_norm_stderr,none': 0.03851802138867096, 'alias': ' - qa4mre_2012'}, 'qa4mre_2013': {'acc,none': 0.4964788732394366, 'acc_stderr,none': 0.02972117790031384, 'acc_norm,none': 0.4964788732394366, 'acc_norm_stderr,none': 0.029721177900313853, 'alias': ' - qa4mre_2013'}, 'ai2_arc': {'acc,none': 0.7237880496054115, 'acc_stderr,none': 0.0870545951521031, 'acc_norm,none': 0.6992671927846674, 'acc_norm_stderr,none': 0.06575368411217784, 'alias': 'ai2_arc'}, 'arc_challenge': {'acc,none': 0.5401023890784983, 'acc_stderr,none': 0.014564318856924848, 'acc_norm,none': 0.5614334470989761, 'acc_norm_stderr,none': 0.014500682618212867, 'alias': ' 
- arc_challenge'}, 'arc_easy': {'acc,none': 0.8143939393939394, 'acc_stderr,none': 0.007977770454202346, 'acc_norm,none': 0.7672558922558923, 'acc_norm_stderr,none': 0.008671169120579301, 'alias': ' - arc_easy'}, 'logiqa': {'acc,none': 0.2964669738863287, 'acc_stderr,none': 0.017913222760382753, 'acc_norm,none': 0.30721966205837176, 'acc_norm_stderr,none': 0.01809529226082822, 'alias': 'logiqa'}, 'winogrande': {'acc,none': 0.7371744277821626, 'acc_stderr,none': 0.012370922527262008, 'alias': 'winogrande'}, 'boolq': {'acc,none': 0.8538226299694189, 'acc_stderr,none': 0.006178975060597746, 'alias': 'boolq'}, 'logiqa2': {'acc,none': 0.3505089058524173, 'acc_stderr,none': 0.01203782529856954, 'acc_norm,none': 0.3435114503816794, 'acc_norm_stderr,none': 0.011981083483986733, 'alias': 'logiqa2'}, 'openbookqa': {'acc,none': 0.352, 'acc_stderr,none': 0.021380042385946048, 'acc_norm,none': 0.454, 'acc_norm_stderr,none': 0.02228814759117695, 'alias': 'openbookqa'}, 'multimedqa': {'alias': 'stem', 'acc,none': 0.5175301632363378, 'acc_stderr,none': 0.0694061118891545, 'acc_norm,none': 0.4778006153543328, 'acc_norm_stderr,none': 0.0001606261257056986}, 'medmcqa': {'acc,none': 0.46282572316519244, 'acc_stderr,none': 0.007710354282495721, 'acc_norm,none': 0.46282572316519244, 'acc_norm_stderr,none': 0.007710354282495721, 'alias': 'medmcqa'}, 'medqa_4options': {'acc,none': 0.5043205027494109, 'acc_stderr,none': 0.014018780453018352, 'acc_norm,none': 0.5043205027494109, 'acc_norm_stderr,none': 0.014018780453018352, 'alias': 'medqa_4options'}, 'mmlu_anatomy': {'alias': '  - anatomy', 'acc,none': 0.5777777777777777, 'acc_stderr,none': 0.04266763404099582}, 'mmlu_clinical_knowledge': {'alias': '  - clinical_knowledge', 'acc,none': 0.660377358490566, 'acc_stderr,none': 0.029146904747798335}, 'mmlu_college_biology': {'alias': '  - college_biology', 'acc,none': 0.6527777777777778, 'acc_stderr,none': 0.03981240543717861}, 'mmlu_college_medicine': {'alias': '  - college_medicine', 'acc,none': 0.5664739884393064, 'acc_stderr,none': 0.03778621079092056}, 'mmlu_medical_genetics': {'alias': '  - medical_genetics', 'acc,none': 0.63, 'acc_stderr,none': 0.04852365870939099}, 'mmlu_professional_medicine': {'alias': '  - professional_medicine', 'acc,none': 0.6654411764705882, 'acc_stderr,none': 0.028661996202335317}, 'pubmedqa': {'acc,none': 0.758, 'acc_stderr,none': 0.019173085678337157, 'alias': 'pubmedqa'}, 'mc_taco': {'acc,none': 0.7994069053166702, 'acc_stderr,none': 0.004121287749681853, 'f1,none': 0.734733893557423, 'f1_stderr,none': 0.005895264085796533, 'alias': 'mc_taco'}, 'kmmlu': {'acc,none': 0.34704013860814326, 'acc_stderr,none': 0.07238936581650249, 'acc_norm,none': 0.34704013860814326, 'acc_norm_stderr,none': 0.07238936581650249, 'alias': 'kmmlu'}, 'kmmlu_accounting': {'acc,none': 0.24, 'acc_stderr,none': 0.042923469599092816, 'acc_norm,none': 0.24, 'acc_norm_stderr,none': 0.042923469599092816, 'alias': ' - kmmlu_accounting'}, 'kmmlu_agricultural_sciences': {'acc,none': 0.29, 'acc_stderr,none': 0.014356395999905694, 'acc_norm,none': 0.29, 'acc_norm_stderr,none': 0.014356395999905694, 'alias': ' - kmmlu_agricultural_sciences'}, 'kmmlu_aviation_engineering_and_maintenance': {'acc,none': 0.305, 'acc_stderr,none': 0.014566646394664377, 'acc_norm,none': 0.305, 'acc_norm_stderr,none': 0.014566646394664377, 'alias': ' - kmmlu_aviation_engineering_and_maintenance'}, 'kmmlu_biology': {'acc,none': 0.303, 'acc_stderr,none': 0.014539683710535269, 'acc_norm,none': 0.303, 'acc_norm_stderr,none': 0.014539683710535269, 
'alias': ' - kmmlu_biology'}, 'kmmlu_chemical_engineering': {'acc,none': 0.319, 'acc_stderr,none': 0.014746404865473487, 'acc_norm,none': 0.319, 'acc_norm_stderr,none': 0.014746404865473487, 'alias': ' - kmmlu_chemical_engineering'}, 'kmmlu_chemistry': {'acc,none': 0.29833333333333334, 'acc_stderr,none': 0.018694028559022177, 'acc_norm,none': 0.29833333333333334, 'acc_norm_stderr,none': 0.018694028559022177, 'alias': ' - kmmlu_chemistry'}, 'kmmlu_civil_engineering': {'acc,none': 0.352, 'acc_stderr,none': 0.015110404505648671, 'acc_norm,none': 0.352, 'acc_norm_stderr,none': 0.015110404505648671, 'alias': ' - kmmlu_civil_engineering'}, 'kmmlu_computer_science': {'acc,none': 0.592, 'acc_stderr,none': 0.015549205052920676, 'acc_norm,none': 0.592, 'acc_norm_stderr,none': 0.015549205052920676, 'alias': ' - kmmlu_computer_science'}, 'kmmlu_construction': {'acc,none': 0.295, 'acc_stderr,none': 0.01442855443844551, 'acc_norm,none': 0.295, 'acc_norm_stderr,none': 0.01442855443844551, 'alias': ' - kmmlu_construction'}, 'kmmlu_criminal_law': {'acc,none': 0.275, 'acc_stderr,none': 0.031652557907861936, 'acc_norm,none': 0.275, 'acc_norm_stderr,none': 0.031652557907861936, 'alias': ' - kmmlu_criminal_law'}, 'kmmlu_ecology': {'acc,none': 0.373, 'acc_stderr,none': 0.015300493622922809, 'acc_norm,none': 0.373, 'acc_norm_stderr,none': 0.015300493622922809, 'alias': ' - kmmlu_ecology'}, 'kmmlu_economics': {'acc,none': 0.3384615384615385, 'acc_stderr,none': 0.041661735408389584, 'acc_norm,none': 0.3384615384615385, 'acc_norm_stderr,none': 0.041661735408389584, 'alias': ' - kmmlu_economics'}, 'kmmlu_education': {'acc,none': 0.45, 'acc_stderr,none': 0.05, 'acc_norm,none': 0.45, 'acc_norm_stderr,none': 0.05, 'alias': ' - kmmlu_education'}, 'kmmlu_electrical_engineering': {'acc,none': 0.324, 'acc_stderr,none': 0.014806864733738854, 'acc_norm,none': 0.324, 'acc_norm_stderr,none': 0.014806864733738854, 'alias': ' - kmmlu_electrical_engineering'}, 'kmmlu_electronics_engineering': {'acc,none': 0.435, 'acc_stderr,none': 0.015685057252717193, 'acc_norm,none': 0.435, 'acc_norm_stderr,none': 0.015685057252717193, 'alias': ' - kmmlu_electronics_engineering'}, 'kmmlu_energy_management': {'acc,none': 0.277, 'acc_stderr,none': 0.014158794845306265, 'acc_norm,none': 0.277, 'acc_norm_stderr,none': 0.014158794845306265, 'alias': ' - kmmlu_energy_management'}, 'kmmlu_environmental_science': {'acc,none': 0.29, 'acc_stderr,none': 0.014356395999905687, 'acc_norm,none': 0.29, 'acc_norm_stderr,none': 0.014356395999905687, 'alias': ' - kmmlu_environmental_science'}, 'kmmlu_fashion': {'acc,none': 0.331, 'acc_stderr,none': 0.014888272588203922, 'acc_norm,none': 0.331, 'acc_norm_stderr,none': 0.014888272588203922, 'alias': ' - kmmlu_fashion'}, 'kmmlu_food_processing': {'acc,none': 0.322, 'acc_stderr,none': 0.014782913600996669, 'acc_norm,none': 0.322, 'acc_norm_stderr,none': 0.014782913600996669, 'alias': ' - kmmlu_food_processing'}, 'kmmlu_gas_technology_and_engineering': {'acc,none': 0.28, 'acc_stderr,none': 0.014205696104091501, 'acc_norm,none': 0.28, 'acc_norm_stderr,none': 0.014205696104091501, 'alias': ' - kmmlu_gas_technology_and_engineering'}, 'kmmlu_geomatics': {'acc,none': 0.328, 'acc_stderr,none': 0.014853842487270336, 'acc_norm,none': 0.328, 'acc_norm_stderr,none': 0.014853842487270336, 'alias': ' - kmmlu_geomatics'}, 'kmmlu_health': {'acc,none': 0.36, 'acc_stderr,none': 0.048241815132442176, 'acc_norm,none': 0.36, 'acc_norm_stderr,none': 0.048241815132442176, 'alias': ' - kmmlu_health'}, 'kmmlu_industrial_engineer': 
{'acc,none': 0.36, 'acc_stderr,none': 0.015186527932040115, 'acc_norm,none': 0.36, 'acc_norm_stderr,none': 0.015186527932040115, 'alias': ' - kmmlu_industrial_engineer'}, 'kmmlu_information_technology': {'acc,none': 0.537, 'acc_stderr,none': 0.01577592722726242, 'acc_norm,none': 0.537, 'acc_norm_stderr,none': 0.01577592722726242, 'alias': ' - kmmlu_information_technology'}, 'kmmlu_interior_architecture_and_design': {'acc,none': 0.379, 'acc_stderr,none': 0.015349091002225349, 'acc_norm,none': 0.379, 'acc_norm_stderr,none': 0.015349091002225349, 'alias': ' - kmmlu_interior_architecture_and_design'}, 'kmmlu_law': {'acc,none': 0.336, 'acc_stderr,none': 0.01494414023379502, 'acc_norm,none': 0.336, 'acc_norm_stderr,none': 0.01494414023379502, 'alias': ' - kmmlu_law'}, 'kmmlu_machine_design_and_manufacturing': {'acc,none': 0.317, 'acc_stderr,none': 0.014721675438880226, 'acc_norm,none': 0.317, 'acc_norm_stderr,none': 0.014721675438880226, 'alias': ' - kmmlu_machine_design_and_manufacturing'}, 'kmmlu_management': {'acc,none': 0.391, 'acc_stderr,none': 0.015438826294681783, 'acc_norm,none': 0.391, 'acc_norm_stderr,none': 0.015438826294681783, 'alias': ' - kmmlu_management'}, 'kmmlu_maritime_engineering': {'acc,none': 0.355, 'acc_stderr,none': 0.019551524326912272, 'acc_norm,none': 0.355, 'acc_norm_stderr,none': 0.019551524326912272, 'alias': ' - kmmlu_maritime_engineering'}, 'kmmlu_marketing': {'acc,none': 0.512, 'acc_stderr,none': 0.015814743314581818, 'acc_norm,none': 0.512, 'acc_norm_stderr,none': 0.015814743314581818, 'alias': ' - kmmlu_marketing'}, 'kmmlu_materials_engineering': {'acc,none': 0.297, 'acc_stderr,none': 0.014456832294801103, 'acc_norm,none': 0.297, 'acc_norm_stderr,none': 0.014456832294801103, 'alias': ' - kmmlu_materials_engineering'}, 'kmmlu_mechanical_engineering': {'acc,none': 0.288, 'acc_stderr,none': 0.01432694179723156, 'acc_norm,none': 0.288, 'acc_norm_stderr,none': 0.01432694179723156, 'alias': ' - kmmlu_mechanical_engineering'}, 'kmmlu_nondestructive_testing': {'acc,none': 0.3, 'acc_stderr,none': 0.014498627873361427, 'acc_norm,none': 0.3, 'acc_norm_stderr,none': 0.014498627873361427, 'alias': ' - kmmlu_nondestructive_testing'}, 'kmmlu_patent': {'acc,none': 0.31, 'acc_stderr,none': 0.04648231987117316, 'acc_norm,none': 0.31, 'acc_norm_stderr,none': 0.04648231987117316, 'alias': ' - kmmlu_patent'}, 'kmmlu_political_science_and_sociology': {'acc,none': 0.38666666666666666, 'acc_stderr,none': 0.02816313890819685, 'acc_norm,none': 0.38666666666666666, 'acc_norm_stderr,none': 0.02816313890819685, 'alias': ' - kmmlu_political_science_and_sociology'}, 'kmmlu_psychology': {'acc,none': 0.29, 'acc_stderr,none': 0.01435639599990569, 'acc_norm,none': 0.29, 'acc_norm_stderr,none': 0.01435639599990569, 'alias': ' - kmmlu_psychology'}, 'kmmlu_public_safety': {'acc,none': 0.334, 'acc_stderr,none': 0.014922019523732958, 'acc_norm,none': 0.334, 'acc_norm_stderr,none': 0.014922019523732958, 'alias': ' - kmmlu_public_safety'}, 'kmmlu_railway_and_automotive_engineering': {'acc,none': 0.272, 'acc_stderr,none': 0.014078856992462623, 'acc_norm,none': 0.272, 'acc_norm_stderr,none': 0.014078856992462623, 'alias': ' - kmmlu_railway_and_automotive_engineering'}, 'kmmlu_real_estate': {'acc,none': 0.345, 'acc_stderr,none': 0.03369796379336736, 'acc_norm,none': 0.345, 'acc_norm_stderr,none': 0.03369796379336736, 'alias': ' - kmmlu_real_estate'}, 'kmmlu_refrigerating_machinery': {'acc,none': 0.272, 'acc_stderr,none': 0.014078856992462618, 'acc_norm,none': 0.272, 'acc_norm_stderr,none': 
0.014078856992462618, 'alias': ' - kmmlu_refrigerating_machinery'}, 'kmmlu_social_welfare': {'acc,none': 0.364, 'acc_stderr,none': 0.015222868840522019, 'acc_norm,none': 0.364, 'acc_norm_stderr,none': 0.015222868840522019, 'alias': ' - kmmlu_social_welfare'}, 'kmmlu_taxation': {'acc,none': 0.36, 'acc_stderr,none': 0.03402629784040015, 'acc_norm,none': 0.36, 'acc_norm_stderr,none': 0.03402629784040015, 'alias': ' - kmmlu_taxation'}, 'kmmlu_telecommunications_and_wireless_technology': {'acc,none': 0.469, 'acc_stderr,none': 0.015788865959538996, 'acc_norm,none': 0.469, 'acc_norm_stderr,none': 0.015788865959538996, 'alias': ' - kmmlu_telecommunications_and_wireless_technology'}, 'mutual_plus': {'r@1,none': 0.2595936794582393, 'r@1_stderr,none': 0.014737047402750955, 'r@2,none': 0.4401805869074492, 'r@2_stderr,none': 0.016686597274671543, 'mrr,none': 0.6923438690720508, 'mrr_stderr,none': 0.01046550898169533, 'alias': 'mutual_plus'}, 'kobest': {'acc,none': 0.6033764525323394, 'acc_stderr,none': 0.10436362536496387, 'f1,none': 0.5553821902279571, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.496, 'acc_norm_stderr,none': 0.0005009699398797607, 'alias': 'kobest'}, 'kobest_boolq': {'acc,none': 0.811965811965812, 'acc_stderr,none': 0.010431780632246387, 'f1,none': 0.8116217798594848, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_boolq'}, 'kobest_copa': {'acc,none': 0.587, 'acc_stderr,none': 0.015577986829936531, 'f1,none': 0.5854574873102816, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_copa'}, 'kobest_hellaswag': {'acc,none': 0.404, 'acc_stderr,none': 0.02196663529383292, 'f1,none': 0.401473434891252, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.496, 'acc_norm_stderr,none': 0.02238235778196213, 'alias': ' - kobest_hellaswag'}, 'kobest_sentineg': {'acc,none': 0.5289672544080605, 'acc_stderr,none': 0.02508374348663252, 'f1,none': 0.4376216773098804, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_sentineg'}, 'kobest_wic': {'acc,none': 0.4865079365079365, 'acc_stderr,none': 0.014086365971849188, 'f1,none': 0.3441676032214804, 'f1_stderr,none': 'N/A', 'alias': ' - kobest_wic'}, 'race': {'acc,none': 0.4583732057416268, 'acc_stderr,none': 0.015420889760190567, 'alias': 'race'}, 'arithmetic': {'acc,none': 0.86535, 'acc_stderr,none': 0.1044357382014318, 'alias': 'arithmetic'}, 'arithmetic_1dc': {'acc,none': 0.639, 'acc_stderr,none': 0.010742308811391417, 'alias': 'arithmetic_1dc'}, 'arithmetic_2da': {'acc,none': 0.9875, 'acc_stderr,none': 0.002484947178762673, 'alias': 'arithmetic_2da'}, 'arithmetic_2dm': {'acc,none': 0.5685, 'acc_stderr,none': 0.011077690761900849, 'alias': 'arithmetic_2dm'}, 'arithmetic_2ds': {'acc,none': 0.994, 'acc_stderr,none': 0.0017272787111155127, 'alias': 'arithmetic_2ds'}, 'arithmetic_3da': {'acc,none': 0.9675, 'acc_stderr,none': 0.003966073608738821, 'alias': 'arithmetic_3da'}, 'arithmetic_3ds': {'acc,none': 0.96, 'acc_stderr,none': 0.004382876316119542, 'alias': 'arithmetic_3ds'}, 'arithmetic_4da': {'acc,none': 0.922, 'acc_stderr,none': 0.005997998665721458, 'alias': 'arithmetic_4da'}, 'arithmetic_4ds': {'acc,none': 0.915, 'acc_stderr,none': 0.006237543865716644, 'alias': 'arithmetic_4ds'}, 'arithmetic_5da': {'acc,none': 0.8705, 'acc_stderr,none': 0.007509532045059017, 'alias': 'arithmetic_5da'}, 'arithmetic_5ds': {'acc,none': 0.8235, 'acc_stderr,none': 0.008527029383968144, 'alias': 'arithmetic_5ds'}, 'mmlu': {'acc,none': 0.5901580971371599, 'acc_stderr,none': 0.1308916942503901, 'alias': 'mmlu'}, 'mmlu_humanities': {'alias': ' - humanities', 'acc,none': 0.5385759829968119, 
'acc_stderr,none': 0.14701376176218026}, 'mmlu_formal_logic': {'alias': '  - formal_logic', 'acc,none': 0.35714285714285715, 'acc_stderr,none': 0.04285714285714281}, 'mmlu_high_school_european_history': {'alias': '  - high_school_european_history', 'acc,none': 0.7272727272727273, 'acc_stderr,none': 0.0347769116216366}, 'mmlu_high_school_us_history': {'alias': '  - high_school_us_history', 'acc,none': 0.7843137254901961, 'acc_stderr,none': 0.028867431449849303}, 'mmlu_high_school_world_history': {'alias': '  - high_school_world_history', 'acc,none': 0.7805907172995781, 'acc_stderr,none': 0.026939106581553945}, 'mmlu_international_law': {'alias': '  - international_law', 'acc,none': 0.7603305785123967, 'acc_stderr,none': 0.03896878985070417}, 'mmlu_jurisprudence': {'alias': '  - jurisprudence', 'acc,none': 0.7129629629629629, 'acc_stderr,none': 0.043733130409147614}, 'mmlu_logical_fallacies': {'alias': '  - logical_fallacies', 'acc,none': 0.7607361963190185, 'acc_stderr,none': 0.033519538795212696}, 'mmlu_moral_disputes': {'alias': '  - moral_disputes', 'acc,none': 0.6589595375722543, 'acc_stderr,none': 0.025522474632121612}, 'mmlu_moral_scenarios': {'alias': '  - moral_scenarios', 'acc,none': 0.33631284916201115, 'acc_stderr,none': 0.015801003729145904}, 'mmlu_philosophy': {'alias': '  - philosophy', 'acc,none': 0.6463022508038585, 'acc_stderr,none': 0.027155208103200882}, 'mmlu_prehistory': {'alias': '  - prehistory', 'acc,none': 0.6697530864197531, 'acc_stderr,none': 0.026168298456732846}, 'mmlu_professional_law': {'alias': '  - professional_law', 'acc,none': 0.42046936114732725, 'acc_stderr,none': 0.012607654553832705}, 'mmlu_world_religions': {'alias': '  - world_religions', 'acc,none': 0.8128654970760234, 'acc_stderr,none': 0.029913127232368032}, 'mmlu_other': {'alias': ' - other', 'acc,none': 0.664628258770518, 'acc_stderr,none': 0.1089782488947092}, 'mmlu_business_ethics': {'alias': '  - business_ethics', 'acc,none': 0.59, 'acc_stderr,none': 0.04943110704237101}, 'mmlu_global_facts': {'alias': '  - global_facts', 'acc,none': 0.41, 'acc_stderr,none': 0.04943110704237102}, 'mmlu_human_aging': {'alias': '  - human_aging', 'acc,none': 0.6278026905829597, 'acc_stderr,none': 0.03244305283008732}, 'mmlu_management': {'alias': '  - management', 'acc,none': 0.7378640776699029, 'acc_stderr,none': 0.043546310772605956}, 'mmlu_marketing': {'alias': '  - marketing', 'acc,none': 0.8717948717948718, 'acc_stderr,none': 0.02190190511507332}, 'mmlu_miscellaneous': {'alias': '  - miscellaneous', 'acc,none': 0.7879948914431673, 'acc_stderr,none': 0.014616099385833711}, 'mmlu_nutrition': {'alias': '  - nutrition', 'acc,none': 0.673202614379085, 'acc_stderr,none': 0.026857294663281416}, 'mmlu_professional_accounting': {'alias': '  - professional_accounting', 'acc,none': 0.45390070921985815, 'acc_stderr,none': 0.02970045324729147}, 'mmlu_virology': {'alias': '  - virology', 'acc,none': 0.463855421686747, 'acc_stderr,none': 0.03882310850890594}, 'mmlu_social_sciences': {'alias': ' - social_sciences', 'acc,none': 0.6889827754306143, 'acc_stderr,none': 0.08980026474895134}, 'mmlu_econometrics': {'alias': '  - econometrics', 'acc,none': 0.4824561403508772, 'acc_stderr,none': 0.04700708033551038}, 'mmlu_high_school_geography': {'alias': '  - high_school_geography', 'acc,none': 0.7474747474747475, 'acc_stderr,none': 0.030954055470365907}, 'mmlu_high_school_government_and_politics': {'alias': '  - high_school_government_and_politics', 'acc,none': 0.8186528497409327, 'acc_stderr,none': 0.027807032360686088}, 
'mmlu_high_school_macroeconomics': {'alias': '  - high_school_macroeconomics', 'acc,none': 0.5769230769230769, 'acc_stderr,none': 0.025049197876042338}, 'mmlu_high_school_microeconomics': {'alias': '  - high_school_microeconomics', 'acc,none': 0.6596638655462185, 'acc_stderr,none': 0.030778057422931673}, 'mmlu_high_school_psychology': {'alias': '  - high_school_psychology', 'acc,none': 0.7853211009174312, 'acc_stderr,none': 0.01760430414925649}, 'mmlu_human_sexuality': {'alias': '  - human_sexuality', 'acc,none': 0.6946564885496184, 'acc_stderr,none': 0.04039314978724562}, 'mmlu_professional_psychology': {'alias': '  - professional_psychology', 'acc,none': 0.5898692810457516, 'acc_stderr,none': 0.019898412717635892}, 'mmlu_public_relations': {'alias': '  - public_relations', 'acc,none': 0.6727272727272727, 'acc_stderr,none': 0.04494290866252088}, 'mmlu_security_studies': {'alias': '  - security_studies', 'acc,none': 0.7061224489795919, 'acc_stderr,none': 0.029162738410249755}, 'mmlu_sociology': {'alias': '  - sociology', 'acc,none': 0.8407960199004975, 'acc_stderr,none': 0.02587064676616914}, 'mmlu_us_foreign_policy': {'alias': '  - us_foreign_policy', 'acc,none': 0.81, 'acc_stderr,none': 0.039427724440366234}, 'mmlu_stem': {'alias': ' - stem', 'acc,none': 0.497304154773232, 'acc_stderr,none': 0.10643423341212979}, 'mmlu_abstract_algebra': {'alias': '  - abstract_algebra', 'acc,none': 0.3, 'acc_stderr,none': 0.046056618647183814}, 'mmlu_astronomy': {'alias': '  - astronomy', 'acc,none': 0.625, 'acc_stderr,none': 0.039397364351956274}, 'mmlu_college_chemistry': {'alias': '  - college_chemistry', 'acc,none': 0.38, 'acc_stderr,none': 0.048783173121456316}, 'mmlu_college_computer_science': {'alias': '  - college_computer_science', 'acc,none': 0.54, 'acc_stderr,none': 0.05009082659620333}, 'mmlu_college_mathematics': {'alias': '  - college_mathematics', 'acc,none': 0.33, 'acc_stderr,none': 0.04725815626252605}, 'mmlu_college_physics': {'alias': '  - college_physics', 'acc,none': 0.39215686274509803, 'acc_stderr,none': 0.048580835742663434}, 'mmlu_computer_security': {'alias': '  - computer_security', 'acc,none': 0.67, 'acc_stderr,none': 0.047258156262526066}, 'mmlu_conceptual_physics': {'alias': '  - conceptual_physics', 'acc,none': 0.5106382978723404, 'acc_stderr,none': 0.03267862331014063}, 'mmlu_electrical_engineering': {'alias': '  - electrical_engineering', 'acc,none': 0.5586206896551724, 'acc_stderr,none': 0.04137931034482757}, 'mmlu_elementary_mathematics': {'alias': '  - elementary_mathematics', 'acc,none': 0.42328042328042326, 'acc_stderr,none': 0.02544636563440678}, 'mmlu_high_school_biology': {'alias': '  - high_school_biology', 'acc,none': 0.6935483870967742, 'acc_stderr,none': 0.026226485652553887}, 'mmlu_high_school_chemistry': {'alias': '  - high_school_chemistry', 'acc,none': 0.5073891625615764, 'acc_stderr,none': 0.035176035403610105}, 'mmlu_high_school_computer_science': {'alias': '  - high_school_computer_science', 'acc,none': 0.61, 'acc_stderr,none': 0.04902071300001975}, 'mmlu_high_school_mathematics': {'alias': '  - high_school_mathematics', 'acc,none': 0.34444444444444444, 'acc_stderr,none': 0.02897264888484427}, 'mmlu_high_school_physics': {'alias': '  - high_school_physics', 'acc,none': 0.3708609271523179, 'acc_stderr,none': 0.03943966699183629}, 'mmlu_high_school_statistics': {'alias': '  - high_school_statistics', 'acc,none': 0.4537037037037037, 'acc_stderr,none': 0.033953227263757976}, 'mmlu_machine_learning': {'alias': '  - machine_learning', 'acc,none': 
0.4642857142857143, 'acc_stderr,none': 0.04733667890053756}, 'asdiv': {'acc,none': 0.0, 'acc_stderr,none': 0.0, 'alias': 'asdiv'}, 'wic': {'acc,none': 0.6003134796238244, 'acc_stderr,none': 0.019407923975502145, 'alias': 'wic'}, 'swag': {'acc,none': 0.5974707587723683, 'acc_stderr,none': 0.0034672708384908342, 'acc_norm,none': 0.7870638808357493, 'acc_norm_stderr,none': 0.0028944122046582997, 'alias': 'swag'}, 'hellaswag': {'acc,none': 0.6602270464050985, 'acc_stderr,none': 0.004726640532562062, 'acc_norm,none': 0.8366859191396137, 'acc_norm_stderr,none': 0.003688965231733516, 'alias': 'hellaswag'}, 'cb': {'acc,none': 0.7678571428571429, 'acc_stderr,none': 0.0569293902400011, 'f1,none': 0.6534278959810874, 'f1_stderr,none': 'N/A', 'alias': 'cb'}, 'sglue_rte': {'acc,none': 0.7003610108303249, 'acc_stderr,none': 0.02757437014529261, 'alias': 'sglue_rte'}, 'mathqa': {'acc,none': 0.37051926298157456, 'acc_stderr,none': 0.008840914868809937, 'acc_norm,none': 0.3688442211055276, 'acc_norm_stderr,none': 0.008832636623685441, 'alias': 'mathqa'}}, 'groups': {'anli': {'acc,none': 0.470625, 'acc_stderr,none': 0.016323170842139138, 'alias': 'anli'}, 'lambada_cloze': {'perplexity,none': 79.69768925561641, 'perplexity_stderr,none': 21.41458436331952, 'acc,none': 0.2092955559868038, 'acc_stderr,none': 0.048106457496816746, 'alias': 'lambada_cloze'}, 'glue': {'mcc,none': 0.1275893167139847, 'mcc_stderr,none': 0.0010517992291010554, 'acc,none': 0.5822142909581908, 'acc_stderr,none': 0.032790502714334625, 'f1,none': 0.4411327873267142, 'f1_stderr,none': 0.0012831556331226797, 'alias': 'glue'}, 'blimp': {'acc,none': 0.8340597014925373, 'acc_stderr,none': 0.13897696485795538, 'alias': 'blimp'}, 'crows_pairs': {'likelihood_diff,none': 4.708786246175203, 'likelihood_diff_stderr,none': 0.4977051425278663, 'pct_stereotype,none': 0.586463923673226, 'pct_stereotype_stderr,none': 0.08249358792815063, 'alias': 'crows_pairs'}, 'ceval-valid': {'acc,none': 0.43684992570579495, 'acc_stderr,none': 0.14261591689704498, 'acc_norm,none': 0.43684992570579495, 'acc_norm_stderr,none': 0.14261591689704498, 'alias': 'ceval-valid'}, 'freebase': {'exact_match,none': 0.061515748031496065, 'exact_match_stderr,none': 0.005331527918306684, 'alias': 'freebase'}, 'cmmlu': {'acc,none': 0.42220687273355206, 'acc_stderr,none': 0.10478605809778388, 'acc_norm,none': 0.42220687273355206, 'acc_norm_stderr,none': 0.10478605809778388, 'alias': 'cmmlu'}, 'lambada': {'perplexity,none': 3.786399904636776, 'perplexity_stderr,none': 0.21254011171654186, 'acc,none': 0.6832912866291481, 'acc_stderr,none': 0.01690798528858505, 'alias': 'lambada'}, 'sycophancy': {'acc,none': 0.8909853249475891, 'acc_stderr,none': 0.06497187134187173, 'alias': 'sycophancy'}, 'qa4mre': {'acc,none': 0.5460992907801419, 'acc_stderr,none': 0.06008175299623157, 'acc_norm,none': 0.5815602836879432, 'acc_norm_stderr,none': 0.07866017109546872, 'alias': 'qa4mre'}, 'ai2_arc': {'acc,none': 0.7237880496054115, 'acc_stderr,none': 0.0870545951521031, 'acc_norm,none': 0.6992671927846674, 'acc_norm_stderr,none': 0.06575368411217784, 'alias': 'ai2_arc'}, 'multimedqa': {'alias': 'stem', 'acc,none': 0.5175301632363378, 'acc_stderr,none': 0.0694061118891545, 'acc_norm,none': 0.4778006153543328, 'acc_norm_stderr,none': 0.0001606261257056986}, 'kmmlu': {'acc,none': 0.34704013860814326, 'acc_stderr,none': 0.07238936581650249, 'acc_norm,none': 0.34704013860814326, 'acc_norm_stderr,none': 0.07238936581650249, 'alias': 'kmmlu'}, 'kobest': {'acc,none': 0.6033764525323394, 'acc_stderr,none': 
0.10436362536496387, 'f1,none': 0.5553821902279571, 'f1_stderr,none': 'N/A', 'acc_norm,none': 0.496, 'acc_norm_stderr,none': 0.0005009699398797607, 'alias': 'kobest'}, 'arithmetic': {'acc,none': 0.86535, 'acc_stderr,none': 0.1044357382014318, 'alias': 'arithmetic'}, 'mmlu': {'acc,none': 0.5901580971371599, 'acc_stderr,none': 0.1308916942503901, 'alias': 'mmlu'}, 'mmlu_humanities': {'alias': ' - humanities', 'acc,none': 0.5385759829968119, 'acc_stderr,none': 0.14701376176218026}, 'mmlu_other': {'alias': ' - other', 'acc,none': 0.664628258770518, 'acc_stderr,none': 0.1089782488947092}, 'mmlu_social_sciences': {'alias': ' - social_sciences', 'acc,none': 0.6889827754306143, 'acc_stderr,none': 0.08980026474895134}, 'mmlu_stem': {'alias': ' - stem', 'acc,none': 0.497304154773232, 'acc_stderr,none': 0.10643423341212979}}}}}\n"
     ]
    }
   ],
   "source": [
    "import json\n",
    "\n",
    "# Global result map if it's not set\n",
    "if 'global_result_map' not in globals():\n",
    "    global_result_map = {}\n",
    "\n",
    "#\n",
    "# Function to process the results.json file\n",
    "#\n",
    "def process_results_json(file_path):\n",
    "    with open(file_path) as f:\n",
    "        data = json.load(f)\n",
    "\n",
    "    # Model args, presplit by ','\n",
    "    model_args = data['config']['model_args'].split(',')\n",
    "\n",
    "    # Extract the pretrained value from config.model_args\n",
    "    modelname = model_args[0].split('=')[1]\n",
    "\n",
    "    # Opt array\n",
    "    confArgsArr = model_args[1:]\n",
    "\n",
    "    # Sort the opt array\n",
    "    confArgsArr.sort()\n",
    "    # Convert it to a string\n",
    "    confStr = ','.join(confArgsArr)\n",
    "\n",
    "    # Convert the option array of key=value strings to a dictionary\n",
    "    confObj = { }\n",
    "    for o in confArgsArr:\n",
    "        k, v = o.split('=')\n",
    "        confObj[k] = v\n",
    "    \n",
    "    # Create a dictionary to store the results, or use the existing one if it exists\n",
    "    if modelname in global_result_map:\n",
    "        modelObj = global_result_map[modelname]\n",
    "    else:\n",
    "        modelObj = {\n",
    "            'name': modelname,\n",
    "            'config': { }\n",
    "        }\n",
    "    \n",
    "    # Get the opt object for the model\n",
    "    if confStr in modelObj['config']:\n",
    "        confSet = modelObj['config'][confStr]\n",
    "    else:\n",
    "        confSet = {\n",
    "            'confStr': confStr,\n",
    "            'confObj': confObj,\n",
    "            'results': {},\n",
    "            'groups': {}\n",
    "        }\n",
    "\n",
    "    # Iterate over the results and extract the result object for each test/group\n",
    "    if 'results' in data:\n",
    "        for test, result in data['results'].items():\n",
    "            confSet['results'][test] = result\n",
    "    if 'groups' in data:\n",
    "        for test, result in data['groups'].items():\n",
    "            confSet['groups'][test] = result\n",
    "    \n",
    "    # Update the global result map object\n",
    "    modelObj['config'][confStr] = confSet\n",
    "    global_result_map[modelname] = modelObj\n",
    "    return modelObj\n",
    "\n",
    "# Lets test the function with the first results.json file\n",
    "first_result = process_results_json(results_json_files[0])\n",
    "print(f\"Processed example: \", first_result)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 130 models\n",
      "Models: \n",
      "['mistralai/Mistral-7B-Instruct-v0.2', 'mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'google/flan-t5-base', 'google/gemma-2b', 'google/gemma-2b-it', 'google/gemma-7b', 'google/gemma-7b-it', 'google/flan-t5-large', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/v5-EagleX-v2-7B-HF', 'RWKV/rwkv-6-world-1b6', 'RWKV/rwkv-4-world-1b5', 'RWKV/v5-Eagle-7B-HF', 'RWKV/v6-Finch-7B-HF', 'RWKV/rwkv-6-world-3b-v2.1', 'RWKV/rwkv-4-world-7b', 'RWKV/v6-Finch-14B-HF', 'RWKV/rwkv-raven-7b', 'RWKV/rwkv-6-world-3b', 'aisingapore/sealion7b', 'aisingapore/sealion3b', './rwkv-x-dev/1_3-C5-rwkv-270_pth', './rwkv-x-dev/225-EagleX-PreFT-C', './rwkv-x-dev/225-EagleX-PreFT-D', './rwkv-x-dev/1_0_pth', './rwkv-x-dev/chunk4-0_85_pth', './rwkv-x-dev/1_3-C1-rwkv-340_pth', './rwkv-x-dev/chunk1-0_8_pth', './rwkv-x-dev/chunk0-0_8_pth', './rwkv-x-dev/225-EagleX-PreFT-E', './rwkv-x-dev/225-EagleX-PreFT-B', './rwkv-x-dev/blink4-final_pth', './rwkv-x-dev/chunk2-0_8_pth', './rwkv-x-dev/chunk3-0_8_pth', './rwkv-x-dev/r3-4k-test2-fix3-blink-final_pth', './rwkv-x-dev/R4-7B-15t-With-Mask_pth', './rwkv-x-dev/r3-testchunk-1-8_pth', './rwkv-x-dev/R4-with-shuffle-rwkv-53_pth', './rwkv-x-dev/chunk7-2-0_85_pth', './rwkv-x-dev/EagleX-1_7T_pth', './rwkv-x-dev/r3-testchunk2-blink-fixed_pth', './rwkv-x-dev/r3-testchunk2-blink_pth', './rwkv-x-dev/rwkv-230_pth', './rwkv-x-dev/1_3-C0-rwkv-60_pth', './rwkv-x-dev/chunk5-0_85_pth', './rwkv-x-dev/R4-7B-Base-No-Mask_pth', './rwkv-x-dev/RWKV-5-World-1B5-v2-20231025-ctx4096', './rwkv-x-dev/R4-1B5-No-Mask_pth', './rwkv-x-dev/RWKV-32K-5B-RW_pth', './rwkv-x-dev/R4-7B-15t-32k-No-Mask_pth', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-60_pth', './rwkv-x-dev/EagleX_1-7T_Chat_pth', './rwkv-x-dev/1_3-C1-rwkv-390_pth', './rwkv-x-dev/1_3-C1-rwkv-20_pth', './rwkv-x-dev/chunk8-1-0_85_pth', './rwkv-x-dev/R4-7B-Base-32k-No-Mask_pth', './rwkv-x-dev/R4-no-shuffle-rwkv-53_pth', './rwkv-x-dev/1_3-C2-rwkv-648_pth', './rwkv-x-dev/1_3-C2-rwkv-250_pth', './rwkv-x-dev/r3-testchunk-1-8-no-cuda-with-warmup_pth', './rwkv-x-dev/1_3-C0-rwkv-140_pth', './rwkv-x-dev/bruber_9b', './rwkv-x-dev/Eagle-225-1FT', './rwkv-x-dev/225-EagleX-PreFT-A', './rwkv-x-dev/225-EagleX-PreFT-F', './rwkv-x-dev/r3-c1-8_pth', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-450_pth', './rwkv-x-dev/RWKV-5-World-3B-v2-20231118-ctx16k', './rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-40_pth', './rwkv-x-dev/RWKV-5-World-7B-v2-20240128-ctx4096', './rwkv-x-dev/R4-7B-15t-No-Mask_pth', './rwkv-x-dev/1_0-c1-290_pth', './rwkv-x-dev/R4-1B5-With-Mask_pth', './rwkv-x-dev/Quetzal-N8-1', './rwkv-x-dev/1_3-C0-PREPRERUN-rwkv-30_pth', './rwkv-x-dev/1_3-C0-rwkv-70_pth', './rwkv-x-dev/chunk6-0_85_pth', './rwkv-x-dev/R4-7B-Base-With-Mask_pth', 'rwkv-x-dev/v5-Eagle-7B-1_0T-HF', './rwkv-x-dev/1_3-C0-PRERUN-rwkv-30_pth', './rwkv-x-dev/chunk7-1-0_85_pth', './rwkv-x-dev/1_3-C1-rwkv-190_pth', './rwkv-x-dev/R4-7B-15t-extd-e3_pth', './rwkv-x-dev/r3-testchunk2_pth', './rwkv-x-dev/Hermes-RWKV-v5-7B_pth', './rwkv-x-dev/1_3-C0-rwkv-153_pth', './rwkv-x-dev/R4-7B-15t-extd-e2_pth', './rwkv-x-dev/r3-testchunk-blink_pth', 'SmerkyG/rwkv-5-world-1b5', 'SmerkyG/rwkv6-world-1b6', 
'SmerkyG/rwkv6-world-3b', 'SmerkyG/rwkv-5-world-3b', 'SmerkyG/rwkv-5-world-7b', 'SmerkyG/rwkv5-world-7b', 'togethercomputer/RedPajama-INCITE-7B-Base', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'm8than/Finch-14B-Continued', 'm8than/FinchX-Med', 'TimeMobius/Mobius-RWKV-Chat-12B-128k-v4-HF', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf', 'state-spaces/mamba-2.8b-hf', 'state-spaces/mamba-1.4b-hf']\n",
      "Saved to compiled-lm-eval-results.json\n"
     ]
    }
   ],
   "source": [
    "# Lets reset and reprocess all the results.json files\n",
    "global_result_map = {}\n",
    "\n",
    "# Process all the results.json files\n",
    "for file in results_json_files:\n",
    "    process_results_json(file)\n",
    "\n",
    "# Show high level list of models\n",
    "print(f\"Found {len(global_result_map)} models\")\n",
    "print(f\"Models: \\n{list(global_result_map.keys())}\")\n",
    "\n",
    "# Save the result map to a file\n",
    "with open('summary/compiled-lm-eval-results.json', 'w') as f:\n",
    "    json.dump(global_result_map, f, sort_keys=True, indent='\\t')\n",
    "\n",
    "# Echo that its been saved to json\n",
    "print(f\"Saved to compiled-lm-eval-results.json\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Convert the results into CSV table formats"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>model</th>\n",
       "      <th>avg_acc</th>\n",
       "      <th>avg_acc_stderr</th>\n",
       "      <th>xcopa (acc)</th>\n",
       "      <th>xcopa (acc_stderr)</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>mistralai/Mistral-7B-Instruct-v0.2</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>NaN</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>mistralai/Mistral-7B-v0.1</td>\n",
       "      <td>0.559455</td>\n",
       "      <td>0.053879</td>\n",
       "      <td>0.559455</td>\n",
       "      <td>0.053879</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>mosaicml/mpt-7b-instruct</td>\n",
       "      <td>0.537091</td>\n",
       "      <td>0.041919</td>\n",
       "      <td>0.537091</td>\n",
       "      <td>0.041919</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>mosaicml/mpt-7b</td>\n",
       "      <td>0.536000</td>\n",
       "      <td>0.042339</td>\n",
       "      <td>0.536000</td>\n",
       "      <td>0.042339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>mosaicml/mpt-7b-chat</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>0.047059</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>0.047059</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>56</th>\n",
       "      <td>huggyllama/llama-7b</td>\n",
       "      <td>0.541818</td>\n",
       "      <td>0.040718</td>\n",
       "      <td>0.541818</td>\n",
       "      <td>0.040718</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>57</th>\n",
       "      <td>meta-llama/Llama-2-7b-chat-hf</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.054954</td>\n",
       "      <td>0.559818</td>\n",
       "      <td>0.054954</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>58</th>\n",
       "      <td>meta-llama/Llama-2-7b-hf</td>\n",
       "      <td>0.566727</td>\n",
       "      <td>0.052515</td>\n",
       "      <td>0.566727</td>\n",
       "      <td>0.052515</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>59</th>\n",
       "      <td>state-spaces/mamba-2.8b-hf</td>\n",
       "      <td>0.552909</td>\n",
       "      <td>0.035570</td>\n",
       "      <td>0.552909</td>\n",
       "      <td>0.035570</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>60</th>\n",
       "      <td>state-spaces/mamba-1.4b-hf</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.031390</td>\n",
       "      <td>0.544182</td>\n",
       "      <td>0.031390</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>61 rows × 5 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "                                 model   avg_acc  avg_acc_stderr  xcopa (acc)  \\\n",
       "0   mistralai/Mistral-7B-Instruct-v0.2  0.000000        0.000000          NaN   \n",
       "1            mistralai/Mistral-7B-v0.1  0.559455        0.053879     0.559455   \n",
       "2             mosaicml/mpt-7b-instruct  0.537091        0.041919     0.537091   \n",
       "3                      mosaicml/mpt-7b  0.536000        0.042339     0.536000   \n",
       "4                 mosaicml/mpt-7b-chat  0.538000        0.047059     0.538000   \n",
       "..                                 ...       ...             ...          ...   \n",
       "56                 huggyllama/llama-7b  0.541818        0.040718     0.541818   \n",
       "57       meta-llama/Llama-2-7b-chat-hf  0.559818        0.054954     0.559818   \n",
       "58            meta-llama/Llama-2-7b-hf  0.566727        0.052515     0.566727   \n",
       "59          state-spaces/mamba-2.8b-hf  0.552909        0.035570     0.552909   \n",
       "60          state-spaces/mamba-1.4b-hf  0.544182        0.031390     0.544182   \n",
       "\n",
       "    xcopa (acc_stderr)  \n",
       "0                  NaN  \n",
       "1             0.053879  \n",
       "2             0.041919  \n",
       "3             0.042339  \n",
       "4             0.047059  \n",
       "..                 ...  \n",
       "56            0.040718  \n",
       "57            0.054954  \n",
       "58            0.052515  \n",
       "59            0.035570  \n",
       "60            0.031390  \n",
       "\n",
       "[61 rows x 5 columns]"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Lets convert this into a table, which we will display in this notebook, and save as a CSV\n",
    "import pandas as pd\n",
    "\n",
    "##################################################\n",
    "#\n",
    "#  Utility functions\n",
    "#\n",
    "##################################################\n",
    "\n",
    "# Check if the given name string, is within the list, including \"*\" wildcard\n",
    "def is_in_list(name, list):\n",
    "    for n in list:\n",
    "        if n[-1] == '*':\n",
    "            if name.startswith(n[:-1]):\n",
    "                return True\n",
    "        elif n == name:\n",
    "            return True\n",
    "    return False\n",
    "\n",
    "# Is in inclusion exclusion list pair\n",
    "def is_in_list_pair(name, inList, exList):\n",
    "    if not is_in_list(name, inList):\n",
    "        return False\n",
    "    if is_in_list(name, exList):\n",
    "        return False\n",
    "    return True\n",
    "\n",
    "# Prepare a single test/group result object\n",
    "# By applying common filtering and formatting changes\n",
    "def prepare_test_result(result):\n",
    "    # The reutrn object\n",
    "    ret = {}\n",
    "    # Iterate the result key/value\n",
    "    for k, v in result.items():\n",
    "        # Skip if its alias\n",
    "        if k == 'alias':\n",
    "            continue\n",
    "\n",
    "        # If the key ends with \",none\", drop the \",none\"\n",
    "        if k.endswith(',none'):\n",
    "            k = k[:-5]\n",
    "        \n",
    "        # Save the result\n",
    "        ret[k] = v\n",
    "    \n",
    "    # Return the result\n",
    "    return ret\n",
    "\n",
    "##################################################\n",
    "#\n",
    "#  Generate the result\n",
    "#\n",
    "##################################################\n",
    "\n",
    "# Create a list of rows for the table\n",
    "def generate_result_table(\n",
    "    inConfig = { \"dtype\": \"bfloat16\" },\n",
    "\n",
    "    # Models to include/exclude\n",
    "    inModels = [\"*\"],\n",
    "    exModels = [\"./rwkv-x-dev/*\", \"rwkv-x-dev\"],\n",
    "\n",
    "    # Results and groups to include\n",
    "    inResults = [],\n",
    "    inGroups = [\"*\"],\n",
    "\n",
    "    # Exclude results and groups, applied after inResults and inGroups\n",
    "    exResults = [],\n",
    "    exGroups = [],\n",
    "\n",
    "    # Sorted\n",
    "    sort = False,\n",
    "    simplified = False\n",
    "):\n",
    "    table_rows = []\n",
    "\n",
    "    # Iterate over the models\n",
    "    for model, modelObj in global_result_map.items():\n",
    "        # Skip if not in the inModels or exModels\n",
    "        if not is_in_list_pair(model, inModels, exModels):\n",
    "            continue\n",
    "\n",
    "        # Iterate over the configurations\n",
    "        for confStr, confSet in modelObj['config'].items():\n",
    "            # Get the confObj\n",
    "            confObj = confSet['confObj']\n",
    "\n",
    "            # Check if the inConfig, matches the confObj\n",
    "            if inConfig:\n",
    "                skip = False\n",
    "                for k, v in inConfig.items():\n",
    "                    if k not in confObj or confObj[k] != v:\n",
    "                        skip = True\n",
    "                        break\n",
    "                if skip:\n",
    "                    continue\n",
    "\n",
    "            # Create a row object\n",
    "            row = {\n",
    "                'model': model,\n",
    "                # 'config': confStr\n",
    "\n",
    "                \"avg_acc\": 0.0,\n",
    "                \"avg_acc_stderr\": 0.0,\n",
    "            }\n",
    "\n",
    "            # Total acc / acc_stderr\n",
    "            acc_total = 0.0\n",
    "            acc_count = 0\n",
    "            acc_stderr_total = 0.0\n",
    "            acc_stderr_count = 0\n",
    "\n",
    "            # Add the groups\n",
    "            for test, result in confSet['groups'].items():\n",
    "\n",
    "                # Skip if not in the inGroups or exGroups\n",
    "                if not is_in_list_pair(test, inGroups, exGroups):\n",
    "                    continue\n",
    "\n",
    "                # Filter the result obj\n",
    "                cleanResult = prepare_test_result(result)\n",
    "\n",
    "                # Add the result to the row, as seperate columns for each key\n",
    "                for k, v in cleanResult.items():\n",
    "                    if k == 'acc':\n",
    "                        acc_total += v\n",
    "                        acc_count += 1\n",
    "                    elif k == 'acc_stderr':\n",
    "                        acc_stderr_total += v\n",
    "                        acc_stderr_count += 1\n",
    "                    \n",
    "                    # For simplified, we only use acc and perplexity\n",
    "                    if simplified and k not in ['acc', 'perplexity']:\n",
    "                        continue\n",
    "\n",
    "                    # Save the value\n",
    "                    row[f\"{test} ({k})\"] = v\n",
    "\n",
    "            # Add the results\n",
    "            for test, result in confSet['results'].items():\n",
    "\n",
    "                # Skip if not in the inResults or exResults\n",
    "                if not is_in_list_pair(test, inResults, exResults):\n",
    "                    continue\n",
    "\n",
    "                # Filter the result obj\n",
    "                cleanResult = prepare_test_result(result)\n",
    "\n",
    "                # Add the result to the row, as seperate columns for each key\n",
    "                for k, v in cleanResult.items():\n",
    "                    if k == 'acc':\n",
    "                        acc_total += v\n",
    "                        acc_count += 1\n",
    "                    elif k == 'acc_stderr':\n",
    "                        acc_stderr_total += v\n",
    "                        acc_stderr_count += 1\n",
    "\n",
    "                    # For simplified, we only use acc and perplexity\n",
    "                    if simplified and k not in ['acc', 'perplexity']:\n",
    "                        continue\n",
    "\n",
    "                    # Save the value\n",
    "                    row[f\"{test} ({k})\"] = v\n",
    "                \n",
    "            # Add the avg acc and acc_stderr\n",
    "            if acc_count > 0:\n",
    "               row[\"avg_acc\"] = acc_total / acc_count\n",
    "            if acc_stderr_count > 0:\n",
    "                row[\"avg_acc_stderr\"] = acc_stderr_total / acc_stderr_count\n",
    "\n",
    "            # Append the row to the table\n",
    "            table_rows.append(row)\n",
    "\n",
    "    # Create a dataframe from the table rows\n",
    "    df = pd.DataFrame(table_rows)\n",
    "\n",
    "    # Sort by avg_acc\n",
    "    if sort:\n",
    "        df = df.sort_values(by='avg_acc', ascending=False)\n",
    "\n",
    "    # Show the dataframe\n",
    "    return df\n",
    "\n",
    "# Generate the dataframe\n",
    "df = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"xcopa\"], inResults=[] )\n",
    "\n",
    "# # Save the dataframe to a CSV file\n",
    "# df.to_csv('summary/compiled-lm-eval-results.csv', index=False)\n",
    "\n",
    "# Show results\n",
    "df\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "total 38624\n",
      "-rw-r--r--@ 1 picocreator  staff   1.3M Jul 26 09:22 bf16-all-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   350K Jul 26 09:22 bf16-all-simplified-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   350K Jul 26 09:22 bf16-all-sorted-results-and-groups.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    91K Jul 26 09:22 bf16-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   1.2M Jul 26 09:22 bf16-eng-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   105K Jul 26 09:22 bf16-eng-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   134K Jul 26 09:22 bf16-multilang-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    19K Jul 26 09:22 bf16-multilang-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    91K Jul 26 09:22 bf16-sorted-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   1.2M Jul 26 09:22 bf16-sorted-eng-results.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   105K Jul 26 09:22 bf16-sorted-eng-summary.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    19K Jul 26 09:22 bf16-sorted-multilang-summary.csv\n",
      "-rw-r--r--  1 picocreator  staff    10M Jul 26 09:22 compiled-lm-eval-results.json\n",
      "-rw-r--r--@ 1 picocreator  staff   184K Jul 26 09:21 rwkv-x-dev-bf16-sorted-eng-180.csv\n",
      "-rw-r--r--@ 1 picocreator  staff    33K Jul 26 09:21 rwkv-x-dev-bf16-sorted-eng-21-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   107K Jul 26 09:22 rwkv-x-dev-bf16-sorted-eng-all.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   6.7K Jul 26 09:22 rwkv-x-dev-bf16-sorted-eng-focus.csv\n",
      "-rw-r--r--@ 1 picocreator  staff   5.7K Jul 26 09:22 rwkv-x-dev-bf16-sorted-multilang-summary.csv\n"
     ]
    }
   ],
   "source": [
    "##################################################\n",
    "#\n",
    "#  Build the various subsets\n",
    "#\n",
    "##################################################\n",
    "\n",
    "FOCUS_MODEL_LIST=[\n",
    "    # \"./rwkv-x-dev/*\", \n",
    "    \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\", \"m8than/*\"\n",
    "]\n",
    "\n",
    "# Overall results\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"] )\n",
    "all_results.to_csv('summary/bf16-all-results-and-groups.csv', index=False)\n",
    "\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], simplified=True )\n",
    "all_results.to_csv('summary/bf16-all-simplified-results-and-groups.csv', index=False)\n",
    "\n",
    "all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], simplified=True, sort=True )\n",
    "all_results.to_csv('summary/bf16-all-sorted-results-and-groups.csv', index=False)\n",
    "\n",
    "# Multilang results\n",
    "multiLang_tTest = [\"xcopa_*\", \"xnli_*\", \"xstorycloze_*\", \"xwinograd_*\", \"lambada_openai_*\", \"pawsx_*\"]\n",
    "multiLang_tGrps = [\"xcopa\",   \"xnli\",   \"xstorycloze\",   \"xwinograd\",   \"lambada_multilingual\", \"pawsx\"]\n",
    "# Both test and groups, merged into a single list\n",
    "multiLang_joint = multiLang_tTest + multiLang_tGrps\n",
    "\n",
    "multilang_grp = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[] )\n",
    "multilang_test = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=multiLang_tTest )\n",
    "multilang_grp.to_csv('summary/bf16-multilang-summary.csv', index=False)\n",
    "multilang_test.to_csv('summary/bf16-multilang-results.csv', index=False)\n",
    "\n",
    "multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], sort=True )\n",
    "multilang_grp_sorted.to_csv('summary/bf16-sorted-multilang-summary.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_multilang_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=multiLang_tGrps, inResults=[], exModels=[], inModels=FOCUS_MODEL_LIST, sort=True )\n",
    "rwkv_multilang_grp_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-multilang-summary.csv', index=False)\n",
    "\n",
    "# All other results\n",
    "eng_grp = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint )\n",
    "eng_grp_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[], exGroups=multiLang_joint, exResults=multiLang_joint, sort=True )\n",
    "eng_test = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exGroups=multiLang_joint, exResults=multiLang_joint )\n",
    "eng_test_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exGroups=multiLang_joint, exResults=multiLang_joint, sort=True )\n",
    "\n",
    "eng_grp.to_csv('summary/bf16-eng-summary.csv', index=False)\n",
    "eng_test.to_csv('summary/bf16-eng-results.csv', index=False)\n",
    "eng_test_sorted.to_csv('summary/bf16-sorted-eng-results.csv', index=False)\n",
    "eng_grp_sorted.to_csv('summary/bf16-sorted-eng-summary.csv', index=False)\n",
    "\n",
    "# English focused subset\n",
    "eng_focus_mixed=[\"lambada_openai\", \"lambada_standard\", \"blimp\", \"piqa\", \"copa\", \"sciq\", \"truthfulqa\", \"pythia\"] #\"np_open\", \"cmmlu\", \"record\"\n",
    "eng_focus_tGrps=[\"anli\", \"glue\", \"mmlu\" ]\n",
    "eng_focus_tTest=[\"blimp\", \"arc_*\", \"logiqa\", \"winogrande\", \"openbookqa\", \"hellaswag\"]\n",
    "\n",
    "eng_focus_tGrps = eng_focus_tGrps + eng_focus_mixed\n",
    "eng_focus_tTest = eng_focus_tTest + eng_focus_mixed\n",
    "\n",
    "eng_focus = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest )\n",
    "eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, sort=True )\n",
    "eng_focus.to_csv('summary/bf16-eng-focus.csv', index=False)\n",
    "eng_focus_sorted.to_csv('summary/bf16-sorted-eng-focus.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_eng_focus_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_focus_tGrps, inResults=eng_focus_tTest, exModels=[], inModels=FOCUS_MODEL_LIST, sort=True, simplified=True )\n",
    "rwkv_eng_focus_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-focus.csv', index=False)\n",
    "\n",
    "# RWKV perf tracking\n",
    "rwkv_eng_all_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], exModels=[], inModels=FOCUS_MODEL_LIST, sort=True, simplified=True )\n",
    "rwkv_eng_all_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-all.csv', index=False)\n",
    "\n",
    "# # Overall results\n",
    "# rwkv_all_results = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=[\"*\"], inResults=[\"*\"], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\"], exModels=[], sort=True )\n",
    "# rwkv_all_results.to_csv('summary/rwkv-x-dev-bf16-all-results-and-groups.csv', index=False)\n",
    "\n",
    "# List the files\n",
    "!ls -lh summary"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 21 eval focus\n",
    "focus_21=[\"sciq\", \"glue\", \"anli\", \"mnli\", \"mnli_mismatch\", \"swag\", \"winogrande\", \"wnli\", \"truthfulqa\", \"logiqa\", \"logiqa2\", \"lambada_standard\", \"lambada_openai\", \"mmlu\", \"piqa\", \"arc_easy\", \"arc_challenge\", \"hellaswag\", \"openbookqa\", \"mathqa\", \"arithmetic\"]\n",
    "focus_21_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=focus_21, inResults=focus_21, exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\"], sort=True, simplified=True )\n",
    "focus_21_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-21-focus.csv', index=False)\n",
    "\n",
    "# English 180\n",
    "eng_180=[\n",
    "    \"anli\",\n",
    "    \"glue\",\n",
    "    \"blimp\",\n",
    "    \"truthfulqa\",\n",
    "    \"lambada\",\n",
    "    \"ai2_arc\",\n",
    "    \"multimedqa\",\n",
    "    \"pythia\",\n",
    "    \"mathqa\",\n",
    "    \"mmlu\",\n",
    "    \"anli_r1\",\n",
    "    \"anli_r2\",\n",
    "    \"anli_r3\",\n",
    "    \"wsc\",\n",
    "    \"lambada_standard_cloze_yaml\",\n",
    "    \"mnli\",\n",
    "    \"mnli_mismatch\",\n",
    "    \"mrpc\",\n",
    "    \"qnli\",\n",
    "    \"qqp\",\n",
    "    \"rte\",\n",
    "    \"sst2\",\n",
    "    \"wnli\",\n",
    "    \"blimp_adjunct_island\",\n",
    "    \"blimp_anaphor_gender_agreement\",\n",
    "    \"blimp_anaphor_number_agreement\",\n",
    "    \"blimp_animate_subject_passive\",\n",
    "    \"blimp_animate_subject_trans\",\n",
    "    \"blimp_causative\",\n",
    "    \"blimp_complex_NP_island\",\n",
    "    \"blimp_coordinate_structure_constraint_complex_left_branch\",\n",
    "    \"blimp_coordinate_structure_constraint_object_extraction\",\n",
    "    \"blimp_determiner_noun_agreement_1\",\n",
    "    \"blimp_determiner_noun_agreement_2\",\n",
    "    \"blimp_determiner_noun_agreement_irregular_1\",\n",
    "    \"blimp_determiner_noun_agreement_irregular_2\",\n",
    "    \"blimp_determiner_noun_agreement_with_adj_2\",\n",
    "    \"blimp_determiner_noun_agreement_with_adj_irregular_1\",\n",
    "    \"blimp_determiner_noun_agreement_with_adj_irregular_2\",\n",
    "    \"blimp_determiner_noun_agreement_with_adjective_1\",\n",
    "    \"blimp_distractor_agreement_relational_noun\",\n",
    "    \"blimp_distractor_agreement_relative_clause\",\n",
    "    \"blimp_drop_argument\",\n",
    "    \"blimp_ellipsis_n_bar_1\",\n",
    "    \"blimp_ellipsis_n_bar_2\",\n",
    "    \"blimp_existential_there_object_raising\",\n",
    "    \"blimp_existential_there_quantifiers_1\",\n",
    "    \"blimp_existential_there_quantifiers_2\",\n",
    "    \"blimp_existential_there_subject_raising\",\n",
    "    \"blimp_expletive_it_object_raising\",\n",
    "    \"blimp_inchoative\",\n",
    "    \"blimp_intransitive\",\n",
    "    \"blimp_irregular_past_participle_adjectives\",\n",
    "    \"blimp_irregular_past_participle_verbs\",\n",
    "    \"blimp_irregular_plural_subject_verb_agreement_1\",\n",
    "    \"blimp_irregular_plural_subject_verb_agreement_2\",\n",
    "    \"blimp_left_branch_island_echo_question\",\n",
    "    \"blimp_left_branch_island_simple_question\",\n",
    "    \"blimp_matrix_question_npi_licensor_present\",\n",
    "    \"blimp_npi_present_1\",\n",
    "    \"blimp_npi_present_2\",\n",
    "    \"blimp_only_npi_licensor_present\",\n",
    "    \"blimp_only_npi_scope\",\n",
    "    \"blimp_passive_1\",\n",
    "    \"blimp_passive_2\",\n",
    "    \"blimp_principle_A_c_command\",\n",
    "    \"blimp_principle_A_case_1\",\n",
    "    \"blimp_principle_A_case_2\",\n",
    "    \"blimp_principle_A_domain_1\",\n",
    "    \"blimp_principle_A_domain_2\",\n",
    "    \"blimp_principle_A_domain_3\",\n",
    "    \"blimp_principle_A_reconstruction\",\n",
    "    \"blimp_regular_plural_subject_verb_agreement_1\",\n",
    "    \"blimp_regular_plural_subject_verb_agreement_2\",\n",
    "    \"blimp_sentential_negation_npi_licensor_present\",\n",
    "    \"blimp_sentential_negation_npi_scope\",\n",
    "    \"blimp_sentential_subject_island\",\n",
    "    \"blimp_superlative_quantifiers_1\",\n",
    "    \"blimp_superlative_quantifiers_2\",\n",
    "    \"blimp_tough_vs_raising_1\",\n",
    "    \"blimp_tough_vs_raising_2\",\n",
    "    \"blimp_transitive\",\n",
    "    \"blimp_wh_island\",\n",
    "    \"blimp_wh_questions_object_gap\",\n",
    "    \"blimp_wh_questions_subject_gap\",\n",
    "    \"blimp_wh_questions_subject_gap_long_distance\",\n",
    "    \"blimp_wh_vs_that_no_gap\",\n",
    "    \"blimp_wh_vs_that_no_gap_long_distance\",\n",
    "    \"blimp_wh_vs_that_with_gap\",\n",
    "    \"blimp_wh_vs_that_with_gap_long_distance\",\n",
    "    \"sciq\",\n",
    "    \"truthfulqa_mc1\",\n",
    "    \"truthfulqa_mc2\",\n",
    "    \"multirc\",\n",
    "    \"lambada_openai\",\n",
    "    \"lambada_standard\",\n",
    "    \"piqa\",\n",
    "    \"prost\",\n",
    "    \"wsc273\",\n",
    "    \"qa4mre_2011\",\n",
    "    \"qa4mre_2012\",\n",
    "    \"qa4mre_2013\",\n",
    "    \"arc_challenge\",\n",
    "    \"arc_easy\",\n",
    "    \"logiqa\",\n",
    "    \"winogrande\",\n",
    "    \"boolq\",\n",
    "    \"logiqa2\",\n",
    "    \"openbookqa\",\n",
    "    \"medmcqa\",\n",
    "    \"medqa_4options\",\n",
    "    \"mmlu_anatomy\",\n",
    "    \"mmlu_clinical_knowledge\",\n",
    "    \"mmlu_college_biology\",\n",
    "    \"mmlu_college_medicine\",\n",
    "    \"mmlu_medical_genetics\",\n",
    "    \"mmlu_professional_medicine\",\n",
    "    \"pubmedqa\",\n",
    "    \"mc_taco\",\n",
    "    \"lambada_openai_mt_de\",\n",
    "    \"lambada_openai_mt_en\",\n",
    "    \"lambada_openai_mt_es\",\n",
    "    \"lambada_openai_mt_fr\",\n",
    "    \"lambada_openai_mt_it\",\n",
    "    \"mmlu_formal_logic\",\n",
    "    \"mmlu_high_school_european_history\",\n",
    "    \"mmlu_high_school_us_history\",\n",
    "    \"mmlu_high_school_world_history\",\n",
    "    \"mmlu_international_law\",\n",
    "    \"mmlu_jurisprudence\",\n",
    "    \"mmlu_logical_fallacies\",\n",
    "    \"mmlu_moral_disputes\",\n",
    "    \"mmlu_moral_scenarios\",\n",
    "    \"mmlu_philosophy\",\n",
    "    \"mmlu_prehistory\",\n",
    "    \"mmlu_professional_law\",\n",
    "    \"mmlu_world_religions\",\n",
    "    \"mmlu_business_ethics\",\n",
    "    \"mmlu_global_facts\",\n",
    "    \"mmlu_human_aging\",\n",
    "    \"mmlu_management\",\n",
    "    \"mmlu_marketing\",\n",
    "    \"mmlu_miscellaneous\",\n",
    "    \"mmlu_nutrition\",\n",
    "    \"mmlu_professional_accounting\",\n",
    "    \"mmlu_virology\",\n",
    "    \"mmlu_econometrics\",\n",
    "    \"mmlu_high_school_geography\",\n",
    "    \"mmlu_high_school_government_and_politics\",\n",
    "    \"mmlu_high_school_macroeconomics\",\n",
    "    \"mmlu_high_school_microeconomics\",\n",
    "    \"mmlu_high_school_psychology\",\n",
    "    \"mmlu_human_sexuality\",\n",
    "    \"mmlu_professional_psychology\",\n",
    "    \"mmlu_public_relations\",\n",
    "    \"mmlu_security_studies\",\n",
    "    \"mmlu_sociology\",\n",
    "    \"mmlu_us_foreign_policy\",\n",
    "    \"mmlu_abstract_algebra\",\n",
    "    \"mmlu_astronomy\",\n",
    "    \"mmlu_college_chemistry\",\n",
    "    \"mmlu_college_computer_science\",\n",
    "    \"mmlu_college_mathematics\",\n",
    "    \"mmlu_college_physics\",\n",
    "    \"mmlu_computer_security\",\n",
    "    \"mmlu_conceptual_physics\",\n",
    "    \"mmlu_electrical_engineering\",\n",
    "    \"mmlu_elementary_mathematics\",\n",
    "    \"mmlu_high_school_biology\",\n",
    "    \"mmlu_high_school_chemistry\",\n",
    "    \"mmlu_high_school_computer_science\",\n",
    "    \"mmlu_high_school_mathematics\",\n",
    "    \"mmlu_high_school_physics\",\n",
    "    \"mmlu_high_school_statistics\",\n",
    "    \"mmlu_machine_learning\",\n",
    "    \"wic\",\n",
    "    \"swag\",\n",
    "    \"hellaswag\",\n",
    "    \"cb\",\n",
    "    \"sglue_rte\"\n",
    "]\n",
    "eng_180_sorted = generate_result_table( inConfig = { \"dtype\": \"bfloat16\" }, inGroups=eng_180, inResults=eng_180, exModels=[], inModels=[\"./rwkv-x-dev/*\", \"rwkv-x-dev/*\", \"RWKV/*\", \"meta-llama/Llama-2-7b*\", \"mistralai/Mistral-7B-v0.1\"], sort=True, simplified=True )\n",
    "eng_180_sorted.to_csv('summary/rwkv-x-dev-bf16-sorted-eng-180.csv', index=False)\n",
    "\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}