{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import json\n",
    "import os\n",
    "\n",
    "hardpos_path = os.path.join('/data2/projects/VRIS/llama3', 'verb_ext_text_example_refzom.json')\n",
    "with open(hardpos_path, 'r', encoding='utf-8') as f:\n",
    "    hardpos_json = json.load(f)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "57624\n",
      "loading dataset ref-zom into memory...\n",
      "loading dataset split final\n",
      "creating index...\n",
      "index created.\n",
      "DONE (t=11.10s)\n"
     ]
    }
   ],
   "source": [
    "print(len(hardpos_json.keys()))\n",
    "\n",
    "from refer.refer_zom import ZREFER\n",
    "refer = ZREFER('/data2/dataset/COCO2014/', 'ref-zom', 'final')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2327 {'2327': []}\n",
      "{'sent_ids': [2327], 'file_name': 'COCO_train2014_000000318556.jpg', 'ann_id': [], 'ref_id': 2327, 'image_id': 318556, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['Cooking', 'table', 'in', 'background'], 'raw': 'Cooking table in background', 'sent_id': 2327, 'sent': 'Cooking table in background'}]}\n",
      "2328 {'2328': []}\n",
      "{'sent_ids': [2328], 'file_name': 'COCO_train2014_000000116100.jpg', 'ann_id': [], 'ref_id': 2328, 'image_id': 116100, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['An', 'elephant', 'that', 'has', \"it's\", 'trunk', 'and', 'all', 'four', 'feet', 'in', 'the', 'water.'], 'raw': \"An elephant that has it's trunk and all four feet in the water.\", 'sent_id': 2328, 'sent': \"An elephant that has it's trunk and all four feet in the water.\"}]}\n",
      "2329 {'2329': ['carrying plates of pizza']}\n",
      "{'sent_ids': [2329], 'file_name': 'COCO_train2014_000000538480.jpg', 'ann_id': [], 'ref_id': 2329, 'image_id': 538480, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['Man', 'in', 'a', 'black', 'shirt', 'carrying', 'plates', 'of', 'pizza.'], 'raw': 'Man in a black shirt carrying plates of pizza.', 'sent_id': 2329, 'sent': 'Man in a black shirt carrying plates of pizza.'}]}\n",
      "2330 {'2330': ['holding']}\n",
      "{'sent_ids': [2330], 'file_name': 'COCO_train2014_000000476220.jpg', 'ann_id': [], 'ref_id': 2330, 'image_id': 476220, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['The', 'stuffed', 'pig', 'that', 'the', 'blond', 'boy', 'is', 'holding'], 'raw': 'The stuffed pig that the blond boy is holding', 'sent_id': 2330, 'sent': 'The stuffed pig that the blond boy is holding'}]}\n",
      "2331 {'2331': []}\n",
      "{'sent_ids': [2331], 'file_name': 'COCO_train2014_000000299675.jpg', 'ann_id': [], 'ref_id': 2331, 'image_id': 299675, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['The', 'MacBook', 'Pro', 'box.'], 'raw': 'The MacBook Pro box.', 'sent_id': 2331, 'sent': 'The MacBook Pro box.'}]}\n",
      "2332 {'2332': []}\n",
      "{'sent_ids': [2332], 'file_name': 'COCO_train2014_000000032275.jpg', 'ann_id': [], 'ref_id': 2332, 'image_id': 32275, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['A', 'purple', 'brick', 'building', 'with', 'a', 'black', 'and', 'blue', 'parking', 'meter.', ''], 'raw': 'A purple brick building with a black and blue parking meter. ', 'sent_id': 2332, 'sent': 'A purple brick building with a black and blue parking meter. '}]}\n",
      "2333 {'2333': ['being wrapped around']}\n",
      "{'sent_ids': [2333], 'file_name': 'COCO_train2014_000000025470.jpg', 'ann_id': [], 'ref_id': 2333, 'image_id': 25470, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['The', 'lighter', 'colored', 'giraffe', 'whose', 'neck', 'is', 'wrapped', 'around', 'the', 'other', 'giraffes'], 'raw': 'The lighter colored giraffe whose neck is wrapped around the other giraffes', 'sent_id': 2333, 'sent': 'The lighter colored giraffe whose neck is wrapped around the other giraffes'}]}\n",
      "2334 {'2334': ['reaching for a frisbee']}\n",
      "{'sent_ids': [2334], 'file_name': 'COCO_train2014_000000513461.jpg', 'ann_id': [], 'ref_id': 2334, 'image_id': 513461, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['A', 'man', 'wearing', 'white', 'and', 'blue', 'shorts,', 'reaching', 'for', 'a', 'frisbee.'], 'raw': 'A man wearing white and blue shorts, reaching for a frisbee.', 'sent_id': 2334, 'sent': 'A man wearing white and blue shorts, reaching for a frisbee.'}]}\n",
      "2335 {'2335': []}\n",
      "{'sent_ids': [2335], 'file_name': 'COCO_train2014_000000285579.jpg', 'ann_id': [], 'ref_id': 2335, 'image_id': 285579, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['A', 'motorbike', 'occupied', 'by', 'two', 'men', 'dressed', 'like', 'teddy', 'bear.'], 'raw': 'A motorbike occupied by two men dressed like teddy bear.', 'sent_id': 2335, 'sent': 'A motorbike occupied by two men dressed like teddy bear.'}]}\n",
      "2336 {'2336': []}\n",
      "{'sent_ids': [2336], 'file_name': 'COCO_train2014_000000266366.jpg', 'ann_id': [], 'ref_id': 2336, 'image_id': 266366, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['left', 'portion', 'of', 'sandwich', 'closest', 'to', 'pickle'], 'raw': 'left portion of sandwich closest to pickle', 'sent_id': 2336, 'sent': 'left portion of sandwich closest to pickle'}]}\n",
      "2337 {'2337': ['leaning over']}\n",
      "{'sent_ids': [2337], 'file_name': 'COCO_train2014_000000321194.jpg', 'ann_id': [], 'ref_id': 2337, 'image_id': 321194, 'split': 'train', 'source': 'zero', 'sentences': [{'tokens': ['A', 'man', 'in', 'white', 'leaning', 'over.'], 'raw': 'A man in white leaning over.', 'sent_id': 2337, 'sent': 'A man in white leaning over.'}]}\n"
     ]
    }
   ],
   "source": [
    "for idx, key in enumerate(hardpos_json) :\n",
    "    print(key, hardpos_json[key])\n",
    "    print(refer.Refs[int(key)])\n",
    "    \n",
    "    if idx == 10 :\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "57624\n"
     ]
    }
   ],
   "source": [
    "ref_ids = refer.getRefIds(split='train')\n",
    "print(len(ref_ids))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "dict_values([['standing next to', 'being held'], ['standing in front']])\n"
     ]
    }
   ],
   "source": [
    "pos_sents = hardpos_json['9914'].values()\n",
    "print(pos_sents)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "dict_values([['standing next to', 'being held'], ['standing in front']])\n",
      "[['standing next to', 'being held'], ['standing in front']]\n",
      "['standing next to', 'being held']\n",
      "[101, 100, 100, 102]\n"
     ]
    }
   ],
   "source": [
    "from bert.tokenization_bert import BertTokenizer\n",
    "import random\n",
    "pos_sents = hardpos_json['9914'].values()\n",
    "print(pos_sents)\n",
    "pos_sents = [s for s in pos_sents if s is not None]\n",
    "print(pos_sents)\n",
    "pos_sent_picked = random.choice(list(pos_sents))\n",
    "print(pos_sent_picked)\n",
    "\n",
    "\n",
    "attention_mask = [0] * 20\n",
    "padded_input_ids = [0] * 20\n",
    "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n",
    "\n",
    "input_ids = tokenizer.encode(text=pos_sent_picked, add_special_tokens=True)\n",
    "input_ids = input_ids[:20]\n",
    "print(input_ids)\n"
   ]
  },
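  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added sketch: the [101, 100, 100, 102] above is [CLS] [UNK] [UNK] [SEP].\n",
    "# random.choice() returned a *list* of phrases, and BertTokenizer.encode treats\n",
    "# a list as pre-tokenized tokens, so each whole phrase maps to [UNK]. A plausible\n",
    "# fix, assuming one phrase per sample is wanted, is to flatten and pick a string:\n",
    "flat_pos_sents = [s for lst in hardpos_json['9914'].values() for s in lst]\n",
    "single_pick = random.choice(flat_pos_sents)  # a str, not a list\n",
    "input_ids = tokenizer.encode(text=single_pick, add_special_tokens=True)[:20]\n",
    "print(single_pick, '->', input_ids)\n"
   ]
  },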
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers\n",
      "  warnings.warn(f\"Importing from {__name__} is deprecated, please import via timm.layers\", FutureWarning)\n"
     ]
    }
   ],
   "source": [
    "import datetime\n",
    "import argparse\n",
    "import os\n",
    "import time\n",
    "\n",
    "import torch\n",
    "import torch.utils.data\n",
    "from torch import nn\n",
    "\n",
    "from functools import reduce\n",
    "import operator\n",
    "from bert.modeling_bert import BertModel\n",
    "import torchvision\n",
    "from lib import segmentation\n",
    "\n",
    "import transforms as T\n",
    "import utils\n",
    "import numpy as np\n",
    "\n",
    "import torch.nn.functional as F\n",
    "\n",
    "import gc\n",
    "from collections import OrderedDict\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Image size: 480\n"
     ]
    }
   ],
   "source": [
    "# python -m torch.distributed.launch \\\n",
    "# --nproc_per_node 4 \\\n",
    "# --master_port ${LOCALHOST} \\\n",
    "# train.py \\\n",
    "# --model lavt_one \\\n",
    "# --dataset refcocog \\\n",
    "# --splitBy umd \\\n",
    "# --model_id gref_umd \\\n",
    "# --batch-size 8 \\\n",
    "# --lr 0.00005 \\\n",
    "# --wd 1e-2 \\\n",
    "# --output-dir ./models/gref_umd/lavt_test_lr \\\n",
    "# --swin_type base \\\n",
    "# --pretrained_swin_weights ./pretrained_weights/swin_base_patch4_window12_384_22k.pth \\\n",
    "# --epochs 40 \\\n",
    "# --img_size 480 2>&1 | tee ./models/gref_umd/lavt_test_lr\n",
    "import argparse\n",
    "from utils import init_distributed_mode\n",
    "\n",
    "def get_parser():\n",
    "    parser = argparse.ArgumentParser(description=\"Model Training Configuration\")\n",
    "    parser.add_argument('--batch_size', default=8, type=int)\n",
    "    parser.add_argument('--output_dir', default='./models/gref_umd/lavt_test_dset', type=str)\n",
    "    parser.add_argument('--pretrained_swin_weights', default='./pretrained_weights/swin_base_patch4_window12_384_22k.pth', type=str)\n",
    "    parser.add_argument('--dataset', default='refcocog', type=str)\n",
    "    parser.add_argument('--splitBy', default='umd', type=str)\n",
    "    parser.add_argument('--model', default='lavt_one', type=str)\n",
    "\n",
    "    parser.add_argument('--amsgrad', action='store_true',\n",
    "                        help='if true, set amsgrad to True in an Adam or AdamW optimizer.')\n",
    "    parser.add_argument('-b', '--batch-size', default=8, type=int)\n",
    "    parser.add_argument('--bert_tokenizer', default='bert-base-uncased', help='BERT tokenizer')\n",
    "    parser.add_argument('--ck_bert', default='bert-base-uncased', help='pre-trained BERT weights')\n",
    "    #parser.add_argument('--dataset', default='refcoco', help='refcoco, refcoco+, or refcocog')\n",
    "    parser.add_argument('--ddp_trained_weights', action='store_true',\n",
    "                        help='Only needs specified when testing,'\n",
    "                             'whether the weights to be loaded are from a DDP-trained model')\n",
    "    parser.add_argument('--device', default='cuda:0', help='device')  # only used when testing on a single machine\n",
    "    parser.add_argument('--epochs', default=40, type=int, metavar='N', help='number of total epochs to run')\n",
    "    parser.add_argument('--fusion_drop', default=0.0, type=float, help='dropout rate for PWAMs')\n",
    "    parser.add_argument('--img_size', default=480, type=int, help='input image size')\n",
    "    parser.add_argument(\"--local_rank\", type=int, help='local rank for DistributedDataParallel')\n",
    "    parser.add_argument('--lr', default=0.00005, type=float, help='the initial learning rate')\n",
    "    parser.add_argument('--mha', default='', help='If specified, should be in the format of a-b-c-d, e.g., 4-4-4-4,'\n",
    "                                                  'where a, b, c, and d refer to the numbers of heads in stage-1,'\n",
    "                                                  'stage-2, stage-3, and stage-4 PWAMs')\n",
    "    #parser.add_argument('--model', default='lavt', help='model: lavt, lavt_one')\n",
    "    parser.add_argument('--model_id', default='lavt', help='name to identify the model')\n",
    "    parser.add_argument('--output-dir', default='./checkpoints/', help='path where to save checkpoint weights')\n",
    "    parser.add_argument('--pin_mem', action='store_true',\n",
    "                        help='If true, pin memory when using the data loader.')\n",
    "    parser.add_argument('--print-freq', default=10, type=int, help='print frequency')\n",
    "    parser.add_argument('--refer_data_root', default='./refer/data/', help='REFER dataset root directory')\n",
    "    parser.add_argument('--resume', default='', help='resume from checkpoint')\n",
    "    parser.add_argument('--split', default='test', help='only used when testing')\n",
    "    #parser.add_argument('--splitBy', default='unc', help='change to umd or google when the dataset is G-Ref (RefCOCOg)')\n",
    "    parser.add_argument('--swin_type', default='base',\n",
    "                        help='tiny, small, base, or large variants of the Swin Transformer')\n",
    "    parser.add_argument('--wd', '--weight-decay', default=1e-2, type=float, metavar='W', help='weight decay',\n",
    "                        dest='weight_decay')\n",
    "    parser.add_argument('--window12', action='store_true',\n",
    "                        help='only needs specified when testing,'\n",
    "                             'when training, window size is inferred from pre-trained weights file name'\n",
    "                             '(containing \\'window12\\'). Initialize Swin with window size 12 instead of the default 7.')\n",
    "    parser.add_argument('-j', '--workers', default=8, type=int, metavar='N', help='number of data loading workers')\n",
    "\n",
    "    parser.add_argument('--metric_learning', default=True, type=bool, help='whether to use metric learning')\n",
    "    parser.add_argument('--exclude_multiobj', default=True, type=bool, help='whether to exclude multi-object images')\n",
    "    parser.add_argument('--metric_mode', default='both', type=str, help='both : add hp and hn')\n",
    "    parser.add_argument('--hn_prob', default=0.5, type=float, help='negative sample prob')\n",
    "    \n",
    "    return parser\n",
    "\n",
    "parser = get_parser()\n",
    "args = parser.parse_args([])\n",
    "print('Image size: {}'.format(str(args.img_size)))"
   ]
  },
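  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added sanity check: because '--batch_size'/'--batch-size' and\n",
    "# '--output_dir'/'--output-dir' collapse to the same dest, print the resolved\n",
    "# defaults rather than assuming which registration won.\n",
    "print('batch_size:', args.batch_size)\n",
    "print('output_dir:', args.output_dir)\n",
    "print('metric_learning:', args.metric_learning, '| metric_mode:', args.metric_mode)\n"
   ]
  },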
  {
   "cell_type": "code",
   "execution_count": 107,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import sys\n",
    "import json\n",
    "import torch.utils.data as data\n",
    "import torch\n",
    "from torchvision import transforms\n",
    "from torch.autograd import Variable\n",
    "import numpy as np\n",
    "from PIL import Image\n",
    "import torchvision.transforms.functional as TF\n",
    "import random\n",
    "\n",
    "from bert.tokenization_bert import BertTokenizer\n",
    "\n",
    "import h5py\n",
    "from refer.refer import REFER\n",
    "\n",
    "from args import get_parser\n",
    "\n",
    "# Dataset configuration initialization\n",
    "# parser = get_parser()\n",
    "# args = parser.parse_args()\n",
    "\n",
    "\n",
    "class ReferDataset(data.Dataset):\n",
    "\n",
    "    def __init__(self,\n",
    "                 args,\n",
    "                 image_transforms=None,\n",
    "                 target_transforms=None,\n",
    "                 split='train',\n",
    "                 eval_mode=False):\n",
    "\n",
    "        self.classes = []\n",
    "        self.image_transforms = image_transforms\n",
    "        self.target_transform = target_transforms\n",
    "        self.split = split\n",
    "        self.refer = REFER(args.refer_data_root, args.dataset, args.splitBy)\n",
    "\n",
    "        self.max_tokens = 20\n",
    "\n",
    "        ref_ids = self.refer.getRefIds(split=self.split)\n",
    "        img_ids = self.refer.getImgIds(ref_ids)\n",
    "\n",
    "        all_imgs = self.refer.Imgs\n",
    "        self.imgs = list(all_imgs[i] for i in img_ids)\n",
    "        self.ref_ids = ref_ids\n",
    "\n",
    "        self.input_ids = []\n",
    "        self.attention_masks = []\n",
    "        self.tokenizer = BertTokenizer.from_pretrained(args.bert_tokenizer)\n",
    "\n",
    "        # for metric learning\n",
    "        self.ROOT = '/data2/projects/seunghoon/VerbRIS/VerbCentric_CY/datasets/VRIS'\n",
    "        self.metric_learning = args.metric_learning\n",
    "        self.exclude_multiobj = args.exclude_multiobj\n",
    "        self.metric_mode = args.metric_mode\n",
    "        self.exclude_position = False\n",
    "\n",
    "        if self.metric_learning:\n",
    "            self.hardneg_prob = args.hn_prob \n",
    "            self.multi_obj_ref_ids = self._load_multi_obj_ref_ids()\n",
    "            self.hardpos_meta, self.hardneg_meta = self._load_metadata()\n",
    "        else:\n",
    "            self.hardneg_prob = 0.0\n",
    "            self.multi_obj_ref_ids = None\n",
    "            self.hardpos_meta, self.hardneg_meta = None, None\n",
    "\n",
    "\n",
    "        self.eval_mode = eval_mode\n",
    "        # if we are testing on a dataset, test all sentences of an object;\n",
    "        # o/w, we are validating during training, randomly sample one sentence for efficiency\n",
    "        for r in ref_ids:\n",
    "            ref = self.refer.Refs[r]\n",
    "\n",
    "            sentences_for_ref = []\n",
    "            attentions_for_ref = []\n",
    "\n",
    "            for i, (el, sent_id) in enumerate(zip(ref['sentences'], ref['sent_ids'])):\n",
    "                sentence_raw = el['raw']\n",
    "                attention_mask = [0] * self.max_tokens\n",
    "                padded_input_ids = [0] * self.max_tokens\n",
    "\n",
    "                input_ids = self.tokenizer.encode(text=sentence_raw, add_special_tokens=True)\n",
    "\n",
    "                # truncation of tokens\n",
    "                input_ids = input_ids[:self.max_tokens]\n",
    "\n",
    "                padded_input_ids[:len(input_ids)] = input_ids\n",
    "                attention_mask[:len(input_ids)] = [1]*len(input_ids)\n",
    "\n",
    "                sentences_for_ref.append(torch.tensor(padded_input_ids).unsqueeze(0))\n",
    "                attentions_for_ref.append(torch.tensor(attention_mask).unsqueeze(0))\n",
    "\n",
    "            self.input_ids.append(sentences_for_ref)\n",
    "            self.attention_masks.append(attentions_for_ref)\n",
    "\n",
    "\n",
    "    def _tokenize(self, sentence):\n",
    "        attention_mask = [0] * self.max_tokens\n",
    "        padded_input_ids = [0] * self.max_tokens\n",
    "\n",
    "        input_ids = self.tokenizer.encode(text=sentence, add_special_tokens=True)\n",
    "        # truncation of tokens\n",
    "        input_ids = input_ids[:self.max_tokens]\n",
    "        padded_input_ids[:len(input_ids)] = input_ids\n",
    "        attention_mask[:len(input_ids)] = [1]*len(input_ids)\n",
    "\n",
    "        return torch.tensor(padded_input_ids), torch.tensor(attention_mask)\n",
    "    \n",
    "    def _plot(self, img, target):\n",
    "        import matplotlib.pyplot as plt\n",
    "\n",
    "        # If img is a PyTorch tensor, convert it to a NumPy array and adjust shape\n",
    "        if isinstance(img, torch.Tensor):\n",
    "            img = img.cpu().numpy()\n",
    "            if img.shape[0] == 3:  # Shape is (channels, height, width)\n",
    "                img = img.transpose(1, 2, 0)  # Now shape is (height, width, channels)\n",
    "\n",
    "        # Ensure target is a NumPy array\n",
    "        if isinstance(target, torch.Tensor):\n",
    "            target = target.cpu().numpy()\n",
    "            if target.ndim == 3 and target.shape[0] == 1:  # Shape is (1, height, width)\n",
    "                target = target.squeeze(0)  # Now shape is (height, width)\n",
    "\n",
    "        plt.imshow(img)\n",
    "        plt.imshow(target, alpha=0.5)\n",
    "        plt.show()\n",
    "\n",
    "\n",
    "    def _load_multi_obj_ref_ids(self):\n",
    "        # Load multi-object reference IDs based on configurations\n",
    "        if not self.exclude_multiobj and not self.exclude_position :\n",
    "            return None\n",
    "        elif self.exclude_position:\n",
    "            multiobj_path = os.path.join(self.ROOT, 'multiobj_ov2_nopos.txt')\n",
    "        elif self.exclude_multiobj :\n",
    "            multiobj_path = os.path.join(self.ROOT, 'multiobj_ov3.txt')\n",
    "        with open(multiobj_path, 'r') as f:\n",
    "            return [int(line.strip()) for line in f.readlines()]\n",
    "\n",
    "    def _load_metadata(self):\n",
    "        # Load metadata for hard positive verb phrases, hard negative queries\n",
    "        if 'op2' in self.metric_mode :\n",
    "            hardpos_path = os.path.join(self.ROOT, 'hardpos_verbphrase_op2_1024upd.json') \n",
    "        else :\n",
    "            hardpos_path = os.path.join(self.ROOT, 'hardpos_verbphrase_0906upd.json')\n",
    "        # do not use hardneg_path\n",
    "        hardneg_path = os.path.join(self.ROOT, 'hardneg_verb.json')\n",
    "\n",
    "        with open(hardpos_path, 'r', encoding='utf-8') as f:\n",
    "            hardpos_json = json.load(f)\n",
    "        if \"hardpos_only\" in self.metric_mode :\n",
    "            hardneg_json = None\n",
    "        else :         \n",
    "            with open(hardneg_path, 'r', encoding='utf-8') as q:\n",
    "                hardneg_json = json.load(q)\n",
    "        return hardpos_json, hardneg_json\n",
    "\n",
    "    def get_classes(self):\n",
    "        return self.classes\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.ref_ids)\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        this_ref_id = self.ref_ids[index]\n",
    "        this_img_id = self.refer.getImgIds(this_ref_id)\n",
    "        this_img = self.refer.Imgs[this_img_id[0]]\n",
    "\n",
    "        img = Image.open(os.path.join(self.refer.IMAGE_DIR, this_img['file_name'])).convert(\"RGB\")\n",
    "\n",
    "        ref = self.refer.loadRefs(this_ref_id)\n",
    "        #print(ref)\n",
    "\n",
    "        ref_mask = np.array(self.refer.getMask(ref[0])['mask'])\n",
    "        annot = np.zeros(ref_mask.shape)\n",
    "        annot[ref_mask == 1] = 1\n",
    "\n",
    "        annot = Image.fromarray(annot.astype(np.uint8), mode=\"P\")\n",
    "\n",
    "        if self.image_transforms is not None:\n",
    "            # resize, from PIL to tensor, and mean and std normalization\n",
    "            img, target = self.image_transforms(img, annot)\n",
    "\n",
    "        pos_sent = None\n",
    "        neg_sent = None\n",
    "        pos_attn_mask = None\n",
    "        neg_attn_mask = None\n",
    "        choice_sent = None\n",
    "\n",
    "        if self.eval_mode:\n",
    "            embedding = []\n",
    "            att = []\n",
    "            for s in range(len(self.input_ids[index])):\n",
    "                e = self.input_ids[index][s]\n",
    "                a = self.attention_masks[index][s]\n",
    "                embedding.append(e.unsqueeze(-1))\n",
    "                att.append(a.unsqueeze(-1))\n",
    "\n",
    "            tensor_embeddings = torch.cat(embedding, dim=-1)\n",
    "            attention_mask = torch.cat(att, dim=-1)\n",
    "        else: # train phase\n",
    "            choice_sent = np.random.choice(len(self.input_ids[index]))\n",
    "            tensor_embeddings = self.input_ids[index][choice_sent]\n",
    "            attention_mask = self.attention_masks[index][choice_sent]\n",
    "\n",
    "            # print(\"object id: \", this_ref_id)\n",
    "            # print(\"sentence ids: \", self.input_ids[index])\n",
    "            # for i in range(len(self.input_ids[index])):\n",
    "            #     print(\"object sentences: \", self.tokenizer.decode(self.input_ids[index][i].squeeze(0).tolist()))\n",
    "            # # plot selected refid\n",
    "            # self._plot(img, target)\n",
    "\n",
    "            pos_sent, neg_sent = None, None\n",
    "            pos_attn_mask, neg_attn_mask = None, None\n",
    "            pos_mask = [[1, ]] # (GT, pos) 초기화\n",
    "            neg_mask = [[0, ]] # (GT, neg) 초기화\n",
    "\n",
    "            if self.metric_learning:\n",
    "                if self.metric_mode in ['hardpos_only', 'hardpos_only_rev'] or self.hardneg_prob == 0.0:\n",
    "                    pos_sent_dict = self.hardpos_meta.get(str(this_ref_id), {})\n",
    "                    pos_sents = []\n",
    "                    for sent_list in pos_sent_dict.values():\n",
    "                        pos_sents.extend(sent_list)\n",
    "                    if pos_sents:\n",
    "                        pos_sent = random.choice(pos_sents)\n",
    "                        pos_sent, pos_attn_mask = self._tokenize(pos_sent)\n",
    "                else:\n",
    "                    if random.random() < self.hardneg_prob:\n",
    "                        neg_sent_dict = self.hardneg_meta.get(str(this_ref_id), {})\n",
    "                        neg_sents = []\n",
    "                        for sent_list in neg_sent_dict.values():\n",
    "                            neg_sents.extend(sent_list)\n",
    "                        if neg_sents:\n",
    "                            neg_sent = random.choice(neg_sents)\n",
    "                            neg_sent, neg_attn_mask = self._tokenize(neg_sent)\n",
    "                    else:\n",
    "                        pos_sent_dict = self.hardpos_meta.get(str(this_ref_id), {})\n",
    "                        pos_sents = []\n",
    "                        for sent_list in pos_sent_dict.values():\n",
    "                            pos_sents.extend(sent_list)\n",
    "                        if pos_sents:\n",
    "                            pos_sent = random.choice(pos_sents)\n",
    "                            #print(\"original pos sentence: \", pos_sent)\n",
    "                            pos_sent, pos_attn_mask = self._tokenize(pos_sent)\n",
    "                if pos_sent is None and len(self.input_ids[index]) > 1:\n",
    "                    to_select = list(range(len(self.input_ids[index])))\n",
    "                    to_select.remove(choice_sent)\n",
    "                    choice_sent = np.random.choice(to_select)\n",
    "                    pos_sent = self.input_ids[index][choice_sent]\n",
    "                    pos_attn_mask = self.attention_masks[index][choice_sent]\n",
    "                    #print(\"pos sent does not exist, use other sentence : \", self.tokenizer.decode(pos_sent.squeeze(0).tolist()))\n",
    "\n",
    "                # concat tensors\n",
    "                if img.dim() == 3:\n",
    "                    img = img.unsqueeze(0)  # [1, C, H, W]\n",
    "                if target.dim() == 2:\n",
    "                    target = target.unsqueeze(0)  # [1, H, W]\n",
    "                if tensor_embeddings.dim() == 1:\n",
    "                    tensor_embeddings = tensor_embeddings.unsqueeze(0)  # [1, max_tokens]\n",
    "                if attention_mask.dim() == 1:\n",
    "                    attention_mask = attention_mask.unsqueeze(0)  # [1, max_tokens]\n",
    "                if pos_sent is not None and pos_sent.dim() == 1:\n",
    "                    pos_sent = pos_sent.unsqueeze(0)\n",
    "                if neg_sent is not None and neg_sent.dim() == 1:\n",
    "                    neg_sent = neg_sent.unsqueeze(0)\n",
    "                if pos_attn_mask is not None and pos_attn_mask.dim() == 1:\n",
    "                    pos_attn_mask = pos_attn_mask.unsqueeze(0)\n",
    "                if neg_attn_mask is not None and neg_attn_mask.dim() == 1:\n",
    "                    neg_attn_mask = neg_attn_mask.unsqueeze(0)\n",
    "\n",
    "\n",
    "            # print(\"index: \", self.input_ids[index])\n",
    "            # print(\"choice_sent: \", choice_sent)\n",
    "            # print(\"tensor_embeddings: \", tensor_embeddings)\n",
    "            # print(\"original sentence: \", self.tokenizer.decode(tensor_embeddings.squeeze(0).tolist()))\n",
    "            # print(\"pos_sent: \", pos_sent)\n",
    "            # print(\"neg_sent: \", neg_sent)\n",
    "            # print(\"pos_attn_mask: \", pos_attn_mask)\n",
    "            # print(\"neg_attn_mask: \", neg_attn_mask)\n",
    "            # print(img.shape, target.shape, tensor_embeddings.shape, attention_mask.shape, pos_mask, neg_mask)\n",
    "\n",
    "            if (pos_sent is not None) and (neg_sent is not None):\n",
    "                img = torch.stack([img, img, img], dim=0)\n",
    "                target = torch.stack([target, target, target], dim=0)\n",
    "                tensor_embeddings = torch.stack([tensor_embeddings, pos_sent, neg_sent], dim=0)\n",
    "                attention_mask = torch.stack([attention_mask, pos_attn_mask, neg_attn_mask], dim=0)\n",
    "                pos_mask = [[1, 1, 0]]\n",
    "                neg_mask = [[0, 0, 1]]\n",
    "            elif (pos_sent is not None and not neg_sent) or (neg_sent is not None and not pos_sent):\n",
    "                img = torch.stack([img, img], dim=0)\n",
    "                target = torch.stack([target, target], dim=0)\n",
    "                tensor_embeddings = torch.stack([tensor_embeddings, pos_sent], dim=0) if (pos_sent is not None) \\\n",
    "                                    else torch.stack([tensor_embeddings, neg_sent], dim=0)\n",
    "                attention_mask = torch.stack([attention_mask, pos_attn_mask], dim=0) if (pos_attn_mask is not None) \\\n",
    "                                    else torch.stack([attention_mask, neg_attn_mask], dim=0)\n",
    "                pos_mask = [[1, int(pos_sent is not None)]]\n",
    "                neg_mask = [[0, int(neg_sent is not None)]]\n",
    "            else:\n",
    "                pass\n",
    "        return img, target, tensor_embeddings, attention_mask, pos_mask, neg_mask\n"
   ]
  },
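  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added sketch of the fixed-length tokenization that ReferDataset._tokenize\n",
    "# performs: encode, truncate to max_tokens, then right-pad the ids and build the\n",
    "# matching attention mask. The sentence is illustrative, not from the dataset.\n",
    "max_tokens = 20\n",
    "sentence = 'man in a black shirt carrying plates of pizza'\n",
    "ids = tokenizer.encode(text=sentence, add_special_tokens=True)[:max_tokens]\n",
    "padded = ids + [0] * (max_tokens - len(ids))\n",
    "mask = [1] * len(ids) + [0] * (max_tokens - len(ids))\n",
    "print(torch.tensor(padded))\n",
    "print(torch.tensor(mask))\n"
   ]
  },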
  {
   "cell_type": "code",
   "execution_count": 108,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_dataset(image_set, transform, args):\n",
    "    #from data.dataset_refer_bert import ReferDataset\n",
    "    ds = ReferDataset(args,\n",
    "                      split=image_set,\n",
    "                      image_transforms=transform,\n",
    "                      target_transforms=None\n",
    "                      )\n",
    "    num_classes = 2\n",
    "\n",
    "    return ds, num_classes\n",
    "\n",
    "def get_transform(args):\n",
    "    transforms = [T.Resize(args.img_size, args.img_size),\n",
    "                  T.ToTensor(),\n",
    "                  T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n",
    "                  ]\n",
    "\n",
    "    return T.Compose(transforms)\n",
    "\n",
    "\n",
    "def criterion(input, target):\n",
    "    weight = torch.FloatTensor([0.9, 1.1]).cuda()\n",
    "    return nn.functional.cross_entropy(input, target, weight=weight)"
   ]
  },
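  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added sketch: criterion() above is a class-weighted cross-entropy that\n",
    "# down-weights background (0.9) and up-weights foreground (1.1). Dummy-shape\n",
    "# check on CPU (the .cuda() call is dropped here for portability):\n",
    "logits = torch.randn(2, 2, 4, 4)          # (N, num_classes, H, W)\n",
    "labels = torch.randint(0, 2, (2, 4, 4))  # (N, H, W) with classes {0, 1}\n",
    "w = torch.FloatTensor([0.9, 1.1])\n",
    "print(F.cross_entropy(logits, labels, weight=w))\n"
   ]
  },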
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loading dataset refcocog into memory...\n",
      "Split by umd!\n",
      "creating index...\n",
      "index created.\n",
      "DONE (t=6.64s)\n"
     ]
    }
   ],
   "source": [
    "dataset, num_classes = get_dataset(\"train\",\n",
    "                                    get_transform(args=args),\n",
    "                                    args=args)\n",
    "train_sampler = torch.utils.data.distributed.DistributedSampler(dataset, num_replicas=1, rank=0,\n",
    "                                                                    shuffle=True)\n"
   ]
  },
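  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added sketch: a single __getitem__ call shows the per-sample stacking that\n",
    "# custom_collate later concatenates -- two stacked copies when only a hard\n",
    "# positive (or negative) exists, three when both do.\n",
    "img, target, emb, attn, pos_mask, neg_mask = dataset[0]\n",
    "print(img.shape, target.shape, emb.shape, attn.shape, pos_mask, neg_mask)\n"
   ]
  },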
  {
   "cell_type": "code",
   "execution_count": 118,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([3, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([3, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([1, 20])torch.Size([3, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([1, 20])torch.Size([2, 1, 20])torch.Size([1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([3, 1, 20])torch.Size([1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "Caught TypeError in DataLoader worker process 0.\nOriginal Traceback (most recent call last):\n  File \"/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/_utils/worker.py\", line 302, in _worker_loop\n    data = fetcher.fetch(index)\n  File \"/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/_utils/fetch.py\", line 52, in fetch\n    return self.collate_fn(data)\n  File \"/tmp/ipykernel_2235050/518736739.py\", line 10, in custom_collate\n    tensor_embeddings = torch.cat(*tensor_embeddings, dim=0)\nTypeError: cat() received an invalid combination of arguments - got (Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, dim=int), but expected one of:\n * (tuple of Tensors tensors, int dim, *, Tensor out)\n * (tuple of Tensors tensors, name dim, *, Tensor out)\n\n",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[118], line 36\u001b[0m\n\u001b[1;32m     30\u001b[0m data_loader \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mutils\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mDataLoader(\n\u001b[1;32m     31\u001b[0m         dataset, batch_size\u001b[38;5;241m=\u001b[39margs\u001b[38;5;241m.\u001b[39mbatch_size,\n\u001b[1;32m     32\u001b[0m         sampler\u001b[38;5;241m=\u001b[39mtrain_sampler, num_workers\u001b[38;5;241m=\u001b[39margs\u001b[38;5;241m.\u001b[39mworkers, \n\u001b[1;32m     33\u001b[0m         collate_fn\u001b[38;5;241m=\u001b[39mcustom_collate,  pin_memory\u001b[38;5;241m=\u001b[39margs\u001b[38;5;241m.\u001b[39mpin_mem, drop_last\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m     35\u001b[0m \u001b[38;5;66;03m# single sample from dataloader\u001b[39;00m\n\u001b[0;32m---> 36\u001b[0m img, target, tensor_embeddings, attention_mask, pos_mask, neg_mask \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43miter\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     38\u001b[0m \u001b[38;5;28mprint\u001b[39m(img\u001b[38;5;241m.\u001b[39mshape, target\u001b[38;5;241m.\u001b[39mshape, tensor_embeddings\u001b[38;5;241m.\u001b[39mshape, attention_mask\u001b[38;5;241m.\u001b[39mshape, pos_mask, neg_mask)\n",
      "File \u001b[0;32m~/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/dataloader.py:652\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    649\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    650\u001b[0m     \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[1;32m    651\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()  \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[0;32m--> 652\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    653\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m    654\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    655\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    656\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called:\n",
      "File \u001b[0;32m~/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/dataloader.py:1347\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1345\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m   1346\u001b[0m     \u001b[38;5;28;01mdel\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_task_info[idx]\n\u001b[0;32m-> 1347\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_process_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m~/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/dataloader.py:1373\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._process_data\u001b[0;34m(self, data)\u001b[0m\n\u001b[1;32m   1371\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_try_put_index()\n\u001b[1;32m   1372\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data, ExceptionWrapper):\n\u001b[0;32m-> 1373\u001b[0m     \u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mreraise\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1374\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m data\n",
      "File \u001b[0;32m~/.conda/envs/lavt/lib/python3.9/site-packages/torch/_utils.py:461\u001b[0m, in \u001b[0;36mExceptionWrapper.reraise\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    457\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[1;32m    458\u001b[0m     \u001b[38;5;66;03m# If the exception takes multiple arguments, don't try to\u001b[39;00m\n\u001b[1;32m    459\u001b[0m     \u001b[38;5;66;03m# instantiate since we don't know how to\u001b[39;00m\n\u001b[1;32m    460\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(msg) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 461\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exception\n",
      "\u001b[0;31mTypeError\u001b[0m: Caught TypeError in DataLoader worker process 0.\nOriginal Traceback (most recent call last):\n  File \"/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/_utils/worker.py\", line 302, in _worker_loop\n    data = fetcher.fetch(index)\n  File \"/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torch/utils/data/_utils/fetch.py\", line 52, in fetch\n    return self.collate_fn(data)\n  File \"/tmp/ipykernel_2235050/518736739.py\", line 10, in custom_collate\n    tensor_embeddings = torch.cat(*tensor_embeddings, dim=0)\nTypeError: cat() received an invalid combination of arguments - got (Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, Tensor, dim=int), but expected one of:\n * (tuple of Tensors tensors, int dim, *, Tensor out)\n * (tuple of Tensors tensors, name dim, *, Tensor out)\n\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([3, 1, 20])torch.Size([3, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])torch.Size([2, 1, 20])\n",
      "\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([3, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([1, 20])\n",
      "torch.Size([2, 1, 20])\n",
      "torch.Size([3, 1, 20])\n"
     ]
    }
   ],
   "source": [
    "from torch.nn.utils.rnn import pad_sequence\n",
    "\n",
    "def custom_collate(batch):\n",
    "    imgs, targets, tensor_embeddings, attention_masks, pos_masks, neg_masks = zip(*batch)\n",
    "    imgs = torch.cat([img for img in imgs], dim=0)\n",
    "    targets = torch.cat([tgt for tgt in targets], dim=0)\n",
    "    \n",
    "    tensor_embeddings = torch.cat([t_e for t_e in tensor_embeddings], dim=0)\n",
    "    attention_masks = torch.cat([a_m for a_m in attention_masks], dim=0)\n",
    "\n",
    "    # Handle pos_masks\n",
    "    if any(pos_mask is not None for pos_mask in pos_masks):\n",
    "        pos_masks = [mask if mask is not None else torch.zeros_like(tensor_embeddings[0]) for mask in pos_masks]\n",
    "        pos_masks = pad_sequence(pos_masks, batch_first=True, padding_value=0)\n",
    "    else:\n",
    "        pos_masks = None\n",
    "\n",
    "    # Handle neg_masks\n",
    "    if any(neg_mask is not None for neg_mask in neg_masks):\n",
    "        neg_masks = [mask if mask is not None else torch.zeros_like(tensor_embeddings[0]) for mask in neg_masks]\n",
    "        neg_masks = pad_sequence(neg_masks, batch_first=True, padding_value=0)\n",
    "    else:\n",
    "        neg_masks = None\n",
    "\n",
    "    return imgs, targets, tensor_embeddings, attention_masks, pos_masks, neg_masks\n",
    "\n",
    "\n",
    "data_loader = torch.utils.data.DataLoader(\n",
    "        dataset, batch_size=args.batch_size,\n",
    "        sampler=train_sampler, num_workers=args.workers, \n",
    "        collate_fn=custom_collate,  pin_memory=args.pin_mem, drop_last=True)\n",
    "\n",
    "# single sample from dataloader\n",
    "img, target, tensor_embeddings, attention_mask, pos_mask, neg_mask = next(iter(data_loader))\n",
    "\n",
    "print(img.shape, target.shape, tensor_embeddings.shape, attention_mask.shape, pos_mask, neg_mask)"
   ]
  },
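  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Added note on the TypeError above: an earlier revision of custom_collate\n",
    "# called torch.cat(*tensor_embeddings, dim=0), unpacking the tuple into\n",
    "# positional tensors; torch.cat expects one sequence of tensors, as the current\n",
    "# source now does. Minimal repro with dummy tensors:\n",
    "parts = (torch.zeros(2, 1, 20), torch.zeros(3, 1, 20))\n",
    "try:\n",
    "    torch.cat(*parts, dim=0)  # reproduces the collate error\n",
    "except TypeError as e:\n",
    "    print('TypeError:', e)\n",
    "print(torch.cat(parts, dim=0).shape)  # torch.Size([5, 1, 20])\n"
   ]
  },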
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[{'image_id': 391435, 'split': 'train', 'sentences': [{'tokens': ['the', 'reflection', 'of', 'the', 'man', 'shaving'], 'raw': 'the reflection of the man shaving', 'sent_id': 13437, 'sent': 'the reflection of the man shaving'}, {'tokens': ['image', 'of', 'a', 'man', 'shaving', 'on', 'a', 'laptop', 'screen'], 'raw': 'image of a man shaving on a laptop screen', 'sent_id': 13438, 'sent': 'image of a man shaving on a laptop screen'}], 'file_name': 'COCO_train2014_000000391435_1709050.jpg', 'category_id': 1, 'ann_id': 1709050, 'sent_ids': [13437, 13438], 'ref_id': 45871}][{'image_id': 421848, 'split': 'train', 'sentences': [{'tokens': ['the', 'tallest', 'giraffe', 'among', 'the', 'two'], 'raw': 'The tallest giraffe among the two', 'sent_id': 82708, 'sent': 'the tallest giraffe among the two'}, {'tokens': ['the', 'tallest', 'of', 'two', 'giraffes'], 'raw': 'The tallest of two giraffes.', 'sent_id': 82709, 'sent': 'the tallest of two giraffes'}], 'file_name': 'COCO_train2014_000000421848_596471.jpg', 'category_id': 25, 'ann_id': 596471, 'sent_ids': [82708, 82709], 'ref_id': 36770}]\n",
      "[{'image_id': 13468, 'split': 'train', 'sentences': [{'tokens': ['a', 'sandwich', 'right', 'of', 'another'], 'raw': 'A sandwich right of another.', 'sent_id': 5866, 'sent': 'a sandwich right of another'}, {'tokens': ['sandwich', 'half', 'furthest', 'to', 'right'], 'raw': 'sandwich half furthest to right', 'sent_id': 5867, 'sent': 'sandwich half furthest to right'}], 'file_name': 'COCO_train2014_000000013468_310040.jpg', 'category_id': 54, 'ann_id': 310040, 'sent_ids': [5866, 5867], 'ref_id': 7280}][{'image_id': 181054, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'a', 'white', 'shirt', 'with', 'a', 'woman', 'buttoning', 'it', 'up'], 'raw': 'A man in a white shirt with a woman buttoning it up.', 'sent_id': 68075, 'sent': 'a man in a white shirt with a woman buttoning it up'}, {'tokens': ['a', 'man', 'in', 'a', 'white', 'shirt', 'looks', 'nervous', 'as', 'an', 'older', 'woman', 'buttons', 'him', 'up'], 'raw': 'A man in a white shirt looks nervous as an older woman buttons him up.', 'sent_id': 68076, 'sent': 'a man in a white shirt looks nervous as an older woman buttons him up'}], 'file_name': 'COCO_train2014_000000181054_484268.jpg', 'category_id': 1, 'ann_id': 484268, 'sent_ids': [68075, 68076], 'ref_id': 48236}]\n",
      "\n",
      "\n",
      "[{'image_id': 569919, 'split': 'train', 'sentences': [{'tokens': ['the', 'spoon', 'next', 'to', 'the', 'pizza'], 'raw': 'The spoon next to the pizza.', 'sent_id': 97107, 'sent': 'the spoon next to the pizza'}, {'tokens': ['a', 'metal', 'spoon', 'on', 'a', 'plate', 'on', 'a', 'table'], 'raw': 'A metal spoon on a plate on a table.', 'sent_id': 97108, 'sent': 'a metal spoon on a plate on a table'}], 'file_name': 'COCO_train2014_000000569919_703521.jpg', 'category_id': 50, 'ann_id': 703521, 'sent_ids': [97107, 97108], 'ref_id': 42368}][{'image_id': 129359, 'split': 'train', 'sentences': [{'tokens': ['a', 'white', 'dish', 'with', 'some', 'kind', 'of', 'sauce', 'in', 'it', 'along', 'with', 'a', 'silver', 'spoon'], 'raw': 'A white dish with some kind of sauce in it along with a silver spoon', 'sent_id': 97230, 'sent': 'a white dish with some kind of sauce in it along with a silver spoon'}, {'tokens': ['a', 'cup', 'of', 'food', 'with', 'a', 'spoon'], 'raw': 'A cup of food with a spoon.', 'sent_id': 97231, 'sent': 'a cup of food with a spoon'}], 'file_name': 'COCO_train2014_000000129359_1039869.jpg', 'category_id': 51, 'ann_id': 1039869, 'sent_ids': [97230, 97231], 'ref_id': 42420}][{'image_id': 2964, 'split': 'train', 'sentences': [{'tokens': ['bottle', 'of', '14', 'hands', 'wine'], 'raw': 'bottle of 14 Hands wine', 'sent_id': 44379, 'sent': 'bottle of 14 hands wine'}, {'tokens': ['a', 'bottle', 'of', 'wine', 'that', 'says', '14', 'hands', 'and', 'has', 'a', 'purple', 'horse', 'on', 'it'], 'raw': 'A bottle of wine that says 14 hands and has a purple horse on it.', 'sent_id': 44380, 'sent': 'a bottle of wine that says 14 hands and has a purple horse on it'}], 'file_name': 'COCO_train2014_000000002964_91245.jpg', 'category_id': 44, 'ann_id': 91245, 'sent_ids': [44379, 44380], 'ref_id': 22056}]\n",
      "\n",
      "\n",
      "[{'image_id': 330683, 'split': 'train', 'sentences': [{'tokens': ['a', 'black', 'cow', 'alongside', 'a', 'brown', 'cow'], 'raw': 'A black cow alongside a brown cow.', 'sent_id': 78006, 'sent': 'a black cow alongside a brown cow'}, {'tokens': ['a', 'black', 'cow', 'standing', 'between', 'another', 'black', 'cow', 'and', 'a', 'brown', 'cow'], 'raw': 'A black cow standing between another black cow and a brown cow', 'sent_id': 78007, 'sent': 'a black cow standing between another black cow and a brown cow'}], 'file_name': 'COCO_train2014_000000330683_76006.jpg', 'category_id': 21, 'ann_id': 76006, 'sent_ids': [78006, 78007], 'ref_id': 34980}]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n",
      "/home/seunghoon/.conda/envs/lavt/lib/python3.9/site-packages/torchvision/transforms/functional.py:417: UserWarning: Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. Please use InterpolationMode enum.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[{'image_id': 263823, 'split': 'train', 'sentences': [{'tokens': ['the', 'umpire', 'behind', 'the', 'plate'], 'raw': 'the umpire behind the plate', 'sent_id': 9335, 'sent': 'the umpire behind the plate'}, {'tokens': ['umpire', 'wearing', 'blue'], 'raw': 'umpire wearing blue', 'sent_id': 9336, 'sent': 'umpire wearing blue'}], 'file_name': 'COCO_train2014_000000263823_2160611.jpg', 'category_id': 1, 'ann_id': 2160611, 'sent_ids': [9335, 9336], 'ref_id': 8614}]\n",
      "[{'image_id': 170366, 'split': 'train', 'sentences': [{'tokens': ['the', 'boy', 'in', 'the', 'suit'], 'raw': 'The boy in the suit.', 'sent_id': 96474, 'sent': 'the boy in the suit'}, {'tokens': ['a', 'young', 'man', 'with', 'brown', 'hair', 'in', 'a', 'black', 'suit', ',', 'with', 'a', 'black', 'hat', 'with', 'sunglasses', 'resting', 'on', 'it'], 'raw': 'A young man with brown hair in a black suit, with a black hat with sunglasses resting on it', 'sent_id': 96475, 'sent': 'a young man with brown hair in a black suit , with a black hat with sunglasses resting on it'}], 'file_name': 'COCO_train2014_000000170366_484717.jpg', 'category_id': 1, 'ann_id': 484717, 'sent_ids': [96474, 96475], 'ref_id': 42104}][{'image_id': 181316, 'split': 'train', 'sentences': [{'tokens': ['the', 'racket', 'held', 'by', 'a', 'girl', 'wearing', 'dark', 'skirt'], 'raw': 'The racket held by a girl wearing dark skirt.', 'sent_id': 14001, 'sent': 'the racket held by a girl wearing dark skirt'}, {'tokens': ['a', 'racket', 'being', 'held', 'by', 'the', 'girl', 'in', 'the', 'black', 'skirt'], 'raw': 'A racket being held by the girl in the black skirt.', 'sent_id': 14002, 'sent': 'a racket being held by the girl in the black skirt'}], 'file_name': 'COCO_train2014_000000181316_655443.jpg', 'category_id': 43, 'ann_id': 655443, 'sent_ids': [14001, 14002], 'ref_id': 45890}]\n",
      "\n",
      "[{'image_id': 96723, 'split': 'train', 'sentences': [{'tokens': ['a', 'number', 'of', 'books', 'on', 'a', 'shelf'], 'raw': 'A number of books on a shelf.', 'sent_id': 35543, 'sent': 'a number of books on a shelf'}, {'tokens': ['a', 'bunch', 'of', 'books', 'on', 'a', 'shelf'], 'raw': 'A bunch of books on a shelf.', 'sent_id': 35544, 'sent': 'a bunch of books on a shelf'}], 'file_name': 'COCO_train2014_000000096723_1139765.jpg', 'category_id': 84, 'ann_id': 1139765, 'sent_ids': [35543, 35544], 'ref_id': 18668}]\n",
      "[{'image_id': 273951, 'split': 'train', 'sentences': [{'tokens': ['a', 'white', 'woman', 'skier', 'with', 'a', 'colorful', 'hat', 'sitting', 'between', 'two', 'men', 'skiers'], 'raw': 'A white woman skier with a colorful hat sitting between two men skiers.', 'sent_id': 34676, 'sent': 'a white woman skier with a colorful hat sitting between two men skiers'}, {'tokens': ['a', 'blonde', 'woman', 'in', 'red'], 'raw': 'A blonde woman in red', 'sent_id': 34677, 'sent': 'a blonde woman in red'}], 'file_name': 'COCO_train2014_000000273951_509586.jpg', 'category_id': 1, 'ann_id': 509586, 'sent_ids': [34676, 34677], 'ref_id': 18328}][{'image_id': 387527, 'split': 'train', 'sentences': [{'tokens': ['a', 'banana', 'to', 'the', 'far', 'left', 'of', 'the', 'fruit', 'bowl'], 'raw': 'A banana to the far left of the fruit bowl.', 'sent_id': 65272, 'sent': 'a banana to the far left of the fruit bowl'}, {'tokens': ['the', 'farthest', 'banana', 'away', 'from', 'the', 'camera'], 'raw': 'The farthest banana away from the camera.', 'sent_id': 65273, 'sent': 'the farthest banana away from the camera'}], 'file_name': 'COCO_train2014_000000387527_1043422.jpg', 'category_id': 52, 'ann_id': 1043422, 'sent_ids': [65272, 65273], 'ref_id': 30094}][{'image_id': 103510, 'split': 'train', 'sentences': [{'tokens': ['the', 'carrots'], 'raw': 'the carrots', 'sent_id': 63484, 'sent': 'the carrots'}, {'tokens': ['a', 'group', 'of', 'fresh', 'baby', 'carrots'], 'raw': 'A group of fresh baby carrots.', 'sent_id': 63485, 'sent': 'a group of fresh baby carrots'}], 'file_name': 'COCO_train2014_000000103510_1063832.jpg', 'category_id': 57, 'ann_id': 1063832, 'sent_ids': [63484, 63485], 'ref_id': 29390}]\n",
      "\n",
      "\n",
      "[{'image_id': 427633, 'split': 'train', 'sentences': [{'tokens': ['the', 'back', 'side', 'of', 'a', 'multi', 'light', 'traffic', 'light'], 'raw': 'The back side of a multi light traffic light.', 'sent_id': 52260, 'sent': 'the back side of a multi light traffic light'}, {'tokens': ['a', 'stoplight', 'faces', 'to', 'the', 'right'], 'raw': 'A stoplight faces to the right.', 'sent_id': 52261, 'sent': 'a stoplight faces to the right'}], 'file_name': 'COCO_train2014_000000427633_2172860.jpg', 'category_id': 10, 'ann_id': 2172860, 'sent_ids': [52260, 52261], 'ref_id': 25098}]\n",
      "[{'image_id': 350083, 'split': 'train', 'sentences': [{'tokens': ['giraffe', 'to', 'left', 'of', 'other'], 'raw': 'giraffe to left of other', 'sent_id': 33055, 'sent': 'giraffe to left of other'}, {'tokens': ['the', 'slightly', 'shorter', 'giraffe'], 'raw': 'the slightly shorter giraffe', 'sent_id': 33056, 'sent': 'the slightly shorter giraffe'}], 'file_name': 'COCO_train2014_000000350083_599600.jpg', 'category_id': 25, 'ann_id': 599600, 'sent_ids': [33055, 33056], 'ref_id': 46695}]\n",
      "[{'image_id': 287519, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'with', 'cream', '&', 'black', 'colored', 'shirt', 'and', 'short', 'trousers', 'walking', 'behind', 'a', 'police', 'man'], 'raw': 'a man with cream & black colored shirt and short trousers walking behind a police man', 'sent_id': 99833, 'sent': 'a man with cream & black colored shirt and short trousers walking behind a police man'}, {'tokens': ['a', 'guy', 'wearing', 'a', 'short', 'sleeve', 'shirt', 'with', 'an', 'interesting', 'design'], 'raw': 'a guy wearing a short sleeve shirt with an interesting design', 'sent_id': 99834, 'sent': 'a guy wearing a short sleeve shirt with an interesting design'}], 'file_name': 'COCO_train2014_000000287519_2206490.jpg', 'category_id': 1, 'ann_id': 2206490, 'sent_ids': [99833, 99834], 'ref_id': 43469}]\n",
      "[{'image_id': 54141, 'split': 'train', 'sentences': [{'tokens': ['a', 'speed', 'boat', 'on', 'a', 'lake'], 'raw': 'A speed boat on a lake.', 'sent_id': 67100, 'sent': 'a speed boat on a lake'}, {'tokens': ['a', 'speedboat', 'pulling', 'a', 'wake', 'boarder', 'behind', 'it'], 'raw': 'A speedboat pulling a wake boarder behind it', 'sent_id': 67101, 'sent': 'a speedboat pulling a wake boarder behind it'}], 'file_name': 'COCO_train2014_000000054141_180852.jpg', 'category_id': 9, 'ann_id': 180852, 'sent_ids': [67100, 67101], 'ref_id': 48196}]\n",
      "[{'image_id': 156296, 'split': 'train', 'sentences': [{'tokens': ['a', 'woman', 'in', 'rain', 'boots', 'trying', 'to', 'fix', 'her', 'umbrella'], 'raw': 'A woman in rain boots trying to fix her umbrella.', 'sent_id': 9312, 'sent': 'a woman in rain boots trying to fix her umbrella'}, {'tokens': ['the', 'woman', 'with', 'the', 'black', 'umbrella'], 'raw': 'The woman with the black umbrella.', 'sent_id': 9313, 'sent': 'the woman with the black umbrella'}], 'file_name': 'COCO_train2014_000000156296_518143.jpg', 'category_id': 1, 'ann_id': 518143, 'sent_ids': [9312, 9313], 'ref_id': 8608}][{'image_id': 402212, 'split': 'train', 'sentences': [{'tokens': ['a', 'colorful', 'toy', 'van', 'in', 'the', 'street'], 'raw': 'a colorful toy van in the street', 'sent_id': 14596, 'sent': 'a colorful toy van in the street'}, {'tokens': ['colorful', 'truck'], 'raw': 'Colorful truck.', 'sent_id': 14597, 'sent': 'colorful truck'}], 'file_name': 'COCO_train2014_000000402212_396551.jpg', 'category_id': 8, 'ann_id': 396551, 'sent_ids': [14596, 14597], 'ref_id': 45911}]\n",
      "[{'image_id': 521796, 'split': 'train', 'sentences': [{'tokens': ['shelf', 'in', 'camper'], 'raw': 'Shelf in camper.', 'sent_id': 70798, 'sent': 'shelf in camper'}], 'file_name': 'COCO_train2014_000000521796_1978969.jpg', 'category_id': 78, 'ann_id': 1978969, 'sent_ids': [70798], 'ref_id': 48359}]\n",
      "[{'image_id': 136953, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'wearing', 'a', 'striped', 'shirt', 'and', 'black', 'pants'], 'raw': 'A man wearing a striped shirt and black pants.', 'sent_id': 21168, 'sent': 'a man wearing a striped shirt and black pants'}, {'tokens': ['a', 'man', 'in', 'a', 'striped', 'shirt'], 'raw': 'A man in a striped shirt.', 'sent_id': 21169, 'sent': 'a man in a striped shirt'}], 'file_name': 'COCO_train2014_000000136953_530897.jpg', 'category_id': 1, 'ann_id': 530897, 'sent_ids': [21168, 21169], 'ref_id': 13173}]\n",
      "[{'image_id': 179209, 'split': 'train', 'sentences': [{'tokens': ['the', 'large', 'truck', 'in', 'the', 'very', 'back'], 'raw': 'the large truck in the very back', 'sent_id': 45782, 'sent': 'the large truck in the very back'}, {'tokens': ['large', 'multiwheeled', 'truck', 'behind', 'a', 'truck', 'full', 'of', 'bananas'], 'raw': 'Large multiwheeled truck behind a truck full of bananas.', 'sent_id': 45783, 'sent': 'large multiwheeled truck behind a truck full of bananas'}], 'file_name': 'COCO_train2014_000000179209_2054261.jpg', 'category_id': 8, 'ann_id': 2054261, 'sent_ids': [45782, 45783], 'ref_id': 22607}]\n",
      "\n",
      "[{'image_id': 175565, 'split': 'train', 'sentences': [{'tokens': ['the', 'couch', ',', 'a', 'lady', 'sitted', 'on', 'it'], 'raw': 'the couch, a lady sitted on it.', 'sent_id': 1112, 'sent': 'the couch , a lady sitted on it'}], 'file_name': 'COCO_train2014_000000175565_99716.jpg', 'category_id': 63, 'ann_id': 99716, 'sent_ids': [1112], 'ref_id': 5449}]\n",
      "[{'image_id': 168217, 'split': 'train', 'sentences': [{'tokens': ['brown', 'and', 'pink', 'teddy', 'bears'], 'raw': 'Brown and pink teddy bears.', 'sent_id': 4409, 'sent': 'brown and pink teddy bears'}, {'tokens': ['a', 'brown', 'teddy', 'bear', 'with', 'a', 'morose', 'expression', 'sits', 'in', 'front', 'of', 'a', 'pink', 'teddy', 'bear', 'with', 'an', 'identical', 'expression'], 'raw': 'A brown teddy bear with a morose expression sits in front of a pink teddy bear with an identical expression.', 'sent_id': 4410, 'sent': 'a brown teddy bear with a morose expression sits in front of a pink teddy bear with an identical expression'}], 'file_name': 'COCO_train2014_000000168217_1162820.jpg', 'category_id': 88, 'ann_id': 1162820, 'sent_ids': [4409, 4410], 'ref_id': 6702}]\n",
      "[{'image_id': 413164, 'split': 'train', 'sentences': [{'tokens': ['a', 'table', 'with', 'a', 'white', 'tablecloth', 'on', 'it'], 'raw': 'A table with a white tablecloth on it', 'sent_id': 19497, 'sent': 'a table with a white tablecloth on it'}], 'file_name': 'COCO_train2014_000000413164_1092064.jpg', 'category_id': 67, 'ann_id': 1092064, 'sent_ids': [19497], 'ref_id': 12539}]\n",
      "[{'image_id': 30387, 'split': 'train', 'sentences': [{'tokens': ['the', 'man', 'all', 'way', 'at', 'the', 'end', 'of', 'the', 'line', 'who', 'is', 'barely', 'visible'], 'raw': 'The man all way at the end of the line who is barely visible.', 'sent_id': 12240, 'sent': 'the man all way at the end of the line who is barely visible'}, {'tokens': ['a', 'man', 'who', 'is', 'mostly', 'covered', 'up', 'by', 'the', 'arms', 'of', 'another', 'man'], 'raw': 'A man who is mostly covered up by the arms of another man.', 'sent_id': 12241, 'sent': 'a man who is mostly covered up by the arms of another man'}], 'file_name': 'COCO_train2014_000000030387_1706152.jpg', 'category_id': 1, 'ann_id': 1706152, 'sent_ids': [12240, 12241], 'ref_id': 9755}]\n",
      "[{'image_id': 347263, 'split': 'train', 'sentences': [{'tokens': ['woman', 'wearing', 'black', 'showing', 'someone', 'a', 'technique'], 'raw': 'Woman wearing black showing someone a technique', 'sent_id': 68299, 'sent': 'woman wearing black showing someone a technique'}, {'tokens': ['woman', 'with', 'sunglasses', 'on', 'her', 'head', 'is', 'cutting', 'a', 'roast', 'in', 'the', 'kitchen'], 'raw': 'woman with sunglasses on her head is cutting a roast in the kitchen', 'sent_id': 68300, 'sent': 'woman with sunglasses on her head is cutting a roast in the kitchen'}], 'file_name': 'COCO_train2014_000000347263_556785.jpg', 'category_id': 1, 'ann_id': 556785, 'sent_ids': [68299, 68300], 'ref_id': 31255}][{'image_id': 492325, 'split': 'train', 'sentences': [{'tokens': ['a', 'beer', 'which', 'is', 'standing'], 'raw': 'A beer which is standing', 'sent_id': 13351, 'sent': 'a beer which is standing'}, {'tokens': ['a', 'bear', 'that', 'is', 'sitting', 'up'], 'raw': 'A bear that is sitting up.', 'sent_id': 13352, 'sent': 'a bear that is sitting up'}], 'file_name': 'COCO_train2014_000000492325_588117.jpg', 'category_id': 23, 'ann_id': 588117, 'sent_ids': [13351, 13352], 'ref_id': 10175}]\n",
      "\n",
      "[{'image_id': 409706, 'split': 'train', 'sentences': [{'tokens': ['paper', 'being', 'drawn', 'on'], 'raw': 'paper being drawn on', 'sent_id': 84661, 'sent': 'paper being drawn on'}, {'tokens': ['drawing', 'book'], 'raw': 'drawing book', 'sent_id': 84662, 'sent': 'drawing book'}], 'file_name': 'COCO_train2014_000000409706_1140323.jpg', 'category_id': 84, 'ann_id': 1140323, 'sent_ids': [84661, 84662], 'ref_id': 37505}]\n",
      "[{'image_id': 526922, 'split': 'train', 'sentences': [{'tokens': ['a', 'bus', 'that', 'says', 'sebastian', 'el', 'gde'], 'raw': 'A bus that says Sebastian El GDE.', 'sent_id': 77013, 'sent': 'a bus that says sebastian el gde'}, {'tokens': ['the', 'r1', 'bus'], 'raw': 'The R1 bus.', 'sent_id': 77014, 'sent': 'the r1 bus'}], 'file_name': 'COCO_train2014_000000526922_248285.jpg', 'category_id': 6, 'ann_id': 248285, 'sent_ids': [77013, 77014], 'ref_id': 34577}]\n",
      "[{'image_id': 26274, 'split': 'train', 'sentences': [{'tokens': ['a', 'silver', 'van', 'with', 'people', 'facing', 'it'], 'raw': 'A silver van with people facing it.', 'sent_id': 61777, 'sent': 'a silver van with people facing it'}], 'file_name': 'COCO_train2014_000000026274_136004.jpg', 'category_id': 3, 'ann_id': 136004, 'sent_ids': [61777], 'ref_id': 28715}][{'image_id': 65842, 'split': 'train', 'sentences': [{'tokens': ['orange', 'sitting', 'in', 'boiling', 'wate', 'with', 'a', 'crack', 'on', 'the', 'side', 'and', 'a', 'white', 'spot', 'on', 'top'], 'raw': 'Orange sitting in boiling wate with a crack on the side and a white spot on top.', 'sent_id': 90770, 'sent': 'orange sitting in boiling wate with a crack on the side and a white spot on top'}, {'tokens': ['a', 'single', 'lemon', 'in', 'a', 'boiling', 'pot', 'with', 'five', 'lemons', 'surrounding'], 'raw': 'A single lemon in a boiling pot with five lemons surrounding.', 'sent_id': 90771, 'sent': 'a single lemon in a boiling pot with five lemons surrounding'}], 'file_name': 'COCO_train2014_000000065842_1050797.jpg', 'category_id': 55, 'ann_id': 1050797, 'sent_ids': [90770, 90771], 'ref_id': 49234}]\n",
      "\n",
      "[{'image_id': 315831, 'split': 'train', 'sentences': [{'tokens': ['donut', 'with', 'pink', 'frosting', 'and', 'sprinkles'], 'raw': 'Donut with pink frosting and sprinkles.', 'sent_id': 16573, 'sent': 'donut with pink frosting and sprinkles'}, {'tokens': ['a', 'purple', 'donut'], 'raw': 'A purple donut.', 'sent_id': 16574, 'sent': 'a purple donut'}], 'file_name': 'COCO_train2014_000000315831_1573196.jpg', 'category_id': 60, 'ann_id': 1573196, 'sent_ids': [16573, 16574], 'ref_id': 11418}]\n",
      "[{'image_id': 34404, 'split': 'train', 'sentences': [{'tokens': ['person', 'on', 'left', 'cut', 'off'], 'raw': 'person on left cut off', 'sent_id': 29208, 'sent': 'person on left cut off'}, {'tokens': ['the', 'skiir', 'standing', 'up'], 'raw': 'the skiir standing up', 'sent_id': 29209, 'sent': 'the skiir standing up'}], 'file_name': 'COCO_train2014_000000034404_467515.jpg', 'category_id': 1, 'ann_id': 467515, 'sent_ids': [29208, 29209], 'ref_id': 16214}]\n",
      "[{'image_id': 427523, 'split': 'train', 'sentences': [{'tokens': ['horse', 'on', 'the', 'left', 'in', 'the', 'right', 'hand', 'picture'], 'raw': 'horse on the left in the right hand picture', 'sent_id': 19348, 'sent': 'horse on the left in the right hand picture'}, {'tokens': ['horse', 'on', 'the', 'left'], 'raw': 'horse on the left', 'sent_id': 19349, 'sent': 'horse on the left'}], 'file_name': 'COCO_train2014_000000427523_54859.jpg', 'category_id': 19, 'ann_id': 54859, 'sent_ids': [19348, 19349], 'ref_id': 12482}]\n",
      "[{'image_id': 231963, 'split': 'train', 'sentences': [{'tokens': ['a', 'black', ',', 'green', 'and', 'red', 'train', 'car', 'stopped', 'on', 'the', 'tracks', 'to', 'the', 'right', 'of', 'a', 'train', 'engine'], 'raw': 'A black, green and red train car stopped on the tracks to the right of a train engine.', 'sent_id': 91692, 'sent': 'a black , green and red train car stopped on the tracks to the right of a train engine'}, {'tokens': ['the', 'back', 'of', 'the', 'train', 'leaving'], 'raw': 'The back of the train leaving', 'sent_id': 91693, 'sent': 'the back of the train leaving'}], 'file_name': 'COCO_train2014_000000231963_169754.jpg', 'category_id': 7, 'ann_id': 169754, 'sent_ids': [91692, 91693], 'ref_id': 40250}][{'image_id': 399922, 'split': 'train', 'sentences': [{'tokens': ['purse', 'on', 'front', 'mans', 'back'], 'raw': 'purse on front mans back', 'sent_id': 96153, 'sent': 'purse on front mans back'}, {'tokens': ['a', 'beige', 'satchel'], 'raw': 'a beige satchel.', 'sent_id': 96154, 'sent': 'a beige satchel'}], 'file_name': 'COCO_train2014_000000399922_1176815.jpg', 'category_id': 31, 'ann_id': 1176815, 'sent_ids': [96153, 96154], 'ref_id': 41986}]\n",
      "\n",
      "[{'image_id': 269045, 'split': 'train', 'sentences': [{'tokens': ['a', 'see', 'through', 'table'], 'raw': 'a see through table', 'sent_id': 36678, 'sent': 'a see through table'}, {'tokens': ['a', 'grilled', 'table', 'near', 'the', 'person', 'with', 'a', 'laptop'], 'raw': 'A grilled table near the person with a laptop', 'sent_id': 36679, 'sent': 'a grilled table near the person with a laptop'}], 'file_name': 'COCO_train2014_000000269045_1612853.jpg', 'category_id': 67, 'ann_id': 1612853, 'sent_ids': [36678, 36679], 'ref_id': 19074}]\n",
      "[{'image_id': 419297, 'split': 'train', 'sentences': [{'tokens': ['a', 'white', 'truck', 'filled', 'with', 'luggage'], 'raw': 'A white truck filled with luggage', 'sent_id': 50699, 'sent': 'a white truck filled with luggage'}, {'tokens': ['a', 'white', 'color', 'truck'], 'raw': 'A white color truck.', 'sent_id': 50700, 'sent': 'a white color truck'}], 'file_name': 'COCO_train2014_000000419297_1370937.jpg', 'category_id': 8, 'ann_id': 1370937, 'sent_ids': [50699, 50700], 'ref_id': 24467}]\n",
      "[{'image_id': 153609, 'split': 'train', 'sentences': [{'tokens': ['a', 'gentleman', 'wearing', 'a', 'suit', ',', 'looking', 'down', ',', 'and', 'walking', 'with', 'his', 'hands', 'on', 'his', 'hips'], 'raw': 'A gentleman wearing a suit, looking down, and walking with his hands on his hips.', 'sent_id': 102251, 'sent': 'a gentleman wearing a suit , looking down , and walking with his hands on his hips'}, {'tokens': ['a', 'man', 'in', 'a', 'black', 'suit', 'walking'], 'raw': 'A man in a black suit walking.', 'sent_id': 102252, 'sent': 'a man in a black suit walking'}], 'file_name': 'COCO_train2014_000000153609_462146.jpg', 'category_id': 1, 'ann_id': 462146, 'sent_ids': [102251, 102252], 'ref_id': 44392}][{'image_id': 61498, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'a', 'blue', 'shirt'], 'raw': 'A man in a blue shirt.', 'sent_id': 72219, 'sent': 'a man in a blue shirt'}, {'tokens': ['blue', 'shirt', 'boy'], 'raw': 'blue shirt boy', 'sent_id': 72220, 'sent': 'blue shirt boy'}], 'file_name': 'COCO_train2014_000000061498_500279.jpg', 'category_id': 1, 'ann_id': 500279, 'sent_ids': [72219, 72220], 'ref_id': 48407}]\n",
      "\n",
      "[{'image_id': 334714, 'split': 'train', 'sentences': [{'tokens': ['a', 'long', 'narrow', 'multi', '-', 'colored', 'kite', 'to', 'the', 'left', 'of', 'two', 'other', 'kites', 'of', 'the', 'same', 'colors'], 'raw': 'A long narrow multi-colored kite to the left of two other kites of the same colors.', 'sent_id': 29361, 'sent': 'a long narrow multi - colored kite to the left of two other kites of the same colors'}, {'tokens': ['a', 'side', 'of', 'the', 'kite', 'that', 'is', 'closest', 'to', 'the', 'man', 'in', 'black'], 'raw': 'A side of the kite that is closest to the man in black.', 'sent_id': 29362, 'sent': 'a side of the kite that is closest to the man in black'}], 'file_name': 'COCO_train2014_000000334714_623960.jpg', 'category_id': 38, 'ann_id': 623960, 'sent_ids': [29361, 29362], 'ref_id': 16269}]\n",
      "[{'image_id': 233111, 'split': 'train', 'sentences': [{'tokens': ['a', 'tennis', 'player', 'with', 'his', 'right', 'foot', 'balanced', 'on', 'its', 'toes'], 'raw': 'A tennis player with his right foot balanced on its toes.', 'sent_id': 94813, 'sent': 'a tennis player with his right foot balanced on its toes'}, {'tokens': ['a', 'man', 'in', 'a', 'blue', 'shirt', 'holding', 'a', 'tennis', 'racket'], 'raw': 'A man in a blue shirt holding a tennis racket.', 'sent_id': 94814, 'sent': 'a man in a blue shirt holding a tennis racket'}], 'file_name': 'COCO_train2014_000000233111_510824.jpg', 'category_id': 1, 'ann_id': 510824, 'sent_ids': [94813, 94814], 'ref_id': 49423}]\n",
      "[{'image_id': 333324, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'a', 'red', 'outfit', 'is', 'jumping', 'to', 'catch', 'a', 'frisbee'], 'raw': 'A man in a red outfit is jumping to catch a frisbee.', 'sent_id': 15405, 'sent': 'a man in a red outfit is jumping to catch a frisbee'}, {'tokens': ['a', 'man', 'jumping', 'for', 'a', 'frisbee'], 'raw': 'a man jumping for a frisbee', 'sent_id': 15406, 'sent': 'a man jumping for a frisbee'}], 'file_name': 'COCO_train2014_000000333324_424995.jpg', 'category_id': 1, 'ann_id': 424995, 'sent_ids': [15405, 15406], 'ref_id': 10961}]\n",
      "[{'image_id': 500390, 'split': 'train', 'sentences': [{'tokens': ['a', 'girl', 'playing', 'football', 'in', 'the', 'ground'], 'raw': 'A girl playing football in the ground.', 'sent_id': 26349, 'sent': 'a girl playing football in the ground'}, {'tokens': ['girl', 'running', 'with', 'hands', 'up'], 'raw': 'Girl running with hands up', 'sent_id': 26350, 'sent': 'girl running with hands up'}], 'file_name': 'COCO_train2014_000000500390_556949.jpg', 'category_id': 1, 'ann_id': 556949, 'sent_ids': [26349, 26350], 'ref_id': 15132}]\n",
      "[{'image_id': 282359, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'wearing', 'a', 'black', 'shirt', 'holding', 'a', 'tennis', 'racket'], 'raw': 'A man wearing a black shirt holding a tennis racket.', 'sent_id': 81458, 'sent': 'a man wearing a black shirt holding a tennis racket'}, {'tokens': ['a', 'young', 'man', 'in', 'a', 'black', 'shirt', 'and', 'gray', 'shorts'], 'raw': 'A young man in a black shirt and gray shorts.', 'sent_id': 81459, 'sent': 'a young man in a black shirt and gray shorts'}], 'file_name': 'COCO_train2014_000000282359_440734.jpg', 'category_id': 1, 'ann_id': 440734, 'sent_ids': [81458, 81459], 'ref_id': 36299}]\n",
      "[{'image_id': 377339, 'split': 'train', 'sentences': [{'tokens': ['black', 'suitcase', 'girl', 'is', 'sitting', 'in'], 'raw': 'black suitcase girl is sitting in', 'sent_id': 2283, 'sent': 'black suitcase girl is sitting in'}, {'tokens': ['a', 'suitcase', 'with', 'a', 'girl', 'sitting', 'in', 'it'], 'raw': 'A suitcase with a girl sitting in it', 'sent_id': 2284, 'sent': 'a suitcase with a girl sitting in it'}], 'file_name': 'COCO_train2014_000000377339_1186330.jpg', 'category_id': 33, 'ann_id': 1186330, 'sent_ids': [2283, 2284], 'ref_id': 5899}][{'image_id': 429215, 'split': 'train', 'sentences': [{'tokens': ['a', 'crown', 'of', 'broccoli', 'on', 'a', 'blue', 'plate', 'with', 'carrots', 'and', 'potatoes'], 'raw': 'A crown of broccoli on a blue plate with carrots and potatoes.', 'sent_id': 11008, 'sent': 'a crown of broccoli on a blue plate with carrots and potatoes'}, {'tokens': ['broccoli', 'on', 'a', 'plate'], 'raw': 'Broccoli on a plate.', 'sent_id': 11009, 'sent': 'broccoli on a plate'}], 'file_name': 'COCO_train2014_000000429215_1059094.jpg', 'category_id': 56, 'ann_id': 1059094, 'sent_ids': [11008, 11009], 'ref_id': 45758}]\n",
      "\n",
      "[{'image_id': 568492, 'split': 'train', 'sentences': [{'tokens': ['back', 'of', 'chair', 'with', 'women', 'in', 'hooded', 'jacket'], 'raw': 'BACK OF CHAIR WITH WOMEN IN HOODED JACKET', 'sent_id': 62788, 'sent': 'back of chair with women in hooded jacket'}, {'tokens': ['wooden', 'chair', 'with', 'person', 'in', 'red', 'coat', 'getting', 'off', 'of', 'it'], 'raw': 'wooden chair with person in red coat getting off of it', 'sent_id': 62789, 'sent': 'wooden chair with person in red coat getting off of it'}], 'file_name': 'COCO_train2014_000000568492_375813.jpg', 'category_id': 62, 'ann_id': 375813, 'sent_ids': [62788, 62789], 'ref_id': 48006}]\n",
      "[{'image_id': 183100, 'split': 'train', 'sentences': [{'tokens': ['a', 'red', 'handle', 'for', 'a', 'pair', 'of', 'shears', 'on', 'a', 'table'], 'raw': 'A red handle for a pair of shears on a table.', 'sent_id': 64588, 'sent': 'a red handle for a pair of shears on a table'}, {'tokens': ['scissors', 'with', 'red', 'handle'], 'raw': 'scissors with red handle', 'sent_id': 64589, 'sent': 'scissors with red handle'}], 'file_name': 'COCO_train2014_000000183100_2146842.jpg', 'category_id': 87, 'ann_id': 2146842, 'sent_ids': [64588, 64589], 'ref_id': 29813}]\n",
      "[{'image_id': 159109, 'split': 'train', 'sentences': [{'tokens': ['pizza', 'with', 'only', 'sausage', 'and', 'cheese', 'on', 'it'], 'raw': 'Pizza with only sausage and cheese on it.', 'sent_id': 20557, 'sent': 'pizza with only sausage and cheese on it'}, {'tokens': ['a', 'frozen', 'pepperoni', 'pizza', 'on', 'a', 'baking', 'sheet'], 'raw': 'A frozen pepperoni pizza on a baking sheet.', 'sent_id': 20558, 'sent': 'a frozen pepperoni pizza on a baking sheet'}], 'file_name': 'COCO_train2014_000000159109_1072198.jpg', 'category_id': 59, 'ann_id': 1072198, 'sent_ids': [20557, 20558], 'ref_id': 12943}][{'image_id': 99599, 'split': 'train', 'sentences': [{'tokens': ['a', 'woman', 'wearing', 'a', 'bracelet', 'helps', 'put', 'candles', 'on', 'a', 'cake'], 'raw': 'A woman wearing a bracelet helps put candles on a cake.', 'sent_id': 6577, 'sent': 'a woman wearing a bracelet helps put candles on a cake'}, {'tokens': ['person', 'wearing', 'a', 'silver', 'bracelet', 'and', 'a', 'silver', 'ring'], 'raw': 'Person wearing a silver bracelet and a silver ring.', 'sent_id': 6578, 'sent': 'person wearing a silver bracelet and a silver ring'}], 'file_name': 'COCO_train2014_000000099599_191383.jpg', 'category_id': 1, 'ann_id': 191383, 'sent_ids': [6577, 6578], 'ref_id': 7554}]\n",
      "\n",
      "[{'image_id': 198406, 'split': 'train', 'sentences': [{'tokens': ['a', 'bird', 'stands', 'on', 'the', 'road'], 'raw': 'A bird stands on the road.', 'sent_id': 59907, 'sent': 'a bird stands on the road'}, {'tokens': ['a', 'bird', 'is', 'standing', 'on', 'the', 'road', 'and', 'watching', 'the', 'other', 'bird', 'sitting', 'near'], 'raw': 'A bird is standing on the road and watching the other bird sitting near', 'sent_id': 59908, 'sent': 'a bird is standing on the road and watching the other bird sitting near'}], 'file_name': 'COCO_train2014_000000198406_43102.jpg', 'category_id': 16, 'ann_id': 43102, 'sent_ids': [59907, 59908], 'ref_id': 47881}]\n",
      "[{'image_id': 321173, 'split': 'train', 'sentences': [{'tokens': ['a', 'part', 'of', 'the', 'table', 'to', 'the', 'right', 'of', 'the', 'plate'], 'raw': 'A part of the table to the right of the plate.', 'sent_id': 83151, 'sent': 'a part of the table to the right of the plate'}], 'file_name': 'COCO_train2014_000000321173_390269.jpg', 'category_id': 67, 'ann_id': 390269, 'sent_ids': [83151], 'ref_id': 36932}]\n",
      "[{'image_id': 40901, 'split': 'train', 'sentences': [{'tokens': ['man', 'washing', 'his', 'hands', 'in', 'a', 'public', 'restroom'], 'raw': 'Man washing his hands in a public restroom.', 'sent_id': 57105, 'sent': 'man washing his hands in a public restroom'}, {'tokens': ['man', 'watching', 'his', 'hands', ',', 'not', 'his', 'reflection'], 'raw': 'Man watching his hands, not his reflection.', 'sent_id': 57106, 'sent': 'man watching his hands , not his reflection'}], 'file_name': 'COCO_train2014_000000040901_512279.jpg', 'category_id': 1, 'ann_id': 512279, 'sent_ids': [57105, 57106], 'ref_id': 26951}]\n",
      "[{'image_id': 286482, 'split': 'train', 'sentences': [{'tokens': ['green', 'suitcase'], 'raw': 'green suitcase', 'sent_id': 37373, 'sent': 'green suitcase'}, {'tokens': ['a', 'green', 'suitcase', 'with', 'two', 'other', 'luggage', 'pieces', 'stacked', 'atop', 'it'], 'raw': 'A green suitcase with two other luggage pieces stacked atop it.', 'sent_id': 37374, 'sent': 'a green suitcase with two other luggage pieces stacked atop it'}], 'file_name': 'COCO_train2014_000000286482_1184941.jpg', 'category_id': 33, 'ann_id': 1184941, 'sent_ids': [37373, 37374], 'ref_id': 19349}][{'image_id': 40094, 'split': 'train', 'sentences': [{'tokens': ['the', 'cat', 'that', 'is', 'the', 'lowest', 'on', 'the', 'couch'], 'raw': 'The cat that is the lowest on the couch.', 'sent_id': 54410, 'sent': 'the cat that is the lowest on the couch'}, {'tokens': ['the', 'cat', 'on', 'the', 'left'], 'raw': 'the cat on the left', 'sent_id': 54411, 'sent': 'the cat on the left'}], 'file_name': 'COCO_train2014_000000040094_52880.jpg', 'category_id': 17, 'ann_id': 52880, 'sent_ids': [54410, 54411], 'ref_id': 25939}]\n",
      "\n",
      "[{'image_id': 61209, 'split': 'train', 'sentences': [{'tokens': ['an', 'elephant', 'with', '2', 'white', 'tusks'], 'raw': 'An elephant with 2 white tusks.', 'sent_id': 14679, 'sent': 'an elephant with 2 white tusks'}, {'tokens': ['the', 'elephant', 'on', 'the', 'right', 'side', 'of', 'the', 'picture', 'that', 'seems', 'to', 'be', 'looking', 'into', 'the', 'camera'], 'raw': 'The elephant on the right side of the picture that seems to be looking into the camera.', 'sent_id': 14680, 'sent': 'the elephant on the right side of the picture that seems to be looking into the camera'}], 'file_name': 'COCO_train2014_000000061209_584430.jpg', 'category_id': 22, 'ann_id': 584430, 'sent_ids': [14679, 14680], 'ref_id': 45915}][{'image_id': 276806, 'split': 'train', 'sentences': [{'tokens': ['a', 'woman', 'that', 'is', 'waiting', 'for', 'a', 'slice', 'of', 'cake'], 'raw': 'A woman that is waiting for a slice of cake.', 'sent_id': 87859, 'sent': 'a woman that is waiting for a slice of cake'}, {'tokens': ['a', 'woman', 'wearing', 'a', 'brown', 'jacket', 'sitting', 'beside', 'a', 'younger', 'woman'], 'raw': 'A woman wearing a brown jacket sitting beside a younger woman.', 'sent_id': 87860, 'sent': 'a woman wearing a brown jacket sitting beside a younger woman'}], 'file_name': 'COCO_train2014_000000276806_487475.jpg', 'category_id': 1, 'ann_id': 487475, 'sent_ids': [87859, 87860], 'ref_id': 38745}]\n",
      "\n",
      "[{'image_id': 412101, 'split': 'train', 'sentences': [{'tokens': ['the', 'chair', 'at', 'the', 'bottom', 'right'], 'raw': 'the chair at the bottom right', 'sent_id': 42865, 'sent': 'the chair at the bottom right'}, {'tokens': ['the', 'black', 'plastic', 'chair', 'that', 'is', 'farthest', 'away', 'from', 'the', 'kitchen'], 'raw': 'The black plastic chair that is farthest away from the kitchen.', 'sent_id': 42866, 'sent': 'the black plastic chair that is farthest away from the kitchen'}], 'file_name': 'COCO_train2014_000000412101_103736.jpg', 'category_id': 62, 'ann_id': 103736, 'sent_ids': [42865, 42866], 'ref_id': 21474}]\n",
      "[{'image_id': 397132, 'split': 'train', 'sentences': [{'tokens': ['a', 'blue', 'minivan', 'beside', 'a', 'garbage', 'truck', 'on', 'a', 'street'], 'raw': 'a blue minivan beside a garbage truck on a street', 'sent_id': 82000, 'sent': 'a blue minivan beside a garbage truck on a street'}, {'tokens': ['blue', 'minivan', 'moving', 'on', 'the', 'street', 'with', 'other', 'vehicles'], 'raw': 'Blue minivan moving on the street with other vehicles.', 'sent_id': 82001, 'sent': 'blue minivan moving on the street with other vehicles'}], 'file_name': 'COCO_train2014_000000397132_353337.jpg', 'category_id': 3, 'ann_id': 353337, 'sent_ids': [82000, 82001], 'ref_id': 36513}]\n",
      "[{'image_id': 168879, 'split': 'train', 'sentences': [{'tokens': ['a', 'giraffe', 'on', 'right', 'side'], 'raw': 'a giraffe on right side', 'sent_id': 63328, 'sent': 'a giraffe on right side'}], 'file_name': 'COCO_train2014_000000168879_595571.jpg', 'category_id': 25, 'ann_id': 595571, 'sent_ids': [63328], 'ref_id': 29326}]\n",
      "[{'image_id': 287239, 'split': 'train', 'sentences': [{'tokens': ['a', 'toddler', 'with', 'blonde', 'hair', 'and', 'blue', 'eyes', 'who', 'has', 'a', 'brush', 'upon', 'their', 'head'], 'raw': 'a toddler with blonde hair and blue eyes who has a brush upon their head.', 'sent_id': 25368, 'sent': 'a toddler with blonde hair and blue eyes who has a brush upon their head'}, {'tokens': ['a', 'blonde', '-', 'haired', 'small', 'child'], 'raw': 'A blonde-haired small child.', 'sent_id': 25369, 'sent': 'a blonde - haired small child'}], 'file_name': 'COCO_train2014_000000287239_469385.jpg', 'category_id': 1, 'ann_id': 469385, 'sent_ids': [25368, 25369], 'ref_id': 14777}][{'image_id': 180559, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'blue', 'shirt', 'and', 'blue', 'hat', 'who', 'just', 'threw', 'a', 'frisbee'], 'raw': 'A man in blue shirt and blue hat who just threw a frisbee', 'sent_id': 93684, 'sent': 'a man in blue shirt and blue hat who just threw a frisbee'}, {'tokens': ['a', 'man', 'with', 'a', 'cap', 'is', 'trying', 'to', 'catch', 'a', 'frisbee'], 'raw': 'a man with a cap is trying to catch a Frisbee', 'sent_id': 93685, 'sent': 'a man with a cap is trying to catch a frisbee'}], 'file_name': 'COCO_train2014_000000180559_521030.jpg', 'category_id': 1, 'ann_id': 521030, 'sent_ids': [93684, 93685], 'ref_id': 41030}][{'image_id': 204792, 'split': 'train', 'sentences': [{'tokens': ['a', 'sheep', 'being', 'help', 'by', 'the', 'lady', 'in', 'the', 'black', 'shit', ';', 'left', 'side', 'of', 'the', 'picture'], 'raw': 'a sheep being help by the lady in the black shit; left side of the picture', 'sent_id': 1907, 'sent': 'a sheep being help by the lady in the black shit ; left side of the picture'}, {'tokens': ['the', 'animal', 'being', 'petted', 'by', 'the', 'blonde', 'lady'], 'raw': 'The animal being petted by the blonde lady.', 'sent_id': 1908, 'sent': 'the animal being petted by the blonde lady'}], 'file_name': 'COCO_train2014_000000204792_61249.jpg', 'category_id': 20, 'ann_id': 61249, 'sent_ids': [1907, 1908], 'ref_id': 5743}]\n",
      "\n",
      "\n",
      "[{'image_id': 516596, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'wearing', 'glasses', 'eating', 'a', 'huge', 'hero'], 'raw': 'a man wearing glasses eating a huge hero', 'sent_id': 38413, 'sent': 'a man wearing glasses eating a huge hero'}, {'tokens': ['a', 'man', 'wearing', 'glasses', 'eating', 'a', 'sandwich'], 'raw': 'a man wearing glasses eating a sandwich', 'sent_id': 38414, 'sent': 'a man wearing glasses eating a sandwich'}], 'file_name': 'COCO_train2014_000000516596_424086.jpg', 'category_id': 1, 'ann_id': 424086, 'sent_ids': [38413, 38414], 'ref_id': 19743}]\n",
      "[{'image_id': 28085, 'split': 'train', 'sentences': [{'tokens': ['pink', 'color', 'donuts', 'in', 'front', 'of', 'a', 'brown', 'colored', 'one'], 'raw': 'pink color donuts in front of a brown colored one', 'sent_id': 56205, 'sent': 'pink color donuts in front of a brown colored one'}, {'tokens': ['a', 'pink', 'color', 'ring', 'chocolate', 'iced', 'doughnut', 'in', 'front', 'of', 'the', 'brown', 'color'], 'raw': 'A PINK COLOR RING CHOCOLATE ICED DOUGHNUT IN FRONT OF THE BROWN COLOR', 'sent_id': 56206, 'sent': 'a pink color ring chocolate iced doughnut in front of the brown color'}], 'file_name': 'COCO_train2014_000000028085_1078993.jpg', 'category_id': 60, 'ann_id': 1078993, 'sent_ids': [56205, 56206], 'ref_id': 26602}]\n",
      "[{'image_id': 219633, 'split': 'train', 'sentences': [{'tokens': ['a', 'light', 'brown', 'dog', 'standing', 'next', 'to', 'a', 'dark', 'brown', 'dog'], 'raw': 'A light brown dog standing next to a dark brown dog.', 'sent_id': 79238, 'sent': 'a light brown dog standing next to a dark brown dog'}, {'tokens': ['the', 'lightest', 'color', 'dog', 'that', 'is', 'on', 'the', 'right'], 'raw': 'The lightest color dog that is on the right.', 'sent_id': 79239, 'sent': 'the lightest color dog that is on the right'}], 'file_name': 'COCO_train2014_000000219633_15879.jpg', 'category_id': 18, 'ann_id': 15879, 'sent_ids': [79238, 79239], 'ref_id': 35444}]\n",
      "[{'image_id': 110002, 'split': 'train', 'sentences': [{'tokens': ['large', 'zebra', 'left', 'of', 'screen'], 'raw': 'large zebra left of screen', 'sent_id': 100858, 'sent': 'large zebra left of screen'}, {'tokens': ['zebra', 'in', 'front', 'of', 'all', 'the', 'others'], 'raw': 'Zebra in front of all the others.', 'sent_id': 100859, 'sent': 'zebra in front of all the others'}], 'file_name': 'COCO_train2014_000000110002_589620.jpg', 'category_id': 24, 'ann_id': 589620, 'sent_ids': [100858, 100859], 'ref_id': 43849}]\n",
      "[{'image_id': 217460, 'split': 'train', 'sentences': [{'tokens': ['large', 'pizza', 'with', 'all', 'toppings'], 'raw': 'large pizza with all toppings', 'sent_id': 14009, 'sent': 'large pizza with all toppings'}, {'tokens': ['a', 'pizza', 'with', 'olives', ',', 'mushrooms', ',', 'artichokes', ',', 'and', 'ham'], 'raw': 'A pizza with olives, mushrooms, artichokes, and ham.', 'sent_id': 14010, 'sent': 'a pizza with olives , mushrooms , artichokes , and ham'}], 'file_name': 'COCO_train2014_000000217460_1077159.jpg', 'category_id': 59, 'ann_id': 1077159, 'sent_ids': [14009, 14010], 'ref_id': 10427}][{'image_id': 324336, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'standing', 'outside', 'with', 'an', 'umbrella'], 'raw': 'A man standing outside with an umbrella.', 'sent_id': 61801, 'sent': 'a man standing outside with an umbrella'}, {'tokens': ['a', 'lady', 'wearing', 'specs', 'and', 'holding', 'black', 'umbrella'], 'raw': 'A lady wearing specs and holding black umbrella', 'sent_id': 61802, 'sent': 'a lady wearing specs and holding black umbrella'}], 'file_name': 'COCO_train2014_000000324336_1734772.jpg', 'category_id': 1, 'ann_id': 1734772, 'sent_ids': [61801, 61802], 'ref_id': 28726}]\n",
      "[{'image_id': 10495, 'split': 'train', 'sentences': [{'tokens': ['a', 'baby', 'elephant', 'under', 'a', 'big', 'elephant'], 'raw': 'A baby elephant under a big elephant.', 'sent_id': 2010, 'sent': 'a baby elephant under a big elephant'}, {'tokens': ['a', 'baby', 'elephant', 'walking', 'with', 'its', 'mother'], 'raw': 'A baby elephant walking with its mother.', 'sent_id': 2011, 'sent': 'a baby elephant walking with its mother'}], 'file_name': 'COCO_train2014_000000010495_1821437.jpg', 'category_id': 22, 'ann_id': 1821437, 'sent_ids': [2010, 2011], 'ref_id': 5786}]\n",
      "\n",
      "[{'image_id': 279753, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'a', 'red', 'shirt', 'and', 'black', 'pants'], 'raw': 'A man in a red shirt and black pants', 'sent_id': 97009, 'sent': 'a man in a red shirt and black pants'}], 'file_name': 'COCO_train2014_000000279753_535460.jpg', 'category_id': 1, 'ann_id': 535460, 'sent_ids': [97009], 'ref_id': 42327}]\n",
      "[{'image_id': 427395, 'split': 'train', 'sentences': [{'tokens': ['a', 'purple', 'toothbrush'], 'raw': 'A purple toothbrush.', 'sent_id': 10965, 'sent': 'a purple toothbrush'}, {'tokens': ['purple', 'toothbrush', 'with', 'green', 'bristles'], 'raw': 'Purple toothbrush with green bristles.', 'sent_id': 10966, 'sent': 'purple toothbrush with green bristles'}], 'file_name': 'COCO_train2014_000000427395_342229.jpg', 'category_id': 90, 'ann_id': 342229, 'sent_ids': [10965, 10966], 'ref_id': 9269}][{'image_id': 63217, 'split': 'train', 'sentences': [{'tokens': ['a', 'boat', 'in', 'the', 'water', 'with', 'the', 'words', 'u', '.', 's', '.', 'coast', 'guard', 'on', 'the', 'side'], 'raw': 'a boat in the water with the words U.S. Coast Guard on the side', 'sent_id': 78927, 'sent': 'a boat in the water with the words u . s . coast guard on the side'}, {'tokens': ['a', 'red', 'and', 'white', 'us', 'coast', 'guard', 'ship'], 'raw': 'A red and white US Coast Guard Ship.', 'sent_id': 78928, 'sent': 'a red and white us coast guard ship'}], 'file_name': 'COCO_train2014_000000063217_179804.jpg', 'category_id': 9, 'ann_id': 179804, 'sent_ids': [78927, 78928], 'ref_id': 35324}]\n",
      "\n",
      "[{'image_id': 17520, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'wearing', 'a', 'checkered', 'shirt', 'walking', 'and', 'talking', 'on', 'the', 'phone'], 'raw': 'A man wearing a checkered shirt walking and talking on the phone.', 'sent_id': 56923, 'sent': 'a man wearing a checkered shirt walking and talking on the phone'}, {'tokens': ['man', 'wearing', 'a', 'plaid', 'shirt'], 'raw': 'man wearing a plaid shirt.', 'sent_id': 56924, 'sent': 'man wearing a plaid shirt'}], 'file_name': 'COCO_train2014_000000017520_196780.jpg', 'category_id': 1, 'ann_id': 196780, 'sent_ids': [56923, 56924], 'ref_id': 26881}]\n",
      "[{'image_id': 137203, 'split': 'train', 'sentences': [{'tokens': ['a', 'woman', 'in', 'red', 'is', 'trying', 'to', 'catch', 'a', 'pink', 'frisbee'], 'raw': 'A woman in red is trying to catch a pink frisbee', 'sent_id': 100490, 'sent': 'a woman in red is trying to catch a pink frisbee'}, {'tokens': ['a', 'woman', 'in', 'a', 'red', 'shirt', 'and', 'jean', 'shorts', 'about', 'to', 'catch', 'a', 'frisbee'], 'raw': 'A woman in a red shirt and jean shorts about to catch a Frisbee.', 'sent_id': 100491, 'sent': 'a woman in a red shirt and jean shorts about to catch a frisbee'}], 'file_name': 'COCO_train2014_000000137203_459492.jpg', 'category_id': 1, 'ann_id': 459492, 'sent_ids': [100490, 100491], 'ref_id': 49657}]\n",
      "[{'image_id': 34674, 'split': 'train', 'sentences': [{'tokens': ['the', 'horse', 'of', 'the', 'man', 'without', 'a', 'hat'], 'raw': 'The horse of the man without a hat', 'sent_id': 47889, 'sent': 'the horse of the man without a hat'}, {'tokens': ['horse', 'being', 'ridden', 'by', 'the', 'man', 'without', 'a', 'hat'], 'raw': 'Horse being ridden by the man without a hat.', 'sent_id': 47890, 'sent': 'horse being ridden by the man without a hat'}], 'file_name': 'COCO_train2014_000000034674_56042.jpg', 'category_id': 19, 'ann_id': 56042, 'sent_ids': [47889, 47890], 'ref_id': 23399}]\n",
      "[{'image_id': 293975, 'split': 'train', 'sentences': [{'tokens': ['a', 'white', 'laptop', 'comuter'], 'raw': 'A white laptop comuter.', 'sent_id': 48293, 'sent': 'a white laptop comuter'}, {'tokens': ['white', 'laptop'], 'raw': 'white laptop', 'sent_id': 48294, 'sent': 'white laptop'}], 'file_name': 'COCO_train2014_000000293975_1099887.jpg', 'category_id': 73, 'ann_id': 1099887, 'sent_ids': [48293, 48294], 'ref_id': 23543}]\n",
      "[{'image_id': 323705, 'split': 'train', 'sentences': [{'tokens': ['a', 'clock', 'face', 'where', 'all', 'the', 'numbers', 'are', 'displayed'], 'raw': 'A clock face where all the numbers are displayed.', 'sent_id': 8682, 'sent': 'a clock face where all the numbers are displayed'}, {'tokens': ['clock', 'facing', 'the', 'front'], 'raw': 'clock facing the front.', 'sent_id': 8683, 'sent': 'clock facing the front'}], 'file_name': 'COCO_train2014_000000323705_335093.jpg', 'category_id': 85, 'ann_id': 335093, 'sent_ids': [8682, 8683], 'ref_id': 8358}]\n",
      "[{'image_id': 416819, 'split': 'train', 'sentences': [{'tokens': ['a', 'zebra', 'with', 'his', 'back', 'to', 'the', 'camera'], 'raw': 'A zebra with his back to the camera', 'sent_id': 56637, 'sent': 'a zebra with his back to the camera'}, {'tokens': ['zebra', 'turn', 'the', 'head', 'left', 'hand', 'side'], 'raw': 'Zebra turn the head left hand side', 'sent_id': 56638, 'sent': 'zebra turn the head left hand side'}], 'file_name': 'COCO_train2014_000000416819_591965.jpg', 'category_id': 24, 'ann_id': 591965, 'sent_ids': [56637, 56638], 'ref_id': 26766}]\n",
      "[{'image_id': 522298, 'split': 'train', 'sentences': [{'tokens': ['a', 'pink', 'umbrella'], 'raw': 'A pink umbrella.', 'sent_id': 54683, 'sent': 'a pink umbrella'}, {'tokens': ['the', 'red', 'umbrella'], 'raw': 'the red umbrella', 'sent_id': 54684, 'sent': 'the red umbrella'}], 'file_name': 'COCO_train2014_000000522298_283547.jpg', 'category_id': 28, 'ann_id': 283547, 'sent_ids': [54683, 54684], 'ref_id': 26042}]\n",
      "[{'image_id': 427756, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'standing', 'with', 'blue', 'striped', 'shirt'], 'raw': 'A man standing with blue striped shirt.', 'sent_id': 62696, 'sent': 'a man standing with blue striped shirt'}, {'tokens': ['a', 'man', 'in', 'black', 'jeans', 'and', 'blue', 'and', 'black', 'striped', 'shirt', 'holding', 'wii', 'in', 'hand', 'standing', 'in', 'front', 'of', 'tv'], 'raw': 'A man in black jeans and blue and black striped shirt holding wii in hand standing in front of TV.', 'sent_id': 62697, 'sent': 'a man in black jeans and blue and black striped shirt holding wii in hand standing in front of tv'}], 'file_name': 'COCO_train2014_000000427756_490450.jpg', 'category_id': 1, 'ann_id': 490450, 'sent_ids': [62696, 62697], 'ref_id': 29075}][{'image_id': 405136, 'split': 'train', 'sentences': [{'tokens': ['a', 'woman', 'in', 'a', 'sleeveless', 'shirt', 'is', 'sitting', 'in', 'the', 'passenger', 'seat', 'watching', 'a', 'horse'], 'raw': 'A woman in a sleeveless shirt is sitting in the passenger seat watching a horse', 'sent_id': 12514, 'sent': 'a woman in a sleeveless shirt is sitting in the passenger seat watching a horse'}, {'tokens': ['a', 'person', 'sitting', 'next', 'to', 'the', 'driver'], 'raw': 'A person sitting next to the driver', 'sent_id': 12515, 'sent': 'a person sitting next to the driver'}], 'file_name': 'COCO_train2014_000000405136_188388.jpg', 'category_id': 1, 'ann_id': 188388, 'sent_ids': [12514, 12515], 'ref_id': 9851}]\n",
      "\n",
      "[{'image_id': 16465, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'in', 'a', 'white', 'soccer', 'uniform'], 'raw': 'A man in a white soccer uniform.', 'sent_id': 31029, 'sent': 'a man in a white soccer uniform'}, {'tokens': ['the', 'player', 'wearing', 'the', 'white', 'clothes'], 'raw': 'The player wearing the white clothes.', 'sent_id': 31030, 'sent': 'the player wearing the white clothes'}], 'file_name': 'COCO_train2014_000000016465_477891.jpg', 'category_id': 1, 'ann_id': 477891, 'sent_ids': [31029, 31030], 'ref_id': 16909}]\n",
      "[{'image_id': 326685, 'split': 'train', 'sentences': [{'tokens': ['a', 'blurry', 'shot', 'of', 'people', 'riding', 'a', 'scooter', 'in', 'the', 'rain'], 'raw': 'a blurry shot of people riding a scooter in the rain', 'sent_id': 19396, 'sent': 'a blurry shot of people riding a scooter in the rain'}, {'tokens': ['top', 'right', 'blurry', 'motorcyclist', 'going', 'out', 'of', 'frame'], 'raw': 'top right blurry motorcyclist going out of frame.', 'sent_id': 19397, 'sent': 'top right blurry motorcyclist going out of frame'}], 'file_name': 'COCO_train2014_000000326685_1713145.jpg', 'category_id': 1, 'ann_id': 1713145, 'sent_ids': [19396, 19397], 'ref_id': 12499}]\n",
      "[{'image_id': 326685, 'split': 'train', 'sentences': [{'tokens': ['green', '&', 'white', 'scooter', 'that', 'women', 'are', 'riding', 'in', 'rain'], 'raw': 'Green & white scooter that women are riding in rain', 'sent_id': 98188, 'sent': 'green & white scooter that women are riding in rain'}, {'tokens': ['white', 'color', 'motor', 'cycle'], 'raw': 'white color motor cycle', 'sent_id': 98189, 'sent': 'white color motor cycle'}], 'file_name': 'COCO_train2014_000000326685_147911.jpg', 'category_id': 4, 'ann_id': 147911, 'sent_ids': [98188, 98189], 'ref_id': 42804}][{'image_id': 316667, 'split': 'train', 'sentences': [{'tokens': ['a', 'bench', 'that', 'is', 'laying', 'on', 'the', 'ground'], 'raw': 'A bench that is laying on the ground', 'sent_id': 15264, 'sent': 'a bench that is laying on the ground'}, {'tokens': ['a', 'bench', 'on', 'which', 'the', 'guy', 'is', 'operating', 'the', 'skate', 'board'], 'raw': 'A bench on which the guy is operating the skate board', 'sent_id': 15265, 'sent': 'a bench on which the guy is operating the skate board'}], 'file_name': 'COCO_train2014_000000316667_1394952.jpg', 'category_id': 15, 'ann_id': 1394952, 'sent_ids': [15264, 15265], 'ref_id': 10907}]\n",
      "\n",
      "[{'image_id': 60170, 'split': 'train', 'sentences': [{'tokens': ['a', 'baby', 'elephant'], 'raw': 'A baby elephant', 'sent_id': 50475, 'sent': 'a baby elephant'}, {'tokens': ['an', 'elephant', 'that', 'is', 'relatively', 'small'], 'raw': 'An elephant that is relatively small.', 'sent_id': 50476, 'sent': 'an elephant that is relatively small'}], 'file_name': 'COCO_train2014_000000060170_582132.jpg', 'category_id': 22, 'ann_id': 582132, 'sent_ids': [50475, 50476], 'ref_id': 24381}]\n",
      "[{'image_id': 546366, 'split': 'train', 'sentences': [{'tokens': ['tennis', 'player', 'holding', 'racquet'], 'raw': 'tennis player holding racquet', 'sent_id': 1541, 'sent': 'tennis player holding racquet'}, {'tokens': ['a', 'woman', 'wearing', 'white'], 'raw': 'a woman wearing white.', 'sent_id': 1542, 'sent': 'a woman wearing white'}], 'file_name': 'COCO_train2014_000000546366_2150776.jpg', 'category_id': 1, 'ann_id': 2150776, 'sent_ids': [1541, 1542], 'ref_id': 45385}][{'image_id': 191994, 'split': 'train', 'sentences': [{'tokens': ['pizza', 'in', 'a', 'tray', 'ready', 'to', 'eat'], 'raw': 'pizza in a tray ready to eat', 'sent_id': 67556, 'sent': 'pizza in a tray ready to eat'}, {'tokens': ['a', 'sandwich', 'with', 'vegetables', 'on', 'a', 'white', 'bread', 'in', 'a', 'carrier'], 'raw': 'A sandwich with vegetables on a white bread in a carrier.', 'sent_id': 67557, 'sent': 'a sandwich with vegetables on a white bread in a carrier'}], 'file_name': 'COCO_train2014_000000191994_1539809.jpg', 'category_id': 51, 'ann_id': 1539809, 'sent_ids': [67556, 67557], 'ref_id': 30964}]\n",
      "\n",
      "[{'image_id': 239803, 'split': 'train', 'sentences': [{'tokens': ['a', 'teen', 'in', 'a', 'black', 'coat', 'to', 'the', 'right', 'of', 'two', 'other', 'teens'], 'raw': 'A teen in a black coat to the right of two other teens.', 'sent_id': 64376, 'sent': 'a teen in a black coat to the right of two other teens'}, {'tokens': ['a', 'young', 'gentleman', 'wearing', 'a', 'black', 'leather', 'jacket'], 'raw': 'A young gentleman wearing a black leather jacket', 'sent_id': 64377, 'sent': 'a young gentleman wearing a black leather jacket'}], 'file_name': 'COCO_train2014_000000239803_2166462.jpg', 'category_id': 1, 'ann_id': 2166462, 'sent_ids': [64376, 64377], 'ref_id': 29734}]\n",
      "[{'image_id': 235646, 'split': 'train', 'sentences': [{'tokens': ['the', 'giraffe', 'whose', 'head', 'is', 'not', 'visible'], 'raw': 'The giraffe whose head is not visible', 'sent_id': 50286, 'sent': 'the giraffe whose head is not visible'}, {'tokens': ['body', 'of', 'a', 'giraffe', 'stading', 'to', 'the', 'upper', 'right', 'of', 'the', 'group', 'against', 'the', 'fence'], 'raw': 'Body of a giraffe stading to the upper right of the group against the fence', 'sent_id': 50287, 'sent': 'body of a giraffe stading to the upper right of the group against the fence'}], 'file_name': 'COCO_train2014_000000235646_1414611.jpg', 'category_id': 25, 'ann_id': 1414611, 'sent_ids': [50286, 50287], 'ref_id': 24303}][{'image_id': 176385, 'split': 'train', 'sentences': [{'tokens': ['there', 'is', 'nobody', 'riding', 'this', 'skateboard'], 'raw': 'There is nobody riding this skateboard.', 'sent_id': 24280, 'sent': 'there is nobody riding this skateboard'}, {'tokens': ['a', 'skateboard', 'alone', 'on', 'the', 'ground'], 'raw': 'A skateboard alone on the ground.', 'sent_id': 24281, 'sent': 'a skateboard alone on the ground'}], 'file_name': 'COCO_train2014_000000176385_645613.jpg', 'category_id': 41, 'ann_id': 645613, 'sent_ids': [24280, 24281], 'ref_id': 14373}]\n",
      "\n",
      "[{'image_id': 131007, 'split': 'train', 'sentences': [{'tokens': ['black', 'chair', 'in', 'corner'], 'raw': 'black chair in corner', 'sent_id': 98162, 'sent': 'black chair in corner'}, {'tokens': ['a', 'black', 'recliner', 'chair'], 'raw': 'a black recliner chair', 'sent_id': 98163, 'sent': 'a black recliner chair'}], 'file_name': 'COCO_train2014_000000131007_115747.jpg', 'category_id': 63, 'ann_id': 115747, 'sent_ids': [98162, 98163], 'ref_id': 42794}]\n",
      "[{'image_id': 155995, 'split': 'train', 'sentences': [{'tokens': ['a', 'child', 'baseball', 'player', 'throwing', 'a', 'pitch', 'to', 'a', 'batter'], 'raw': 'A child baseball player throwing a pitch to a batter.', 'sent_id': 15351, 'sent': 'a child baseball player throwing a pitch to a batter'}, {'tokens': ['the', 'pitcher'], 'raw': 'the pitcher', 'sent_id': 15352, 'sent': 'the pitcher'}], 'file_name': 'COCO_train2014_000000155995_525361.jpg', 'category_id': 1, 'ann_id': 525361, 'sent_ids': [15351, 15352], 'ref_id': 10939}][{'image_id': 514025, 'split': 'train', 'sentences': [{'tokens': ['a', 'large', 'blue', 'and', 'white', 'crane', 'standing', 'on', 'the', 'dock'], 'raw': 'a large blue and white crane standing on the dock', 'sent_id': 43870, 'sent': 'a large blue and white crane standing on the dock'}, {'tokens': ['a', 'bird', 'that', 'is', 'standing', 'on', 'the', 'dock', 'with', 'long', 'legs', 'and', 'a', 'scrunched', 'up', 'neck'], 'raw': 'A bird that is standing on the dock with long legs and a scrunched up neck.', 'sent_id': 43871, 'sent': 'a bird that is standing on the dock with long legs and a scrunched up neck'}], 'file_name': 'COCO_train2014_000000514025_36534.jpg', 'category_id': 16, 'ann_id': 36534, 'sent_ids': [43870, 43871], 'ref_id': 21856}]\n",
      "[{'image_id': 485705, 'split': 'train', 'sentences': [{'tokens': ['middle', 'banana', 'in', 'the', 'bunch'], 'raw': 'middle banana in the bunch', 'sent_id': 30403, 'sent': 'middle banana in the bunch'}, {'tokens': ['the', 'bottom', 'banana', 'in', 'the', 'right', 'hand', 'picture'], 'raw': 'the bottom banana in the right hand picture', 'sent_id': 30404, 'sent': 'the bottom banana in the right hand picture'}], 'file_name': 'COCO_train2014_000000485705_1043190.jpg', 'category_id': 52, 'ann_id': 1043190, 'sent_ids': [30403, 30404], 'ref_id': 16660}]\n",
      "\n",
      "[{'image_id': 308758, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'wearing', 'a', 'chef', 'jacket'], 'raw': 'a man wearing a chef jacket', 'sent_id': 30897, 'sent': 'a man wearing a chef jacket'}, {'tokens': ['man', 'preparing', 'a', 'dish'], 'raw': 'Man preparing a dish', 'sent_id': 30898, 'sent': 'man preparing a dish'}], 'file_name': 'COCO_train2014_000000308758_196341.jpg', 'category_id': 1, 'ann_id': 196341, 'sent_ids': [30897, 30898], 'ref_id': 16860}][{'image_id': 54194, 'split': 'train', 'sentences': [{'tokens': ['a', 'lady', 'with', 'black', 'long', 'hair', 'in', 'a', 'yellow', 'shirt', ',', 'putting', 'butter', 'on', 'a', 'bread'], 'raw': 'a lady with black long hair in a yellow shirt, putting butter on a bread', 'sent_id': 52248, 'sent': 'a lady with black long hair in a yellow shirt , putting butter on a bread'}, {'tokens': ['a', 'woman', 'in', 'yellow', 'with', 'a', 'knife', 'in', 'her', 'hand', 'buttering', 'her', 'sub', 'sandwich'], 'raw': 'A woman in yellow with a knife in her hand buttering her sub sandwich.', 'sent_id': 52249, 'sent': 'a woman in yellow with a knife in her hand buttering her sub sandwich'}], 'file_name': 'COCO_train2014_000000054194_233992.jpg', 'category_id': 1, 'ann_id': 233992, 'sent_ids': [52248, 52249], 'ref_id': 25093}]\n",
      "\n",
      "[{'image_id': 563447, 'split': 'train', 'sentences': [{'tokens': ['the', 'kid', 'wearing', 'glasses'], 'raw': 'the kid wearing glasses', 'sent_id': 46806, 'sent': 'the kid wearing glasses'}, {'tokens': ['a', 'short', 'girl', 'standing', 'next', 'to', 'a', 'short', 'horse', 'wearing', 'a', 'belt', 'buckle', 'and', 'glasses'], 'raw': 'A short girl standing next to a short horse wearing a belt buckle and glasses', 'sent_id': 46807, 'sent': 'a short girl standing next to a short horse wearing a belt buckle and glasses'}], 'file_name': 'COCO_train2014_000000563447_186920.jpg', 'category_id': 1, 'ann_id': 186920, 'sent_ids': [46806, 46807], 'ref_id': 22985}]\n",
      "[{'image_id': 404592, 'split': 'train', 'sentences': [{'tokens': ['a', 'man', 'sitting', 'on', 'a', 'couch', 'between', 'two', 'other', 'people'], 'raw': 'A man sitting on a couch between two other people.', 'sent_id': 37227, 'sent': 'a man sitting on a couch between two other people'}, {'tokens': ['a', 'man', 'with', 'black', 'hair', 'wearing', 'a', 'black', 'shirt', 'and', 'holding', 'an', 'apple', 'laptop', 'between', 'a', 'man', 'and', 'a', 'woman'], 'raw': 'A man with black hair wearing a black shirt and holding an apple laptop between a man and a woman.', 'sent_id': 37228, 'sent': 'a man with black hair wearing a black shirt and holding an apple laptop between a man and a woman'}], 'file_name': 'COCO_train2014_000000404592_203428.jpg', 'category_id': 1, 'ann_id': 203428, 'sent_ids': [37227, 37228], 'ref_id': 19287}]\n",
      "[{'image_id': 36041, 'split': 'train', 'sentences': [{'tokens': ['a', 'girl', 'uitting', 'the', 'bike', 'with', 'boy', 'friend'], 'raw': 'A GIRL UITTING THE BIKE WITH BOY FRIEND', 'sent_id': 75019, 'sent': 'a girl uitting the bike with boy friend'}, {'tokens': ['the', 'girl', 'on', 'the', 'red', 'scooter'], 'raw': 'The girl on the red scooter', 'sent_id': 75020, 'sent': 'the girl on the red scooter'}], 'file_name': 'COCO_train2014_000000036041_199362.jpg', 'category_id': 1, 'ann_id': 199362, 'sent_ids': [75019, 75020], 'ref_id': 33798}]\n",
      "[{'image_id': 58105, 'split': 'train', 'sentences': [{'tokens': ['upside', 'down', 'chair'], 'raw': 'upside down chair', 'sent_id': 15047, 'sent': 'upside down chair'}, {'tokens': ['the', 'upside', 'down', 'chair'], 'raw': 'The upside down chair.', 'sent_id': 15048, 'sent': 'the upside down chair'}], 'file_name': 'COCO_train2014_000000058105_1587145.jpg', 'category_id': 62, 'ann_id': 1587145, 'sent_ids': [15047, 15048], 'ref_id': 10822}]\n",
      "[{'image_id': 309386, 'split': 'train', 'sentences': [{'tokens': ['a', 'food', 'on', 'tabule'], 'raw': 'a food on tabule', 'sent_id': 62106, 'sent': 'a food on tabule'}, {'tokens': ['a', 'table', 'with', 'pizza', 'slices', 'and', 'beer', 'on', 'it'], 'raw': 'A table with pizza slices and beer on it.', 'sent_id': 62107, 'sent': 'a table with pizza slices and beer on it'}], 'file_name': 'COCO_train2014_000000309386_1091316.jpg', 'category_id': 67, 'ann_id': 1091316, 'sent_ids': [62106, 62107], 'ref_id': 28845}][{'image_id': 419062, 'split': 'train', 'sentences': [{'tokens': ['a', 'medium', 'elephant', 'on', 'the', 'left'], 'raw': 'a medium elephant on the left', 'sent_id': 73909, 'sent': 'a medium elephant on the left'}, {'tokens': ['elephant', 'on', 'shore'], 'raw': 'elephant on shore', 'sent_id': 73910, 'sent': 'elephant on shore'}], 'file_name': 'COCO_train2014_000000419062_580921.jpg', 'category_id': 22, 'ann_id': 580921, 'sent_ids': [73909, 73910], 'ref_id': 33380}]\n",
      "\n",
      "[{'image_id': 325837, 'split': 'train', 'sentences': [{'tokens': ['a', 'glass', 'window', 'pain', 'behind', 'a', 'man', \"'\", 's'], 'raw': \"a glass window pain behind a man's\", 'sent_id': 68994, 'sent': \"a glass window pain behind a man ' s\"}, {'tokens': ['a', 'window', 'right', 'behind', 'the', 'man', \"'\", 's', 'head'], 'raw': \"a window right behind the man's head\", 'sent_id': 68995, 'sent': \"a window right behind the man ' s head\"}], 'file_name': 'COCO_train2014_000000325837_1732077.jpg', 'category_id': 1, 'ann_id': 1732077, 'sent_ids': [68994, 68995], 'ref_id': 31514}]\n",
      "[{'image_id': 258727, 'split': 'train', 'sentences': [{'tokens': ['a', 'sheep', 'eating', 'grass', 'facing', 'away', 'from', 'the', 'camera', 'and', 'closer', 'to', 'the', 'building'], 'raw': 'A sheep eating grass facing away from the camera and closer to the building.', 'sent_id': 95715, 'sent': 'a sheep eating grass facing away from the camera and closer to the building'}, {'tokens': ['there', 'is', 'one', 'sheep', 'is', 'eating', 'grass', 'infront', 'of', 'a', 'home'], 'raw': 'There is one sheep is eating grass infront of a home', 'sent_id': 95716, 'sent': 'there is one sheep is eating grass infront of a home'}], 'file_name': 'COCO_train2014_000000258727_62432.jpg', 'category_id': 20, 'ann_id': 62432, 'sent_ids': [95715, 95716], 'ref_id': 41806}]\n",
      "[{'image_id': 15262, 'split': 'train', 'sentences': [{'tokens': ['a', 'fork', 'on', 'a', 'plate'], 'raw': 'A fork on a plate', 'sent_id': 21138, 'sent': 'a fork on a plate'}, {'tokens': ['a', 'silver', 'fork'], 'raw': 'a silver fork', 'sent_id': 21139, 'sent': 'a silver fork'}], 'file_name': 'COCO_train2014_000000015262_1889611.jpg', 'category_id': 48, 'ann_id': 1889611, 'sent_ids': [21138, 21139], 'ref_id': 13163}]\n",
      "[{'image_id': 62336, 'split': 'train', 'sentences': [{'tokens': ['the', 'man', 'in', 'the', 'black', 'pullover', 'jacket', 'sitting', 'on', 'the', 'right'], 'raw': 'the man in the black pullover jacket sitting on the right', 'sent_id': 36534, 'sent': 'the man in the black pullover jacket sitting on the right'}, {'tokens': ['a', 'man', 'in', 'a', 'black', 'jacket', 'with', 'his', 'eyes', 'closed', ',', 'drinking', 'from', 'a', 'glass', 'of', 'wine'], 'raw': 'A man in a black jacket with his eyes closed, drinking from a glass of wine', 'sent_id': 36535, 'sent': 'a man in a black jacket with his eyes closed , drinking from a glass of wine'}], 'file_name': 'COCO_train2014_000000062336_1716597.jpg', 'category_id': 1, 'ann_id': 1716597, 'sent_ids': [36534, 36535], 'ref_id': 19021}]\n",
      "torch.Size([8, 3, 480, 480])\n",
      "torch.Size([8, 480, 480])\n",
      "torch.Size([8, 1, 20])\n",
      "torch.Size([8, 1, 20])\n",
      "tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])\n"
     ]
    }
   ],
   "source": [
    "# sample datas\n",
    "for i, (img, target, tensor_embeddings, attention_mask) in enumerate(data_loader):\n",
    "    print(img.shape)\n",
    "    print(target.shape)\n",
    "    print(tensor_embeddings.shape)\n",
    "    print(attention_mask.shape)\n",
    "    print(attention_mask[0])\n",
    "    break"
   ]
  },
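  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal follow-up sketch (added here, not part of the original pipeline):\n",
    "# recover the true sentence lengths from the padding masks printed above.\n",
    "# Assumes the loader yields (img, target, tensor_embeddings, attention_mask)\n",
    "# as in the previous cell, with mask entries 1 for real tokens and 0 for padding.\n",
    "img, target, tensor_embeddings, attention_mask = next(iter(data_loader))\n",
    "lengths = attention_mask.squeeze(1).sum(dim=1)  # (batch,) count of real tokens\n",
    "print(lengths)  # e.g. the mask printed above has 9 ones out of 20 slots"
   ]
  },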
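  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged sketch (the naming convention is an assumption, not confirmed by this\n",
    "# notebook): the refs printed above appear to store file_name as\n",
    "# '<COCO image name>_<ann_id>.jpg'. Recover the plain image file name for one\n",
    "# record copied verbatim from the output above.\n",
    "ref = {'image_id': 326685, 'ann_id': 1713145,\n",
    "       'file_name': 'COCO_train2014_000000326685_1713145.jpg',\n",
    "       'sentences': [{'raw': 'a blurry shot of people riding a scooter in the rain'}]}\n",
    "stem, suffix = ref['file_name'].rsplit('_', 1)  # split off the ann_id suffix\n",
    "coco_name = stem + '.jpg'                       # COCO_train2014_000000326685.jpg\n",
    "assert suffix == str(ref['ann_id']) + '.jpg'\n",
    "print(coco_name)\n",
    "print([s['raw'] for s in ref['sentences']])"
   ]
  },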
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAArIAAAIpCAYAAABe2NsAAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABy6UlEQVR4nO3deVwU9f8H8Ndwg7AoCAIJiEcioqbmgSeiIN6meWQGpmn5Q1MsMytT0/IoTSuPMvM2U/NASxQv8CIVMg+8xSO5FITlEFzZ+f1B7Nd1dzmWxd2F1/Px8JH7mc/MvGdmqVfDZz4jiKIogoiIiIjIyJjouwAiIiIiIm0wyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhkRQRDK/cff3x8AcPToUaXP1dGsWbMgCAJmzZql71JeGJlMhjVr1mDgwIHw8PCAtbU1bGxsUL9+fbz++uvYtGkTnjx5ou8yjcratWshCAJGjRql71KIqj0zfRdARGUXGhqq0paSkoL9+/drXO7t7V3pdZHuCYIAAKjIW8Tj4+Px+uuvIzExEYIgoEWLFmjbti1MTExw+/Zt7Nq1C7///js+/fRTJCQkwMbGRlflExG9EAyyREZk7dq1Km1Hjx5VBFl1y6l6io+PR+fOnZGXl4e+ffviu+++g5eXl1KfBw8e4Ntvv8WiRYvw5MkTBtkyeu2119C+fXvY29vruxSiao9BloioipHJZBgyZAjy8vIwcOBA/P777zAxUR1J5uTkhK+++gqvvfYaLC0t9VCpcbK3t2eIJTIQHCNLVA3JZDIsWLAATZs2hbW1NRwdHTFo0CBcvnxZpe/t27chCALq1auHwsJCLF68GC1btoStra3i19/F9u/fj759+8LZ2RkWFhZwc3PDsGHDcPbsWbV11KtXD4Ig4Pbt22qXjxo1CoIgqL3TnJubixkzZqBRo0awtLSEm5sbRo8ejfv375dpLOyDBw8QFhYGd3d3WFhYwN3dHRMnTkRmZqZK32fHRKanpyMsLAweHh6wtLSEp6cnwsPD8ejRoxLXU+fZc1usuPZiz4951nSunrV582bcunULFhYWWLFihdoQ+6w2bdrA2tpaqS0vLw/z589Hq1atYGdnBxsbGzRt2hSfffaZ2mN99ljkcjm+++47NG/eHDY2NnB1dcV7772HjIwMAEBBQQHmzJkDb29vWFtbw83NDZMmTUJubq7Kdp+9lnfu3EFISAhcXV1hZWWFl19+GbNmzcLjx49V1pPJZNi4cSPefPNNeHt7QyKRwNraGo0bN8b777+PpKQktefC398fgiDg6NGjOHbsGPr16wcnJyeYmJgovoclXdeDBw+iX79+qFOnDszNzVGrVi00atQII0eORExMjEr/p0+fYuXKlejQoQPs7e1hZWWFRo0a4f3338f9+/fV1lj8XQCA33//HZ06dYJEIkGNGjXQsWNH/Pnnn2rXI6qKeEeWqJqRyWTo3bs3Tp48iS5duqBJkyY4ffo0du7ciSNHjuDvv/9WClbFRFHEoEGDEBkZic6dO6NJkya4dOmSYvmMGTMwd+5cCIKADh06wMPDA5cvX8bWrVvx+++/46effsLo0aN1cgy5ubno1q0bzpw5A1tbWwQFBcHa2hqRkZH4448/0Lt37xLXv3fvHlq1agWZTIaOHTsiPz8fJ06cwA8//IC//voLJ06cgLm5ucp6jx49Qrt27ZCenq4UeJYsWYJ9+/bh2LFjcHJyqtCxvfLKKwgNDcW6desAqI57trW1LXUbu3fvBgD07NkTLi4u5a4hIyMD3bt3x7lz5yCRSBAQEABzc3NER0fjyy+/xObNm3H48GG13xMAGDlyJHbt2oWuXbuiQYMGOHnyJH788UecPn0ax44dQ3BwMM6fPw9/f380atQIx44dw3fffYfr169rDGGJiYlo3bo1zMzM0KVLFzx+/BhHjhzB7NmzcfDgQRw8eBBWVlaK/qmpqXjrrbdgb2+PJk2aoHnz5sjNzcW5c+fw/fffY8uWLTh58iQaNmyodn/btm3DypUr4e3tjR49eiAjI6PUu9br1q3D22+/DQBo27YtunXrhsePH+Pff//Fli1bULt2bXTp0kXRv6CgAH379lXU3q1bN0gkEpw8eRLff/89fv31V+zfvx+tWrVSu7+ZM2dizpw56NChA3r37o0rV67g5MmT6Nu3L37//Xe89tprJdZLVCWIRGTUjhw5IgIQS/txfrZfy5YtxeTkZMWyx48fiz179hQBiOPGjVNaLzExUbFe3bp1xatXr6pse9++fSIA0crKSjxw4IDSsp9//lkEIJqbm4sXL15UWubp6SkCEBMTE9XWHBoaKgIQ16xZo9QeHh4uAhB9fHzEpKQkpeN4/fXXFfXOnDlTab2ZM2cqlo0aNUrMz89XLLt796740ksviQDEzZs3K623Zs0axXrt27cX09PTFcsePXokdujQQQQgDh8+XO16oaGhao+v+Nx6enqqLCvLNdXE3d1dBCB+8cUXWq0/bNgwEYDYrl078eHDh4r27OxssVevXiIAsUOHDkrrPPs9adCggXj79m3FsocPH4qNGjUSAYjNmjUT27Ztq7TdW7duibVq1RIBiMePH1fa7rPXbMCAAWJeXp5i2b1798SXX35ZBCB+/PHHSutJpVJx9+7dYkFBgVL7kydPxOnTp4sAxN69e6sce9euXRX7W7Zsmdrzo+m6enl5iQDEY8eOqayTmpoqxsfHK7VNmzZNcb6e/Rl48uSJOGbMGBGA6OXlpXIMxfXVrFlTjI2NVVpWfL5efvlltbUTVTUMskRGrrxBVhAE8dy5cyrLY2NjRQBi/fr1ldqfDSjr169Xu+3u3buLAMQpU6aoXd63b18RgDh27Fildm2CbF5enmhraysCEPfv36+yTlpammhjY1NikK1bt66Ym5ursu78+fNFAOLo0aOV2p8Nsn///bfKeufPnxcFQRBNTEzEe/fuqaz3ooOslZWVCEBcuXJlude9c+eOaGJiIgqCIP7zzz8qy//991/F9k+cOKFof/Z78scff6ist3jxYsX378KFCyrLJ06cKAIQZ8+erdRefM2sra2V/uer2J49e0QAokQiER8/flzm43RzcxNNTExEqVSq1F4cZAMCAjSuq+m62tjYiPb29mXa/+PHjxXf44iICJXlubm5Yp06dUQA4qZNm5SWFZ/n7777TmW9/Px80d7eXgQg3r17t0y1EBkzjpElqmY8PDzQokULlfYmTZoAgMZxeQAwePBglbanT5/ixIkTAKBxLOiYMWMAAEeOHClvuSri4uKQk5OD2rVrIygoSGW5k5MTAgMDS9xG9+7d1T6hX9o5aNGiBV555RWV9mbNmqFly5aQy+Vqx0Eak5iYGMjlcrRs2RLNmzdXWf7SSy+hZ8+eANRfTzMzM7XXpVGjRgCKvn++vr4al2sauxoUFKR2mETfvn3h6OgIqVSK+Ph4leX//PMPFi9ejIk
TJ2L06NEYNWoURo0ahadPn0Iul+PGjRtq9/f666+rbS9J27ZtkZWVhZCQEMTFxUEul2vse/bsWeTk5MDBwQH9+vVTWW5jY4Phw4cD0Pxzo249S0tL1K9fH0DJP8tEVQXHyBJVMx4eHmrbJRIJgKJxe+o4OzurDX/p6enIz88HAJXpnYo1aNAAgG7+w/rvv/8CgMbxmaUtA0o/B8XH8zxNx1e8LD4+XlGfPjk5OeHevXtIS0sr97rF16ikYy3perq6usLMTPU/LcVjezWdezs7OwDanft69eohPT1d6dzn5ubirbfews6dOzWuBwBSqVTjNstr+fLl6Nu3LzZs2IANGzbAzs4Obdq0QUBAAN566y2lY6/oeQa0/x4TVSW8I0tUzZT2BLsmzz/V/iKUdEfr+RkTyroM0P4clIVYjhcYlHR8FdG6dWsAwJkzZypl+yUp7dy+qHM/ffp07Ny5E97e3ti1axfu37+PgoICiEVD6uDn56eyzrO0+b43adIEV69exR9//IEPPvgAvr6+OHbsGD777DM0atQIGzdu1O7ANKjMc0lkLPhTQEQV4ujoqHia+9atW2r7FLe/9NJLSu0WFhYAgOzsbLXr3blzR6WteBslTUNVlimqtJGYmFjqPuvWrato0+b4dGHAgAEAiqZDS01NLde6xedX07V8dtnz17Mylffcb926FQDw22+/YcCAAXBzc1NcDwC4fv16pdRpZmaG3r1745tvvsHJkyfx8OFDzJw5E0+ePMG7776rmGKs+NyVdFz6OM9ExoZBlogqxMzMDJ06dQKg+c1iv/zyCwCgW7duSu3F/4FWN39tSkqK2jGPrVu3ho2NDR48eICDBw+qLH/48CGioqLKdQxldf78eZw/f16l/dKlS4iPj4eJiYnS9ErFx3flyhW12/vjjz807qt4+q+nT5+Wu84333wT9erVw5MnTzB+/PhS7/zGxcUp5mLt0qULTExMcO7cOfzzzz8qfZOTkxEZGQlA9XpWpgMHDqgdKvHnn38iPT0ddnZ2ijvRABRz1np6eqqss3//fjx8+LDyin2GRCLBrFmzULNmTeTl5eHatWsAgFdffRW2trbIyMhARESEynqPHz/Gli1bALzY80xkbBhkiajCPvjgAwDAihUrcOjQIaVla9euRUREBMzNzTFp0iSlZT169AAALFiwQOlFBA8ePEBISAhycnJU9mVjY4N33nkHABAeHq50x7GgoAATJkxQO7G+LoiiiPHjxyu9ECArKwvjx4+HKIoYPHgw3N3dFcvatm0LiUSChIQEbNiwQWlb27Ztw3fffadxX8V3F5+dq7eszM3NsXXrVlhZWWHnzp0YOHCg2jt/GRkZmDFjBjp27KgYG+3h4YEhQ4ZAFEW8++67SE9PV/TPzc3FuHHjkJ+fjw4dOqBDhw7lrk1bjx8/xvjx45VefpCUlKT47r333ntK88gWP7j3/fffK23n6tWreO+993ReX15eHhYvXowHDx6oLDt27BgyMzNhamqquK5WVlYICwsDUPTz8+zdeZlMhkmTJiElJQVeXl5aPXhGVF3wYS8iqrBevXrhs88+w9y5cxEYGIiOHTvCw8MDV65cQXx8PExNTbFy5Uo0bdpUab2wsDCsWrUK8fHxaNy4Mfz8/JCbm4szZ87Aw8MDAwcOxK5du1T29+WXX+LEiROIi4tDw4YNERAQACsrKxw/fhxPnjxRvFDg2V8l60L//v1x8eJF1K9fH926dVO8ECEjIwONGjXCDz/8oNTf2toas2fPRnh4OEJCQrBixQq89NJLuHz5MhISEvDZZ59hzpw5avc1ePBgfPPNN+jRowcCAgIUD0MtWLAAjo6Opdbapk0bxMTEYMiQIdizZw/27t2Lli1bon79+jAxMcGdO3dw9uxZFBYWon79+kqT/S9btgxXrlzBX3/9hQYNGqBbt24wMzNDdHQ0Hjx4AC8vL2zatKkCZ7L8QkJCsHfvXtSvXx+dO3dGfn4+Dh8+jNzcXPj5+WH27NlK/WfOnInXX38dM2bMwNatW9G0aVOkpaXh2LFj6Ny5M9zc3HDy5Emd1ffkyRN88MEHmDp1Kpo1a4ZGjRrB3Nwct2/fRmxsLADg008/VXphxuzZs3H27FkcOnQITZo0Qbdu3WBnZ4dTp07h7t27cHR0xLZt23T+PSaqSnhHloh0Ys6cOdi3bx969eqleKNXUlIShgwZgpMnT6p9q1fNmjVx4sQJhISEAAD27duHmzdvYty4cTh58qTG99nb2tri6NGj+OSTT+Ds7IzIyEjExMSge/fuiIuLg6mpKQCgdu3aOj3GWrVqITY2FsOGDcOZM2ewd+9e1KhRA++//z5iY2Ph7Oysss7kyZOxbt06tGrVCn///TcOHDiAOnXq4MCBAyW+6WzOnDn46KOPULNmTezatQurV6/G6tWrNY63VadNmza4du0afv75Z/Tt2xepqanYs2cPIiIikJaWhtdeew2//vorrly5ovRwk6OjI06ePIl58+bBy8sLBw4cwN69e1G7dm188skniIuL0+qp/orw8vLC2bNn0a1bN8TExGD//v1wdXXF559/joMHD6o8nDVo0CBER0eje/fuSE5OVhzzrFmzsG/fPrVvbqsIW1tbrFy5EsOGDUNBQQGioqKwa9cupKWlYdCgQTh06JBK2La0tERkZCSWL1+OFi1a4NixY9i5cyfMzc0xceJE/PPPP0rDJYhIlSCW5xFbIiIDJ5PJ4Ovri2vXriEuLk7j6z3LY+3atXj77bcRGhqqcRwwVY5Zs2Zh9uzZmDlzJmbNmqXvcojIwPCOLBEZJXUTzufk5GDChAm4du0amjdvrpMQS0REhotjZInIKA0ePBh5eXlo1qwZnJ2dkZaWhnPnziEjIwMODg68c0pEVA3wjiwRGaUpU6agadOmSEhIwM6dO3Hq1Ck4Ozvj/fffx7lz59CyZUt9l0hERJWMY2SJiIiIyCjxjiwRERERGSUGWSIiIiIyStXuYS+5XI6kpCTY2dlBEAR9l0NEREREzxFFEdnZ2XBzc4OJieb7rtUuyCYlJSm9QpKIiIiIDNO9e/cUr3ZWp9oF2eLXPN67dw8SiUTP1dCLJJPJcODAAQQFBen8rT5EusLvqeHgtdCsqp4bYzwuQ625onVJpVK4u7srcpsm1S7IFg8nkEgkDLLVjEwmg42NDSQSiUH9sBM9i99Tw8FroVlVPTfGeFyGWrOu6iptGCgf9iIiIiIio8QgS0RERERGiUGWiIiIiIwSgywRERERGSUGWSIiIiIyStVu1gIiIiJ6cURRRGFhIZ4+farvUkolk8lgZmaG/Px8FBYW6rucMjHUmtXVZWZmBlNTU52+kIpBloiIiHROFEVkZmbiwYMHBhWwSiKKIlxcXHDv3j2jefunodasqS5TU1M4OzvD3t5eJ/UyyBIREZHOpaSkIDMzUzFvu5mZmUEFLXXkcjlycnJga2tb4mtRDYmh1vx8XaIo4unTp5BKpUhOTsbjx4/h6upa4f0wyBIREZFOFRYWIisrC05OTqhdu7a+yykzuVyOJ0+ewM
rKyqBCYUkMtWZNddnZ2cHS0hIPHz6Es7MzTE1NK7QfwzliIiIiqhJkMhlEUUSNGjX0XQoZoBo1akAURchksgpvi0GWiIiIKoWhDyUg/dDl94JBloiIiIiMEoMsERERERklPuxVmeSFwJ2TQE4qYFsH8OwAmFRsUDMRERERFeEd2cqSEAEs8QXW9QV+H1P0zyW+Re1EREREpRg1alSFxpPevn0bgiBg1qxZuiuqBLNmzYIgCLh9+/YL2R/AIFs5EiKArSGANEm5XZpc1M4wS0REVOVIpVLMmTMHrVq1gp2dHWxsbODj44OpU6ciNTVV3+VVSQYbZOfPnw9BEDB58mRFW35+PsLCwuDo6AhbW1sMHjzY8L4Y8kIgchoAUc3C/9oiPy7qR0RERFXCtWvX0KJFC8ycORP169fH/PnzsWTJErRv3x5Lly5F06ZNcerUqXJtc9WqVXj8+LHWNXl6euLx48f47LPPtN6GoTPIIHvmzBn8+OOPaN68uVJ7eHg49uzZg23btiE6OhpJSUkYNGiQnqrU4M5J1TuxSkRAer+oHxEREZVboVzEqZvp2H3uPk7dTEehXN3NoxcnLy8P/fr1w/3797Fnzx5s374dYWFhGDduHH755RecPHkST58+xYABA0q9ASeKInJycgAA5ubmsLKy0rouQRBgZWUFM7Oq+0iUwQXZnJwcvPnmm1i1ahVq1aqlaM/KysLq1auxePFiBAQEoHXr1lizZg1OnjyJ2NhYPVb8nJwy3iEuaz8iIiJSiLyYjE4LDuONVbGYtOUc3lgVi04LDiPyYrLealq9ejWuXbuGyZMno0+fPirLX331VXz11Vd48OABvv76a0X70aNHIQgC1q5di2XLlsHHxwdWVlb45ptvAGgeIxsdHQ0/Pz9YW1vDzc0NH3/8MS5duqQyHlbdGNln2/bu3Ys2bdrAysoKrq6umDp1Kp4+faq0r9OnT2PUqFF4+eWXYWNjAzs7O3Ts2BE7d+6s4FnTDYOL6GFhYejTpw969OiBuXPnKtrj4uIgk8nQo0cPRZu3tzc8PDxw6tQptG/fXu32CgoKUFBQoPgslUoBFL11RBdvlFBh7QyYlOH/nqydgcrYP2lUfL0r5boT6Qi/p4aD10Kz0s5N8Zu95HI55HK5zvYbeTEFYZv/Vhm8l5KVj/Eb47FsREsE+7povX1RFBX/LE/d27dvBwC88847GtcLCQnB5MmT8fvvv2PhwoUAoOi7ZMkSpKen45133kGdOnXg7u4OuVyuqOfZbR4/fhxBQUGoVasWpk2bBnt7e2zZsgVvv/22Su3F/1TX9ueff2L58uV49913MWrUKEREROCbb75BzZo1MX36dMX+duzYgStXrmDIkCHw8PBARkYG1q9fj0GDBmHDhg0YMWKEyvl7tnZ157J4uUwm0/iK2rL+3BlUkN2yZQvi4+Nx5swZlWUpKSmwsLBAzZo1ldrr1KmDlJQUjducN28eZs+erdJ+4MAB2NjYVLhmtVr8VHqfS4+AS39Wzv6pRFFRUfougahU/J4aDl4LzTSdGzMzM7i4uCAnJwdPnjzRyb4K5SJm77mk8QkUAcDsPZfQrq41TE0q9uao7OzscvW/cOEC7Ozs4OzsrLhhpk6jRo2QkJCApKQk2NraIi8vDwBw584dnD59Gk5OToq+UqlUEeae3WZ4eDgEQUBkZCTq1asHABg5ciT69u0LoOgGXnH/4iEK6touXbqEU6dOwcPDAwAwYsQIdOjQAd9//z3CwsIU+5s4cSI+/vhjpeMIDQ1F165dMXfuXMV+i/dTvI/ic6juXD558gSPHz9GTEyMyh3gYsXnpjQGE2Tv3buHSZMmISoqqkLjQZ43ffp0TJkyRfFZKpXC3d0dQUFBkEgkOtuPkit/Ajvf/e/Dsz9y//1gvfYj4N27cvZNGslkMkRFRSEwMBDm5ub6LodILX5PDQevhWalnZv8/Hzcu3cPtra2OvtveuytdKRmaw7FIoDU7Ce4mvEU7es7arUPURSRnZ0NOzu7ck17lZ2dDRcXl1JzRfGQSVEUIZFIFDfUQkJC0KBBA5X+xee2eLupqamIj4/HkCFDFM8RFdccHh6ON954A5aWlor+tra2AKC2bcCAAfD19VXaX/fu3bFs2TKYmJgo+j17THl5eXj8+DHMzMzQvXt3/Pjjj0p9LC0tFfuws7PTeC7z8/NhbW2NLl26aPx+lPQ/BM8ymCAbFxeHtLQ0tGrVStFWWFiImJgY/PDDD9i/fz+ePHmCzMxMpbuyqampcHHR/GsES0tLxYl9lrm5eeX9i6nZAMBUKJq94NkHvyQvAcHzAZ/+lbNfKpNKvfZEOsLvqeHgtdBM07kpLCyEIAgwMTGBiYluHsd5kFO2O7sPcp5ovc/iX4EX115WEokEUqm01HWKw1mtWrWUzk3jxo3VrlscAIuX3blzB0DR0MrituKaGzdurFJ78T/VtTVo0EBln7Vr1wYAPHr0SBFO09LS8Nlnn2H37t1IS0tTe0zFuezZeov/ru5cFi8v6WerrD9zBhNku3fvjgsXLii1vf322/D29sa0adPg7u4Oc3NzHDp0CIMHDwYAXL16FXfv3oWfn58+Si6ZT3/Auw/f7EVERKQDznZlu7Nb1n665Ovri5iYGNy4cQMNGzZU2ycvLw9XrlxBvXr1FHc7i1XaUMcSaBqbCiiPFQ4KCsLly5cxadIkvPrqq7C3t4epqSnWrFmDzZs363QMtDYMJsja2dmp3OKuUaMGHB0dFe1jxozBlClT4ODgAIlEgokTJ8LPz0/jg156Z2IKeHXWdxVERERGr62XA1ztrZCSla92nKwAwMXeCm29HF50aRg0aBBiYmLw888/Y/78+Wr7rF+/HjKZrELThhaPib169arKMnVtFXX+/Hn8888/+Pzzz1WeN/r55591vj9tGNz0WyX59ttv0bdvXwwePBhdunSBi4sLduzYoe+yiIiIqJKZmgiY2c8HgOKJE4XizzP7+VT4QS9tvPPOO2jYsCEWL16MyMhIleXx8fGYPn06nJycMHXqVK334+LigldffRW7d+/GrVu3FO0ymQzff/+91tvVpPiubfEd2mIXL17k9FtlcfToUaXPVlZWWLZsGZYtW6afgoiIiEhvgn1dsWJkK8zek4DkrHxFu4u9FWb280Gwr6te6qpRowYiIiIQHByMPn36YPDgwfD394eZmRlOnz6NDRs2wNbWFrt27SrxuZ6y+OabbxAYGIgOHTrg//7v/yCRSPDrr78qje/VlSZNmqBp06ZYuHAh8vLy0LhxY1y7dg0//vgjmjVrhri4OJ3tS1sGHWSJiIiInhXs64pAHxecTsxAWnY+nO2KhhPo407ss5o0aYLz589j6dKl2LFjB/78808UFhbC09MTEydOxIcffljhEAsAXbt2RWRkJD755BN89dVXqFmzJgYOHIjQ0FB06NAB1tbWOjiaIqampvjjjz/w4YcfYt26dcjNzYWvry/WrVuHf/75xyCCrCA+f7+4ipNKpbC3t0dWVlblTb9FBkkmk+HPP/9E7969+QQyGSx+T
w0Hr4VmpZ2b/Px8JCYmwsvLS6dTalY2uVwOqVQKiUSis9kWKltxzVFRURg6dCh+/fVXDB8+XN9llXguy/L9KGteM46rREREREQQRRH5+flKbTKZDEuWLIGZmRn8/f31U5iecGgBERERkZEoKCiAp6cn3nzzTTRu3BgPHz7Er7/+ikuXLmHatGk6Gb5gTBhkiYiIiIyEubk5+vTpg927dyM5ORmiKKJhw4b44YcflF4tW10wyBIREREZCVNTU/zyyy+Kz8+ORa2OOEaWiIiIiIwSgywRERERGSUGWSIiIiIySgyyRERERGSUGGSJiIiIyCgxyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjJJBBdkVK1agefPmkEgkkEgk8PPzw759+xTL/f39IQiC0p/33ntPjxUTERERFZFKpZgzZw5atWoFOzs72NjYwMfHB1OnTkVqaqq+y6uSDOoVtXXr1sX8+fPRqFEjiKKIdevWYcCAAfj777/RtGlTAMDYsWPxxRdfKNaxsbHRV7lEREREAIBr166hZ8+euHPnDgYNGoQxY8bA3NwcsbGxWLp0KdasWYM9e/bAz89P36VWKQYVZPv166f0+csvv8SKFSsQGxurCLI2NjZwcXHRR3lERERkCOSFwJ2TQE4qYFsH8OwAmJjqrZy8vDz069cP9+/fx549e9CnTx/FsnHjxuH//u//0KNHDwwYMAAXLlxAnTp1dLbv7OxsnW3LGBlUkH1WYWEhtm3bhtzcXKX/e9m0aRM2btwIFxcX9OvXDzNmzCjxrmxBQQEKCgoUn6VSKQBAJpNBJpNV3gGQwSm+3rzuZMj4PTUcvBaalXZuZDIZRFGEXC6HXC7X7c4v74Gw/2MI0iRFkyhxg9hzPtCkXwkrlk4URcU/y1P3zz//jGvXruHDDz9Er169VNZt1aoVvvzyS0yYMAELFy7E119/DQBYu3YtxowZg0OHDsHf319pnYCAANy+fRu3bt1StNWvXx/16tXDokWLMH36dPz1119wcHDA33//jcePH2PhwoXYsmUL7t27BwsLC7i7u6Nnz55YuHChlmdEeyWdS7lcDlEUIZPJYGqq/n9AyvpzJ4jFezIQFy5cgJ+fH/Lz82Fra4vNmzejd+/eAICffvoJnp6ecHNzw/nz5zFt2jS0bdsWO3bs0Li9WbNmYfbs2Srtmzdv5rAEIiKiSmBmZgYXFxe4u7vDwsJCZ9s1v7EPNnvHAxAhPNNe/Cmv7wrIGvbS2f7Kqk+fPjh58iTi4uJQv359tX3y8vJQr149uLq64p9//gFQlEXCwsKwZ88edOrUSal/3759cffuXZw/f17R1rx5c5iYmCAzMxMDBw7EK6+8gtzcXISFhWHixInYuHEjhg8fjrZt2+Lp06e4desWTp06haNHj1basWvjyZMnuHfvHlJSUvD06VO1ffLy8jBixAhkZWVBIpFo3JbBBdknT57g7t27yMrKwvbt2/Hzzz8jOjoaPj4+Kn0PHz6M7t2748aNG2jQoIHa7am7I+vu7o6HDx+WeGKo6pHJZIiKikJgYCDMzc31XQ6RWvyeGg5eC81KOzf5+fm4d+8e6tWrBysrK93sVF4I4bvmgDRJKcQWEyEAEjeI7/+j9TADURSRnZ0NOzs7CIK6vajn5OQEmUyGzMzMEvu98soruHDhArKysmBra6vVHdk7d+7gxx9/xDvvvKNUc/369dGuXTv88ccfZa67MpV0LvPz83H79m24u7tr/H5IpVLUrl271CBrcEMLLCws0LBhQwBA69atcebMGSxduhQ//vijSt927doBQIlB1tLSEpaWlirt5ubm/BdTNcVrT8aA31PDwWuhmaZzU1hYCEEQYGJiAhMTHU2QdOcE8MxwgucJEAHpfQj3YgGvzlrtovhX4MW1l5VUKoWLi0up6xQHsuzsbEgkEkX/ks7T8+0ODg4YM2aMor24Znt7eyQkJCAhIQG+vr5lrr2ylHQuTUxMIAhCiT9bZf2ZM6jpt9SRy+VKd1Sfde7cOQCAq6vrC6yIiIiIXricMk5fVdZ+OiSRSBTP4JSkuI+9vb3W+2rQoIHacaWLFy/Go0eP0KxZMzRo0ADvvPMOdu/erfsxygbGoILs9OnTERMTg9u3b+PChQuYPn06jh49ijfffBM3b97EnDlzEBcXh9u3byMiIgIhISHo0qULmjdvru/SiYiIqDLZlvFJ/7L20yFfX19IpVLcuHFDY5+8vDxcuXIF9erVg62tLQCUOHxB09hRTc/3DBgwALdv38aGDRsQEBCAQ4cOYeDAgfD398eTJ0/KcTTGxaCCbFpaGkJCQtC4cWN0794dZ86cwf79+xEYGAgLCwscPHgQQUFB8Pb2xgcffIDBgwdjz549+i6biIiIKptnB0DiBqgdIYuidslLRf1esEGDBgEomr1Ak/Xr10Mmkyn6AkXDBAAgIyNDpX9iYmK563BwcMDIkSOxatUq3Lp1Cx999BGOHTuG3bt3l3tbxsKgxsiuXr1a4zJ3d3dER0e/wGqIiIjIYJiYAsELgK0hKAqzzz6r/l+4DZ6vl/lk33nnHfzwww9YvHgx/P39ERwcrLQ8Pj4e06dPh5OTE6ZOnapof/nllwEABw8eVAq4v/76K5KSkuDp6Vmm/RcWFiIzM1MRjIGiu70tW7YEoD4oVxUGFWSJiIiINPLpDwxdD0ROU37wS+JWFGJ9+uulrBo1aiAiIgLBwcHo06cPBg8eDH9/f5iZmeH06dPYsGEDbG1tsWvXLqWXOjVu3Bg9evTAjz/+CFEU8corr+DcuXPYuXMnGjZsWOa5VHNycuDt7Y3+/fujZcuWcHZ2RmJiIlasWIFatWqpvHCqKmGQJSIiIuPh0x/w7mNQb/YCgCZNmuD8+fNYunQpduzYgT///BOFhYXw9PTExIkT8eGHH6p9M+mGDRswceJEbNq0CRs2bEDnzp1x5MgRjB8/Hrdv3y7Tvq2trTFp0iQcPnwYBw8eRE5ODlxdXdG/f39Mnz4dbm5uOj5aw8EgS0RERMbFxFTrKbYqk729PT7//HN8/vnnZV7HxcUF27ZtU2lX9xIDTcHWwsICX331le6mOjMi1e+IiYiIiKhKYJAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklBlkiIiIiMkoMslQ9yAuBO6eK/n7nVNFnIiKqVKIolt6Jqh1dfi8YZKnqS4gAlvgCm4cUfd48pOhzQoR+6yIiqqLMzc0hCAJyc3P1XQoZoNzcXAiCAHNz8wpviy9EoKotIeK/93KLgInV/9qlyUXtQ9fr7ZWGRERVlampKezt7fHgwQMUFBRAIpHAzMwMgiDou7QSyeVyPHnyBPn5+UbzcgFDrfn5ukRRxNOnTyGVSiGVSlGzZk2Ymlb8bWwMslR1yQuL3scNdb/CEAEIQOTHRa861POrDYmIqhoXFxdYW1sjLS0NUqlU3+WUiSiKePz4MaytrQ0+dBcz1Jo11WVqagpXV1fY29vrZD8MslR13TkJSJNK6CAC0vtF/QzwVYdERMZMEATUrFkT9vb2
KCwsxNOnT/VdUqlkMhliYmLQpUsXnfza+0Uw1JrV1WVmZgZTU1OdBm4GWaq6clJ124+IiMpNEASYmZnBzMzwI4epqSmePn0KKysrgwqFJTHUml9UXYYzmIJI12zr6LYfERERGRQGWaq6PDsAEjcAmn6FIQCSl4r6ERERkdFhkKWqy8QUCF7w34fnw+x/n4Pn80EvIiIiI8UgS1WbT/+iKbYkrsrtEjdOvUVERGTkDH/kNVFF+fQvmmLr1gng0iNgxDagfkfeiSUiIjJyBnVHdsWKFWjevDkkEgkkEgn8/Pywb98+xfL8/HyEhYXB0dERtra2GDx4MFJT+cQ5lYGJKeDpV/R3Tz+GWCIioirAoIJs3bp1MX/+fMTFxeHs2bMICAjAgAEDcOnSJQBAeHg49uzZg23btiE6OhpJSUkYNGiQnqsmIiIiIn0wqKEF/fr1U/r85ZdfYsWKFYiNjUXdunWxevVqbN68GQEBAQCANWvWoEmTJoiNjUX79u31UTIRERER6YlBBdlnFRYWYtu2bcjNzYWfnx/i4uIgk8nQo0cPRR9vb294eHjg1KlTGoNsQUEBCgoKFJ+LX5Mnk8kgk8kq9yDIoBRfb153MmT8nhoOXgvNquq5McbjMtSaK1pXWdczuCB74cIF+Pn5IT8/H7a2tti5cyd8fHxw7tw5WFhYoGbNmkr969Spg5SUFI3bmzdvHmbPnq3SfuDAAdjY2Oi6fDICUVFR+i6BqFT8nhoOXgvNquq5McbjMtSata0rLy+vTP0MLsg2btwY586dQ1ZWFrZv347Q0FBER0drvb3p06djypQpis9SqRTu7u4ICgqCRCLRRclkJGQyGaKiohAYGGhQr/Ejeha/p4aD10KzqnpujPG4DLXmitZV/Bv00hhckLWwsEDDhg0BAK1bt8aZM2ewdOlSDBs2DE+ePEFmZqbSXdnU1FS4uLho3J6lpSUsLS1V2s3NzQ3qgtOLw2tPxoDfU8PBa6FZVT03xnhchlqztnWVdR2DmrVAHblcjoKCArRu3Rrm5uY4dOiQYtnVq1dx9+5d+Pn56bFCIiIiItIHg7ojO336dPTq1QseHh7Izs7G5s2bcfToUezfvx/29vYYM2YMpkyZAgcHB0gkEkycOBF+fn6csYCoIuSFwJ2TQE4qYFsH8OzAeXaJiMgoGFSQTUtLQ0hICJKTk2Fvb4/mzZtj//79CAwMBAB8++23MDExweDBg1FQUICePXti+fLleq6ayIglRACR0wBp0v/aJG5A8AK+vpeIiAyeQQXZ1atXl7jcysoKy5Ytw7Jly15QRURVWEIEsDUEgKjcLk0uah+6nmGWiIgMmsGPkSWiSiAvLLoT+3yIBf7XFvlxUT8iIiIDxSBLVB3dOak8nECFCEjvF/UjIiIyUAyyRNVRTqpu+xEREekBgyxRdWRbR7f9iIiI9IBBlqg68uxQNDsBBA0dBEDyUlE/IiIiA8UgS1QdmZgWTbEFQDXM/vc5eD7nkyUiIoPGIEtUXfn0L5piS+Kq3C5x49RbRERkFAxqHlkiesF8+gPeffhmLyIiMkoMskTVnYkp4NVZ31UQERGVG4cWEBEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklBlkiIiIiMkoMskRERERklBhkiYiIiMgoMcgSERERkVFikCUiIiIio8QgS0RERERGiUGWiIiIiIwSgywRERERGSUGWSIiIiIySgyyRERERGSUGGSJiIiIyCgZVJCdN28e2rRpAzs7Ozg7O2PgwIG4evWqUh9/f38IgqD057333tNTxURERESkLwYVZKOjoxEWFobY2FhERUVBJpMhKCgIubm5Sv3Gjh2L5ORkxZ+FCxfqqWIiIiIi0hczfRfwrMjISKXPa9euhbOzM+Li4tClSxdFu42NDVxcXF50eURERERkQLQOsteuXcOlS5eQlpYGQRDg5OQEX19fNGrUSGfFZWVlAQAcHByU2jdt2oSNGzfCxcUF/fr1w4wZM2BjY6N2GwUFBSgoKFB8lkqlAACZTAaZTKazWsnwFV9vXncyZPyeGg5eC82q6rkxxuMy1JorWldZ1xNEURTLutHLly9j5cqV2L59O1JSUgAAxasLggAAqFOnDoYOHYp3330XTZo0KW/dCnK5HP3790dmZiaOHz+uaP/pp5/g6ekJNzc3nD9/HtOmTUPbtm2xY8cOtduZNWsWZs+erdK+efNmjeGXiIiIiPQnLy8PI0aMQFZWFiQSicZ+ZQqyN2/exLRp07Bz505YW1ujc+fO8PPzQ4MGDeDo6AhRFJGRkYEbN24gNjYWx44dw+PHjzFo0CAsWLAA9evXL/cBjB8/Hvv27cPx48dRt25djf0OHz6M7t2748aNG2jQoIHKcnV3ZN3d3fHw4cMSTwxVPTKZDFFRUQgMDIS5ubm+yyFSi99Tw8FroVlVPTfGeFyGWnNF65JKpahdu3apQbZMQwt8fHzQrFkzrF27FoMGDUKNGjVK7J+bm4vt27dj6dKl8PHxQX5+frmKnzBhAvbu3YuYmJgSQywAtGvXDgA0BllLS0tYWlqqtJubmxvUBacXh9eejAG/p4aD10KzqnpujPG4DLVmbesq6zplCrLbtm1D//79y7zzGjVqIDQ0FKGhodi9e3eZ1xNFERMnTsTOnTtx9OhReHl5lbrOuXPnAACurq5l3g8RVQPyQuDOSSAnFbCtA3h2AExM9V0VERHpUJmCbHlC7PMGDBhQ5r5hYWHYvHkzdu/eDTs7O8U4XHt7e1hbW+PmzZvYvHkzevfuDUdHR5w/fx7h4eHo0qULmjdvrnWNRFTFJEQAkdMAadL/2iRuQPACwEf7f58REZFhMah5ZFesWIGsrCz4+/vD1dVV8ee3334DAFhYWODgwYMICgqCt7c3PvjgAwwePBh79uzRc+VEZDASIoCtIcohFgCkyUXtCRH6qYuIiHROq+m3Zs6cid9//x0XL15Uu7x58+YYOnQoPvvss3Jtt7Tnztzd3REdHV2ubRJRNSIvLLoTC3X/LhEBCEDkx4B3Hw4zICKqArS6I7tz504EBgZqXB4YGIjt27drXRQRkVbunFS9E6tEBKT3i/oREZHR0yrIJiYmwtvbW+Pyxo0bIzExUeuiiIi0kpOq235ERGTQtB4jm5mZqXHZo0ePUFhYqO2miYi0Y1tHt/2IiMigaRVkmzZtqnFaLVEUERERUeIdWyKiSuHZoWh2AggaOgiA5KWifkREZPS0CrJjxoxBbGwsRo0ahQcPHijaHzx4gNGjRyM2NhZjxozRWZFERGViYlo0xRYA1TD73+fg+XzQi4ioitBq1oKxY8ciOjoa69evx4YNGxQvI0hOToYoihg2bBjGjx+v00KJiMrEpz8wdL2GeWTncx5ZIqIqRKsgCwAbN25E//79sWnTJty4cQMA0KZNG7z55pt4/fXXdVYgEVG5+fQvmmKLb/YiIqrStA6yADB06FAMHTpUV7UQEemOiSng1VnfVRARUSWq8Ju9CgoKcP/+fTx58kQX9RARERERlYnWQTY+Ph4
BAQGws7ODh4cHjh8/DgBIS0tD9+7dcfDgQZ0VSURERET0PK2C7Llz59C5c2fcvHkTISEhSsucnZ3x+PFjrFu3TicFEhERERGpo1WQ/fzzz+Hm5oZLly5h/vz5EEXl95p3794dp0+f1kmBRETVhrwQuHOq6O93ThV9JiIijbQKsseOHcPYsWNha2sLQVCdeNzDwwNJSSW975yIiJQkRABLfIHNQ4o+bx5S9DkhQr91EREZMK2CbH5+Puzt7TUul0qlWhdERFTtJEQAW0OU570FAGlyUTvDLBGRWloF2QYNGiAuLk7j8sOHD8PHx0frooiIqg15YdHLGyCqWfhfW+THHGZARKSGVkF2xIgR2LBhg9LMBMVDDBYtWoTIyEi89dZbuqmQiKgqu3NS9U6sEhGQ3i/qR0RESrR6IcKHH36IqKgo9OzZE97e3hAEAeHh4Xjw4AFSUlIQGBiI//u//9N1rUREVU9Oqm77ERFVI1rdkbWwsEBUVBS++eYbWFtbw8rKCteuXUPt2rWxcOFC7N27FyYmFX7XAhFR1WdbR7f9iIiqEa1fUWtmZobw8HCEh4frsh4iourFswMgcSt6sEvtOFmhaLlnhxddGRGRwdP5bdOCggJdb5KIqOoyMQWCF/z34fnpDP/7HDy/qB8RESnRKsju27cPs2bNUmpbvnw5JBIJatSogREjRkAmk+miPiKiqs+nPzB0PSBxVW6XuBW1+/TXT11ERAZOq6EFX3/9NZydnRWfL1++jEmTJqFBgwbw8vLCb7/9hrZt22Ly5Mm6qpOIqGrz6Q949wFunQAuPQJGbAPqd+SdWCKiEmh1R/by5ct49dVXFZ9/++03WFtb4/Tp09i3bx+GDRuGdevW6axIIqJqwcQU8PQr+runH0MsEVEptAqyjx49Qu3atRWfDx48iICAAEgkEgCAv78/EhMTdVMhEREREZEaWgXZ2rVr486dOwCA7OxsnDlzBp07d1Ysl8lkKCzkW2iIiIiIqPJoNUbWz88PK1euRNOmTbFv3z48ffoUvXr1Uiy/ceMGXF1dS9gCEREREVHFaBVkZ8+ejW7dumHo0KEAgNDQUPj4+AAARFHEzp070a1bN91VSURERET0HK2CrI+PDy5fvowTJ07A3t4eXbp0USzLzMxEeHg4/P39dVUjEREREZGKMgfZVatWYcCAAYpptxwcHNCvXz+VfrVq1cKkSZN0VyERERERkRplfthr/PjxcHNzQ6dOnbB48WLcvHmzMusiIiIiIipRmYNscnIyVqxYAXt7e3zyySd4+eWX0bx5c8ycORN///13ZdZIRERERKSizEHWyckJY8eOxR9//IEHDx5g06ZN8PHxwZIlS/Dqq6+iXr16CA8PR0xMDERRrMyaiYiIiIi0m0fWzs4Ow4cPx5YtW/DgwQPs2bMHgYGB+PXXX+Hv7w9nZ2eMHj0ae/bsQX5+vq5rJiIiIiLSLsg+y8LCAr1798aqVauQnJyM6OhovPXWW4iOjsbAgQOxcOFCXdRJRERERKSkwkH2WYIgoHPnzoqHwf7++2+lFyWUZt68eWjTpg3s7Ozg7OyMgQMH4urVq0p98vPzERYWBkdHR9ja2mLw4MFITU3V5WEQERERkRHQKsgWFhYiLy9PqS0zMxOLFi3Cp59+iosXLwIAmjdvjjZt2pR5u9HR0QgLC0NsbCyioqIgk8kQFBSE3NxcRZ/w8HDs2bMH27ZtQ3R0NJKSkjBo0CBtDoOIiIiIjJhWL0R49913ERsbqwisMpkMnTp1QkJCAgBg8eLFOHXqFF555ZVybTcyMlLp89q1a+Hs7Iy4uDh06dIFWVlZWL16NTZv3oyAgAAAwJo1a9CkSRPExsaiffv22hwOERERERkhrYLs8ePHle6Cbt++HQkJCVi2bBlatmyJ4cOHY/78+diyZUuFisvKygJQ9PIFAIiLi4NMJkOPHj0Ufby9veHh4YFTp06pDbIFBQUoKChQfJZKpQCKwrdMJqtQfWRciq83rzsZMn5PDQevhWZV9dwY43EZas0Vraus62kVZJOTk+Hl5aX4/Mcff6Bp06YYP348AGDcuHH48ccftdm0glwux+TJk9GxY0f4+voCAFJSUmBhYYGaNWsq9a1Tpw5SUlLUbmfevHmYPXu2SvuBAwdgY2NToRrJOEVFRem7BKJS8XtqOHgtNKuq58YYj8tQa9a2rueHsGqiVZAVRRGFhYWKz0ePHlW6Q+vq6oq0tDRtNq0QFhaGixcv4vjx4xXazvTp0zFlyhTFZ6lUCnd3dwQFBUEikVRo22RcZDIZoqKiEBgYCHNzc32XQ6QWv6eGg9dCs6p6bozxuAy15orWVfwb9NJoFWS9vLywf/9+vPfeezhx4gSSk5PRrVs3xfKkpCTY29trs2kAwIQJE7B3717ExMSgbt26inYXFxc8efIEmZmZSndlU1NT4eLionZblpaWsLS0VGk3Nzc3qAtOLw6vPRkDfk8NB6+FZlX13BjjcRlqzdrWVdZ1tJq14O2338bu3bvh6+uLvn37wtnZGT179lQs/+uvv+Dt7V3u7YqiiAkTJmDnzp04fPiw0vAFAGjdujXMzc1x6NAhRdvVq1dx9+5d+Pn5aXMoRERERGSktLojO2nSJGRnZ2PXrl1o2bIlvvrqK8V40/T0dMTGxuLDDz8s93bDwsKwefNm7N69G3Z2dopxr/b29rC2toa9vT3GjBmDKVOmwMHBARKJBBMnToSfnx9nLCAiIiKqZrQKsoIgYMaMGZgxY4bKMkdHR63Hx65YsQIA4O/vr9S+Zs0ajBo1CgDw7bffwsTEBIMHD0ZBQQF69uyJ5cuXa7U/IiIiIjJeWgXZZxUUFODhw4dwcnKChYVFhbYlimKpfaysrLBs2TIsW7asQvsiIiIiIuOm9Stq4+PjERAQADs7O3h4eChmF0hLS0P37t1x8OBBnRVJRERERPQ8rYLsuXPn0LlzZ9y8eRMhISFKy5ydnfH48WOsW7dOJwUSEREREamjVZD9/PPP4ebmhkuXLmH+/PkqQwK6d++O06dP66RAIiIiIiJ1tAqyx44dw9ixY2FrawtBEFSWe3h4ICkpqcLFERERERFpolWQzc/PL/GFB2V9GwMRERERkba0CrINGjRAXFycxuWHDx+Gj4+P1kUREREREZVGqyA7YsQIbNiwQWlmguIhBosWLUJkZCTeeust3VRIRERERKSGVvPIfvjhh4iKikLPnj3h7e0NQRAQHh6OBw8eICUlBYGBgfi///s/XddKRERERKSg1R1ZCwsLREVF4ZtvvoG1tTWsrKxw7do11K5dGwsXLsTevXthYqL1FLVERERERKXS+s1eZmZmCA8PR3h4uC7rISIiIiIqE942JSIiIiKjVKY7sjExMVptvEuXLlqtR0RERERUmjIFWX9/f7UvPtBEFEUIgoDCwkKtCyMiIiIiKkmZguyaNWsquw4iIiIionIpU5ANDQ2t7DqIiIiIiMqFD3sRERERkVHSevotANi6dSt27tyJW7duAQDq16+P1157DUOHDtVJcUREREREmmgVZHNzcz
Fw4EAcPnwYoiiiZs2aAIAzZ85g69at+PHHHxEREYEaNWroslYiIiIiIgWthhZ8+umnOHToECZOnIikpCRkZGQgIyMDSUlJmDhxIo4cOYJPP/1U17USERERESloFWR/++03DBkyBEuWLIGLi4ui3cXFBUuWLMHgwYPx22+/6axIIiIiIqLnaRVkpVIpunXrpnF5QEAApFKp1kUREREREZVGqyDbvHlzXL9+XePy69evo1mzZloXRURERERUGq0e9po7dy5ee+01+Pv7o1+/fkrLdu/ejZ9//hm7du3SRX1ERFTdyQuBOyeBnFTAtg7g2QEwMdV3VURkALQKsps2bYKXlxcGDhyIxo0bo0mTJgCAy5cv4+rVq2jWrBk2btyIjRs3KtYRBAGrV6/WTdVERFQ9JEQAkdMAadL/2iRuQPACwKe//uoiIoOgVZBdu3at4u9XrlzBlStXlJafP38e58+fV2pjkCUionJJiAC2hgAQldulyUXtQ9czzBJVc1qNkZXL5eX+U1hYqOvaiYioqpIXFt2JfT7EAv9ri/y4qB8RVVt8RS0RERmeOyeVhxOoEAHp/aJ+RFRtMcgSEZHhyUnVbT8iqpK0GiMLACdPnsSyZctw/fp1pKenQxSVf/0jCAJu3rxZ4QKJiKgasq2j235EVCVpFWRXrVqF9957DxYWFmjcuDE8PDx0XRcREVVnnh2KZieQJkP9OFmhaLlnhxddGREZEK2C7FdffYVXXnkF+/fvR+3atXVdExERVXcmpkVTbG0NASBAOcwKRf8Ins/5ZImqOa3GyKampmLMmDEMsUREVHl8+hdNsSVxVW6XuHHqLSICoOUd2SZNmuDRo0e6roWIiEiZT3/Auw/f7EVEaml1R/bTTz/F8uXLkZRU0tQoREREOmBiCnh1Bpq9XvRPhlgi+o9Wd2QHDRqEvLw8+Pj4YMCAAahXrx5MTZX/xSIIAmbMmKGTIomIiIiInqdVkL127Ro+//xzSKVSbNiwQW0fBlkiIqJSyAs5bIKoArQKsv/3f/+HtLQ0LF26FJ07d0atWrV0XRcREVHVlhBR9BreZ99gJnErmq2BD7IRlYlWQfbUqVOYOnUqJk6cqOt6iIiIqr6EiP+mFntujlxpclE7Z2UgKhOtHvayt7eHk5OTrmshIiKq+uSFRXdi1b7o4b+2yI+L+hFRibQKskOHDsWOHTt0XQsREVHVd+ek8nACFSIgvV/Uj4hKpFWQfffdd5GdnY2BAwfi8OHDSExMxN27d1X+EBER0XNyUnXbj6ga02qMbNOmTSEIAs6ePYs9e/Zo7FdYyF+LEBERKbGto9t+RNWYVkH2888/hyAIuq6FiIio6vPsUDQ7gTQZ6sfJCkXLPTu86MqIjI5WQXbWrFk6LoOIiKiaMDEtmmJrawgAAcph9r+bRMHzOZ8sURloNUaWiIiIKsCnf9EUWxJX5XaJG6feIioHre7IFissLMSVK1fw6NEjyOVyleVdunSpyOaJiIiqLp/+gHcfvtmLqAK0DrILFizA/PnzIZVKNfYp78NeMTEx+PrrrxEXF4fk5GTs3LkTAwcOVCwfNWoU1q1bp7ROz549ERkZWa79EBERGQQTU8Crs76rIDJaWg0tWL16NaZPn45XXnkFc+fOhSiKmDx5MqZOnQoHBwe8+uqr+OWXX8q93dzcXLRo0QLLli3T2Cc4OBjJycmKP7/++qs2h0BERERERk6rO7IrVqxA+/btceTIEaSnp+PTTz9Fnz59EBAQgEmTJuGVV17RauqtXr16oVevXiX2sbS0hIuLizZlExEREVEVolWQvXz5MubOnQsAimm4ioOrq6srxo0bh6VLl2L06NE6KvN/jh49CmdnZ9SqVQsBAQGYO3cuHB0dNfYvKChAQUGB4nPxUAiZTAaZTKbz+shwFV9vXncyZPyeGg5eC82q6rkxxuMy1JorWldZ19MqyJqamqJGjRoAoPhnenq6Ynm9evVw/fp1bTZdouDgYAwaNAheXl64efMmPvnkE/Tq1QunTp2Cqan6wfHz5s3D7NmzVdoPHDgAGxsbnddIhi8qKkrfJRCVit9Tw8FroVlVPTfGeFyGWrO2deXl5ZWpn1ZB1sPDA4mJiQCKftXv7u6OY8eOYfjw4QCAM2fOwMHBQZtNl6h4+wDQrFkzNG/eHA0aNMDRo0fRvXt3tetMnz4dU6ZMUXyWSqVwd3dHUFAQJBKJzmskwyWTyRAVFYXAwECYm5vruxwitfg9NRy8FppV1XNjjMdlqDVXtK6SJhN4llZBtkuXLvjjjz8wb948AMCQIUOwZMkSPH78GHK5HBs3bqyUYQXPq1+/PmrXro0bN25oDLKWlpawtLRUaTc3NzeoC04vDq89GQN+Tw0Hr4VmVfXcGONxGWrN2tZV1nW0CrKTJk1CixYt8PjxY1hbW2P27Nm4du2aYmqsoKAgzJ8/X5tNl8u///6L9PR0uLq6lt6ZiIiIiKoUrYJs48aN0bhxY8XnGjVqICIiAllZWTA1NYWtra1WxeTk5ODGjRuKz4mJiTh37hwcHBzg4OCA2bNnY/DgwXBxccHNmzfx0UcfoWHDhujZs6dW+yMiIiIi41WhN3s9z97evkLrnz17Ft26dVN8Lh7bGhoaihUrVuD8+fNYt24dMjMz4ebmhqCgIMyZM0ft0AEiIiIiqtq0CrI3btzAjRs3EBwcrGj766+/MHfuXGRkZCA0NBTjxo0r93b9/f0hiqLG5fv379emXCIiIiKqgrQKstOmTUNGRoYiyD58+BC9evVCTk4OrK2tMX78eDg7Oyu9XpaIiIiISJe0ekXt2bNn0aNHD8XnX3/9FVKpFPHx8Xjw4AHatWuHpUuX6qxIIiIiIqLnaRVkHzx4ADc3N8XnyMhIdOzYEb6+vrCwsMDw4cORkJCgsyKJiIiIiJ6nVZCtUaMGMjMzARS9mvb48ePo0qWLYrm1tXWZJ7IlIiIiIyIvBO6cKvr7nVNFn4n0RKsg27RpU6xfvx7p6elYtWoVcnJyEBgYqFh+584dODk56axIIiIiMgAJEcASX2DzkKLPm4cUfU6I0G9dVG1p9bDX1KlTMWDAADg7OwMAWrZsic6dOyuWHzhwAK1atdJNhURERKR/CRHA1hAAImBi9b92aXJR+9D1gE9/vZVH1ZNWQbZPnz44fPgwdu/eDXt7e0yYMAGCIAAA0tPTUbduXYSEhOi0UCIiItITeSEQOQ2AuikyRQACEPkx4N0HMDF9wcVRdab1CxG6dOmiNC62mKOjI3bs2FGhooiIiMiA3DkJSJNK6CAC0vtF/bw6l9CPSLe0GiNLRERE1UhOqm77EelImYLsW2+9hVu3bpV749evX8fIkSPLvR4REREZENs6uu1HpCNlCrI3btxAkyZN8PrrryMiIgKPHz/W2DcnJwfbt2/HgAED0LRpUyQmJuqsWCIiItIDzw6AxA2AoKGDAEheKupH9AKVaYzsqVOnsHnzZsyZMwcDBw6EmZkZmjZtigYNGsDBwQGiKCIjIwPXr1/H5cuXUVhYCB8fH2zYsAHDhg2r7GMgIiKiy
mRiCgQv+G/WgufD7H+fg+fzQS964cr8sNeIESMwYsQIHDp0CNu2bUNMTAx27doFuVwOADAxMUGTJk3w3nvvYciQIWofBCMiIiIj5dO/aIqtyGlATsb/2iVuRSGWU2+RHpR71oLu3buje/fuAAC5XI709HQIggBHR0fFFFxERERUBfn0L5pi69YJ4NIjYMQ2oH5H3oklvanQrAUmJiZwcnJC7dq1GWKJiIiqAxNTwNOv6O+efgyxpFecfouIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKGkVZGNiYvDgwQONyx8+fIiYmBitiyIiIiIiKo1WQbZbt26IiorSuPzQoUPo1q2b1kUREREREZVGqyArimKJywsLC2FiwlELRERERFR5tE6bJb0A4eTJk6hdu7a2myYiIiIiKlWZX1G7dOlSLF26VPF58uTJ+PTTT1X6PXr0CFKpFKNHj9ZNhUREREREapQ5yNasWROenp4AgNu3b8PR0RF16tRR6iMIAnx9fdG+fXuEh4frtlIiIiIiomeUOciGhoYiNDQUAODl5YX58+ejf//+lVYYEREREVFJyhxkn5WYmKjrOoiIiIiIyoVTCxARERGRUdLqjqyJiUmJsxYAReNlnz59qlVRRERERESl0SrIhoSEqATZp0+f4ubNm/jrr7/QvHlzvPLKK7qoj4iIiIhILa2C7Nq1azUuO3nyJPr3748VK1ZoWxMRERERUal0Pka2Q4cOePvttzFt2jRdb5qIiIiISKFSHvZq1KgR4uLiKmPTREREREQAKinIHj16FNbW1pWxaSIiIiIiAFqOkV2/fr3a9oyMDBw8eBD79u3DmDFjKlQYEREREVFJtAqyo0aNgiAIEEVRdYNmZhgzZgy+/fbbChdHRERERKSJVkH2yJEjKm2CIMDBwQFeXl6oUaNGhQsjIiIiIiqJVkG2a9euuq6DiIiIiKhctAqyz8rLy8OdO3cAAJ6enrCxsalwUUREREREpdF61oKEhAT07t0bNWvWhK+vL3x9fVGzZk307t0bly5d0mWNREREREQqtLoj+/fff8Pf3x85OTkIDAyEj48PAODSpUs4cOAATpw4gejoaL6mloiIiIgqjVZBdurUqTAxMcGZM2fQqlUrpWXx8fEICAjA1KlTERUVpZMiiYiIiIiep9XQgtjYWEyYMEElxAJAq1atEBYWhlOnTlW4OCIiIiIiTbQKslZWVnBxcdG43M3NjW/2IiIiIqJKpVWQ7d27NyIiIjQuj4iIQK9evbQuioiIiIioNFoF2cWLFyM9PR1DhgzBmTNnkJ2djezsbJw+fRqvv/46MjIy+GYvIiIiIqpUWj3s5ezsDEEQEB8fjx07digtK35trbOzs1K7IAh4+vSplmUSERERESnTKsiGhIRAEARd10JEREREVGZaBdm1a9fquAwiIiIiovLR+s1eRERERET6pNUd2WJ5eXm4ffs20tPTFWNjn9WlS5eKbJ6IiIiISCOtgmxeXh6mTJmCNWvWqH2ASxRFCIKAwsLCChdIRERERKSOVkF20qRJWL16NXr37o2AgAA4Ojrqui4iIiIiohJpFWR37tyJN954A5s2bdJ1PUREREREZaLVw175+fnw9/fXcSlERERERGWnVZB99dVXcf36dV3XQkRERERUZloF2fnz52PNmjU4e/asrushIiIiIiqTMo2RHT16tEpb3bp10b59e/j5+aF+/fowNTVVWi4IAlavXq2bKomIiIiInlOmIFvSm7xOnDiBEydOqLQzyBIRERFRZSpTkJXL5ZVdBxERERFRufAVtURERERklBhkiYiIiMgoafVChICAgBKXC4IAa2treHh4ICgoCAMGDIAgCFoVSERERESkjlZB9tatW3j8+DEePHgAAKhZsyYAIDMzEwDg5OQEuVyOP//8Ez/++CM6duyIffv2oUaNGjopmoiIiIhIq6EFR48ehY2NDaZOnYrU1FRkZGQgIyMDqamp+PDDD1GjRg2cPXsWDx8+xJQpU3D8+HF88cUXuq6diIiIiKoxrYJseHg4OnbsiAULFsDJyUnR7uTkhIULF8LPzw/h4eFwcHDA119/jT59+uD333/XWdFERERERFoF2cOHD6Nz584al3fu3BmHDx9WfO7Rowf+/fdfbXZFRERERKSW1rMWXLlypcRloij+bycmJrC2ttZ2V0REREREKrQKsj169MCKFSuwZcsWlWW//vorVq5cicDAQEVbfHw86tWrp3WRRERERETP0yrILl68GE5OTnjzzTdRt25d+Pv7w9/fH3Xr1sXIkSNRu3ZtLFq0CACQn5+PO3fuICQkpNTtxsTEoF+/fnBzc4MgCNi1a5fSclEU8fnnn8PV1RXW1tbo0aMHrl+/rs0hEBEREZGR0yrIenp64p9//sEHH3wAiUSCv/76C3/99Rfs7OzwwQcf4J9//oGnpycAwMrKCocPH0Z4eHip283NzUWLFi2wbNkytcsXLlyI7777DitXrsRff/2FGjVqoGfPnsjPz9fmMIiIiIjIiGk1jywAODg4YOHChVi4cKHOiunVqxd69eqldpkoiliyZAk+++wzDBgwAACwfv161KlTB7t27cLw4cN1VgcRERERGT6tg+yLlpiYiJSUFPTo0UPRZm9vj3bt2uHUqVMag2xBQQEKCgoUn6VSKQBAJpNBJpNVbtFkUIqvN687GTJ+Tw0Hr4VmVfXcGONxGWrNFa2rrOtpFWTXr19fpn5lGRdbVikpKQCAOnXqKLXXqVNHsUydefPmYfbs2SrtBw4cgI2Njc7qI+MRFRWl7xKISsXvqeHgtdCsqp4bYzwuQ61Z27ry8vLK1E+rIDtq1CgIgqA0xRYACIKg9FmXQVZb06dPx5QpUxSfpVIp3N3dERQUBIlEosfK6EWTyWSIiopCYGAgzM3N9V0OkVr8nhoOXgvNquq5McbjMtSaK1pX8W/QS6NVkD1y5IhK29OnT3Hz5k0sX74cNjY2+PLLL7XZtEYuLi4AgNTUVLi6uiraU1NT8corr2hcz9LSEpaWlirt5ubmBnXB6cXhtSdjwO+p4eC10KyqnhtjPC5DrVnbusq6jlZBtmvXrmrbu3fvjtDQULRt2xbx8fHo1q2bNptXy8vLCy4uLjh06JAiuEqlUvz1118YP368zvZDRERERMZB6zd7aWJpaYmRI0di+fLl5V43JycH586dw7lz5wAUPeB17tw53L17F4IgYPLkyZg7dy4iIiJw4cIFhISEwM3NDQMHDtTtQRARERGRwauUWQssLS1x//79cq939uxZpbu4xWNbQ0NDsXbtWnz00UfIzc3FuHHjkJmZiU6dOiEyMhJWVlY6q52IiIiIjIPOg2xycjJWrlwJLy+vcq/r7++v8gDZswRBwBdffIEvvviiIiUSERERURWgVZANCAhQ256RkYErV67gyZMnWLduXYUKIyIiAoBCuYjTiRlIy86Hs50V2no5wNREKH1FItINeSFw5ySQkwrY1gE8OwAmpvquCoCWQfbWrVsqU20JggAHBwcMGjQIEyZM
QIcOHXRSIBERVV+RF5Mxe08CkrP+9ypyV3srzOzng2Bf1xLWJCKdSIgAIqcB0qT/tUncgOAFgE9//dX1H62C7O3bt3VcBhERkbLIi8kYvzEezw84S8nKx/iN8VgxshXDLFFlSogAtoYAz/8USpOL2oeu13uY1fmsBURERBVVKBcxe0+CSogF/vef1Nl7ElAo1/xcBRFVgLyw6E5sST+FkR8X9dOjCj3sdfPmTezevRu3bt0CANSvXx8DBgxAgwYNdFIcERFVT6cTM5SGEzxPBJCclY/TiRnwa+D44gojqi7unFQeTqBCBKT3i/p5dX5hZT1P6yA7Y8YMzJ8/H4WFykn8o48+wieffMKZBYiISGtp2ZpDrDb9iKicclJ126+SaDW04JdffsGXX36Jdu3aYdeuXbh+/TquX7+OXbt2wc/PD19++SXWrl2r41KJiKi6cLYr2/zgZe1HROVkW0e3/SqJVndkly1bhnbt2uHo0aMwM/vfJho0aIDevXujc+fO+P777zFq1Chd1UlERNVIWy8HuNpbISUrX+0IPQGAi33RVFxEVAk8OxTNTiBNhvpxskLRck/9zlKl1R3Zy5cvY/jw4UohtpiZmRmGDx+Oy5cvV7g4IiKqnkxNBMzs5wOgKLQ+q/jzzH4+nE+WqLKYmBZNsQVA409h8Hy9zyerVZC1sLBATk6OxuXZ2dmwsLDQuigiIqJgX1esGNkKLvbKwwdc7K049RbRi+DTv2iKLclzP2sSN4OYegvQcmhBmzZt8OOPP+Kdd95BnTrKYyPS0tLw008/oV27djopkIiIqq9gX1cE+rjwzV5E+uLTH/DuU7Xe7DVjxgx0794dTZo0wZgxY+DjU/Trn0uXLmHNmjXIzs7Gpk2bdFooERFVT6YmAqfYItInE1O9TrFVEq2CbJcuXbBjxw5MmDABixYtUlrm4eGBdevWoXNnwzxgIiIiIqoatJ5Htl+/fujTpw/i4uKQmJgIoOiFCK1atYKJCV8YRkRERESVq9xBNicnBy1atMDEiRMxefJktGnTBm3atKmM2oiIiIiINCr3rVNbW1ukp6fD1ta2MuohIiIiIioTrcYAtG/fHmfPntV1LUREREREZaZVkJ0/fz62bt2KNWvWQBTVve2BiIiIiKhyafWw15QpU1CrVi288847+Oijj9CgQQPY2Ngo9REEAYcOHdJJkUREREREz9MqyN66dQuCIMDDwwMAkJqaqtOiiIiIiIhKo1WQvX37to7LICIiIiIqH074SkRERERGiUGWiIiIiIyS1kF2y5Yt6NixI5ydnWFqaqryx8xM65eGERERERGVSqu0+fXXX+Pjjz+Go6Mj2rdvD0dHR13XRURERERUIq2C7LJly9CuXTscOnQI1tbWuq6JiIiIiKhUWg0tSElJwciRIxliiYiIiEhvtAqyDRs2RGZmpo5LISIiIiIqO62C7AcffIDVq1cjJydH1/UQEREREZVJmcbIrl+/XumzqakpnJ2d4e3tjdGjR8PLywumpqYq64WEhOimSiIiIiKi55QpyI4aNQqCIEAURZVlc+fOVbuOIAgMskRERERUacoUZI8cOVLZdRARERERlUuZgmzXrl1x9+5dODk5caYCIiIiIjIIZX7Yy8vLCzt37qzMWoiIiIiIyqzMQVbd+FgiIiIiIn3RavotIiIiIiJ9Y5AlIiIiIqNUpoe9ih07dgxPnz4tc39Ov0VERERElaVcQfann37CTz/9VGo/URQ5jywRERERVapyBdlx48ahffv2lVULEREREVGZlSvIdu7cGSNGjKisWoiIiIiIyowPexERERGRUWKQJSIiIiKjxCBLREREREapzGNk5XJ5ZdZBRERERFQuvCNLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiKrNCuYjTiRkAgNOJGSiUi3quiKozBlkiIiIqk8iLyei04DBGrzsDABi97gw6LTiMyIvJeq6MqisGWSIiIipV5MVkjN8Yj+SsfKX2lKx8jN8YzzBLesEgS0RERCUqlIuYvScB6gYRFLfN3pPAYQb0wjHIEhERUYlOJ2ao3Il9lgggOStfMXaW6EVhkCUiIqISpWVrDrHa9CPSFQZZIiIiKpGznZVO+xHpCoMsERERlaitlwNc7a0gaFguAHC1t0JbL4cXWRYRgywRERGVzNREwMx+PgCgEmaLP8/s5wNTE01Rl6hyMMgSERFRqYJ9XbFiZCu42CsPH3Cxt8KKka0Q7Ouqp8qoOjPTdwFERERkHIJ9XRHo44LYG2l4eDkWv4S2QfuGzrwTS3rDO7JERERUZqYmgmIsbFsvB4ZY0isGWSIiIiIySgyyRERERGSUGGSJiIiIyCgxyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklBlkiIiIiMkoMskRERERklBhkiYiIiMgoMcgSERERkVFikCUiIiIio8QgS0RERERGiUGWiIiIiIwSgywRERERGSUGWSIiIiIySgyyRERERGSUGGSJiIiIyCgxyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklBlkiIiIiMkoMskRERERklBhkiYiIiMgoMcgSERERkVFikCUiIiIio8QgS0RERERGiUGWiIiIiIwSgywRERERGSUGWSIiIiIySgyyRERERGSUGGSJiIiIyCgxyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklBlkiIiIiMkoMskRERERklBhkiYiIiMgoGVWQnTVrFgRBUPrj7e2t77KIiIiISA/M9F1AeTVt2hQHDx5UfDYzM7pDICIiIiIdMLoUaGZmBhcXF32XQURERER6ZnRB9vr163Bzc4OVlRX8/Pwwb948eHh4aOxfUFCAgoICxWepVAoAkMlkkMlklV4vGY7i683rToaM31PDwWuhWVU9N8Z4XC+i5kK5iLg7j/AwpwC1bS3R2rMWTE2ESq2rrOsJoiiKWu1BD/bt24ecnBw0btwYycnJmD17Nu7fv4+LFy/Czs5O7TqzZs3C7NmzVdo3b94MGxubyi6ZiIiIiMopLy8PI0aMQFZWFiQSicZ+RhVkn5eZmQlPT08sXrwYY8aMUdtH3R1Zd3d3PHz4sMQTQ1WPTCZDVFQUAgMDYW5uru9yiNTi99Rw8FpoVlXPjTEeV2XWfPByKsJ/O4fng2Lxvdhvh72CHk3qVEpdUqkUtWvXLjXIGt3QgmfVrFkTL7/8Mm7cuKGxj6WlJSwtLVXazc3NjeZ
LSrrFa0/GgN9Tw8FroVlVPTfGeFy6rrlQLuKLP64iv1D9EAIBwBd/XEWQ70slDjPQtq6yrmNU0289LycnBzdv3oSrq6u+SyEiIiKqMk4nZiA5K1/jchFAclY+TidmvLii1DCqIPvhhx8iOjoat2/fxsmTJ/Haa6/B1NQUb7zxhr5LIyIiIqoy0rI1h1ht+lUWoxpa8O+//+KNN95Aeno6nJyc0KlTJ8TGxsLJyUnfpRERERFVGc52VjrtV1mMKshu2bJF3yUQERERVXltvRzgam+FlKx8lYe9gKIxsi72Vmjr5fCiS1NiVEMLiIiIiKjymZoImNnPB8D/ZikoVvx5Zj+fUueTrWwMskRERESkItjXFStGtoKLvfLwARd7K6wY2QrBvvp/2N6ohhYQERER0YsT7OuKQB8XnE7MQFp2PpztioYT6PtObDEGWSIiIiLSyNREgF8DR32XoRaHFhARERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiMEoMsERERERklM30X8KKJoggAkEqleq6EXjSZTIa8vDxIpVKYm5vruxwitfg9NRy8FppV1XNjjMdlqDVXtK7inFac2zSpdkE2OzsbAODu7q7nSoiIiIioJNnZ2bC3t9e4XBBLi7pVjFwuR1JSEuzs7CAIgr7LoRdIKpXC3d0d9+7dg0Qi0Xc5RGrxe2o4eC00q6rnxhiPy1BrrmhdoigiOzsbbm5uMDHRPBK22t2RNTExQd26dfVdBumRRCIxqB92InX4PTUcvBaaVdVzY4zHZag1V6Suku7EFuPDXkRERERklBhkiYiIiMgoMchStWFpaYmZM2fC0tJS36UQacTvqeHgtdCsqp4bYzwuQ635RdVV7R72IiIiIqKqgXdkiYiIiMgoMcgSERERkVFikCUiIiIio8QgS0RERERGiUGWqrwVK1agefPmikmZ/fz8sG/fPn2XRaTi/v37GDlyJBwdHWFtbY1mzZrh7Nmz+i6rWsrOzsbkyZPh6ekJa2trdOjQAWfOnNF3WS9UTEwM+vXrBzc3NwiCgF27dimWyWQyTJs2Dc2aNUONGjXg5uaGkJAQJCUl6a/gcijp2ABg1KhREARB6U9wcLB+iv1PaTXn5ORgwoQJqFu3LqytreHj44OVK1dWak3z5s1DmzZtYGdnB2dnZwwcOBBXr15V6vPTTz/B398fEokEgiAgMzNTpzUwyFKVV7duXcyfPx9xcXE4e/YsAgICMGDAAFy6dEnfpREpPHr0CB07doS5uTn27duHhIQELFq0CLVq1dJ3adXSO++8g6ioKGzYsAEXLlxAUFAQevTogfv37+u7tBcmNzcXLVq0wLJly1SW5eXlIT4+HjNmzEB8fDx27NiBq1evon///nqotPxKOrZiwcHBSE5OVvz59ddfX2CFqkqrecqUKYiMjMTGjRtx+fJlTJ48GRMmTEBERESl1RQdHY2wsDDExsYiKioKMpkMQUFByM3NVfTJy8tDcHAwPvnkk8opQiSqhmrVqiX+/PPP+i6DSGHatGlip06d9F0GiaKYl5cnmpqainv37lVqb9Wqlfjpp5/qqSr9AiDu3LmzxD6nT58WAYh37tx5MUXpiLpjCw0NFQcMGKCXespCXc1NmzYVv/jiC6W2F/2dTUtLEwGI0dHRKsuOHDkiAhAfPXqk033yjixVK4WFhdiyZQtyc3Ph5+en73KIFCIiIvDqq69iyJAhcHZ2RsuWLbFq1Sp9l1UtPX36FIWFhbCyslJqt7a2xvHjx/VUleHLysqCIAioWbOmvkvRiaNHj8LZ2RmNGzfG+PHjkZ6eru+SStShQwdERETg/v37EEURR44cwbVr1xAUFPTCasjKygIAODg4vLB9MshStXDhwgXY2trC0tIS7733Hnbu3AkfHx99l0WkcOvWLaxYsQKNGjXC/v37MX78eLz//vtYt26dvkurduzs7ODn54c5c+YgKSkJhYWF2LhxI06dOoXk5GR9l2eQ8vPzMW3aNLzxxhuQSCT6LqfCgoODsX79ehw6dAgLFixAdHQ0evXqhcLCQn2XptH3338PHx8f1K1bFxYWFggODsayZcvQpUuXF7J/uVyOyZMno2PHjvD19X0h+wQAsxe2JyI9aty4Mc6dO4esrCxs374doaGhiI6OZpglgyGXy/Hqq6/iq6++AgC0bNkSFy9exMqVKxEaGqrn6qqfDRs2YPTo0XjppZdgamqKVq1a4Y033kBcXJy+SzM4MpkMQ4cOhSiKWLFihb7L0Ynhw4cr/t6sWTM0b94cDRo0wNGjR9G9e3c9VqbZ999/j9jYWERERMDT0xMxMTEICwuDm5sbevToUen7DwsLw8WLF1/4by14R5aqBQsLCzRs2BCtW7fGvHnz0KJFCyxdulTfZREpuLq6qvyPVZMmTXD37l09VVS9NWjQANHR0cjJycG9e/dw+vRpyGQy1K9fX9+lGZTiEHvnzh1ERUVVibux6tSvXx+1a9fGjRs39F2KWo8fP8Ynn3yCxYsXo1+/fmjevDkmTJiAYcOG4Ztvvqn0/U+YMAF79+7FkSNHULdu3Urf37MYZKlaksvlKCgo0HcZRAodO3ZUmbbm2rVr8PT01FNFBAA1atSAq6srHj16hP3792PAgAH6LslgFIfY69ev4+DBg3B0dNR3SZXm33//RXp6OlxdXfVdiloymQwymQwmJsqxztTUFHK5vNL2K4oiJkyYgJ07d+Lw4cPw8vKqtH1pwqEFVOVNnz4dvXr1goeHB7Kzs7F582YcPXoU+/fv13dpRArh4eHo0KEDvvrqKwwdOhSnT5/GTz/9hJ9++knfpVVL+/fvhyiKaNy4MW7cuIGpU6fC29sbb7/9tr5Le2FycnKU7kAmJibi3LlzcHBwgKurK15//XXEx8dj7969KCwsREpKCoCiB30sLCz0VXaZlHRsDg4OmD17NgYPHgwXFxfcvHkTH330ERo2bIiePXsaZM0eHh7o2rUrpk6dCmtra3h6eiI6Ohrr16/H4sWLK62msLAwbN68Gbt374adnZ3iO2Bvbw9ra2sAQEpKClJSUhS1X7hwAXZ2dvDw8NDNQ2E6nQOByACNHj1a9PT0FC0sLEQnJyexe/fu4oEDB/RdFpGKPXv2iL6+vqKlpaXo7e0t/vTTT/ouqdr67bffxPr164sWFhaii4uLGBYWJmZmZuq7rBeqeLqk5/+EhoaKiYmJapcBEI8cOaLv0ktV0rHl5eWJQUFBopOTk2hubi56enqKY8eOFVNSUgy2ZlEUxeTkZHHUqFGim5ubaGVlJTZu3FhctGiRKJfLK60mTd+BNWvWKPrMnDmz1D4VIfxXCBERERGRUeEYWSIiIiIySgyyRERERGSUGGSJiIiIyCgxyBIRERGRUWKQJSIiIiKjxCBLREREREaJQZaIiIiIjBKDLBEREREZJQZZIqJyEgQBo0aN0ncZWsnLy8P7778PDw8PmJ
qaol69evouiYhIawyyRGQQjh49CkEQIAgCVq1apbaPIAjo27fvC66salmwYAG+//57DBs2DGvXrsWSJUtK7M9zTkSGzEzfBRARPW/WrFkYOXIkrK2t9V1KlRMVFYVmzZrh66+/1ncpREQVxjuyRGRQXn31VSQlJZV6p7C6KCwsRF5ens62l5KSAgcHB51tz5CJooicnBx9l0FElYhBlogMytChQ9G6dWssWLAA6enppfbXNF517dq1EAQBR48eVbTNmjULgiAgISEBkydPhqurK2xsbNC9e3dcvXoVALBjxw60atUK1tbWqFevHn766SeN+z548CDat28PGxsbuLi4YNKkSWqDU1ZWFqZNm4aGDRvC0tISTk5OeOONN3Dr1i21NR88eBBz5sxBgwYNYGVlha1bt5Z4Dp4+fYoFCxbAx8cHVlZWcHR0xGuvvYYLFy6obDsxMRHR0dGKYRyzZs0qcdtltXz5cgQFBeGll16ChYUFXF1dMXLkSNy+fVvR58mTJ3ByckLHjh3VbuPrr7+GIAiIiYlRtBUUFOCrr75C06ZNYWVlhZo1a6Jfv374+++/ldYtHpqydu1aLFu2THEuvvnmGwDApUuXMGTIELz00kuwtLSEi4sLunXrhj/++EMnx09E+sGhBURkUARBwPz58xEYGIgvv/wSixcv1vk+QkNDYWtri08++QQPHjzAokWL0LNnT8yZMwcfffQRxo8fj9GjR2P16tV499134ePjg06dOiltIz4+Htu3b8fYsWMREhKCI0eO4LvvvsPFixcRFRUFE5Oi+wRZWVno0KED7t69i9GjR6Np06ZITk7G8uXL0a5dO5w9exaenp5K2/7www8hk8kwduxYSCQSNG7cuMTjefPNN7F161YEBgZi/PjxSElJwbJly+Dn54djx46hZcuW6NKlCzZs2IDw8HDUrl0bn376KQCgefPmOjmn33zzDdq3b4/3338fDg4OuHjxIn7++WccPnwYFy5cgKOjIywsLBAaGopFixbh6tWrKsf1yy+/4OWXX0aXLl0AADKZDMHBwTh58iTeeustTJgwAVlZWVi1ahU6duyImJgYvPrqq0rbWLJkCdLT0zF27Fi4uLjA3d0d6enpCAgIAAC899578PT0xMOHD3H27Fn89ddf6NOnj07OARHpgUhEZACOHDkiAhC//vprURRFMTAwULS0tBRv376t6ANA7NOnj9J6AMTQ0FCV7a1Zs0YEIB45ckTRNnPmTBGA2LdvX1Eulyvaly5dKgIQ7ezsxLt37yra09LSREtLS3H48OEq+wQg7ty5U6n9/fffFwGIv/76q1KblZWVeO7cOaW+t2/fFu3s7JRqL6755ZdfFnNzc9WfqOccOHBABCAOHTpU6ZjOnTsnmpqaip06dVLq7+npKXbt2rVM2xZF9edcnZycHJW2gwcPigDEBQsWKNquXr0qAhCnTp2q1Pf48eMqfRcvXiwCECMjI5X6ZmVlie7u7krHUfz9qVWrlpiamqrUf/fu3SIA8bfffiv1OIjIuHBoAREZpAULFuDJkyeYMWOGzrf9/vvvQxAExefOnTsDAPr37w93d3dFu5OTExo3bozr16+rbKNx48YYOHCgUtvHH38MANi5cyeAojGamzZtQpcuXfDSSy/h4cOHij81atRA+/btceDAAZVtjx8/HjY2NmU6luJ9ffrpp0rH1KJFC/Tr1w/Hjx/HgwcPyrStiqhRowYAQC6XIysrCw8fPkSLFi1gb2+Pv/76S9Hv5ZdfRteuXbF+/Xo8ffpU0b569WqYmZkhNDRU0bZx40Z4e3ujdevWSufuyZMnCAwMxPHjx/H48WOlOkJCQuDs7KzUZm9vDwDYt28fpFKpzo+diPSHQZaIDFLLli3xxhtvYNOmTTh//rxOt12/fn2lz7Vq1QIAeHl5qfStVauW2rG6TZo0UWlzdXVFzZo1FWNfHzx4gPT0dBw4cABOTk4qf6KiopCamqqynZdffrnMx5KYmAgTExO19TRt2lTRp7IdPnwY/v7+qFGjBmrWrKk4xqysLDx69Eip77hx45Camoq9e/cCALKzs7F161b07dsXderUUfS7fPkyrly5ovbc/fLLLygsLMTDhw+Vtq3u3HXt2hUhISFYu3YtateujY4dO2LmzJlISEiohDNBRC8Sx8gSkcGaO3cutm/fjmnTpmHfvn3lWvfZu33PMzU1LVe7KIrl2vfz6/Xo0QPTpk0r83plvRtrKM6cOYOgoCA0bNgQ8+fPh5eXF6ytrSEIAoYPHw65XK7Uf/DgwXj//fexevVqDBw4EL/99htyc3PxzjvvKPUTRRHNmjUrcZy0k5OT0mdN527dunWYOnUq9u3bh2PHjmHRokX48ssvsWTJEkyYMEHLIycifWOQJSKD5eXlhfHjx2Pp0qVKsw88y8HBARkZGSrtz88IoGuXL19WaUtOTkZmZqbijq+TkxNq1qwJqVSKHj16VEod9evXh1wux+XLl1Ue3Cq+46juTrMubd68GYWFhdi3b5/SvnJzc1XuxgKApaUlQkJC8N133yEpKQmrV6/GSy+9hODgYKV+jRo1woMHDxAQEKB4eK4ifH194evri6lTpyIzMxPt2rXDxx9/jLCwMKVhGURkPDi0gIgM2meffQaJRIKPPvpI7fKXX34Zp06dUppr9dGjR1izZk2l1nX16lXs2rVLqW3BggUAoBg7a2JigjfffBOnT5/G9u3b1W4nLS2tQnUU72vevHlKd44vXryIiIgIdOrUSeWupa4V38l+/s71V199pXI3ttjYsWNRWFiIadOmITY2FqNGjVK5Ix4SEoKUlBSNd2TVDctQJyMjQ6WOmjVrwsvLC3l5ecjPzy/TdojI8PCOLBEZtNq1a2Pq1KkaH/qaMGECRo4ciYCAALz11lvIzMzEqlWr4OnpiZSUlEqrq1mzZhg5ciTGjh2LRo0a4ciRI9i+fTu6du2KYcOGKfp9+eWXOHHiBIYOHYqhQ4eiffv2sLCwwJ07d/Dnn3+idevWWLt2rdZ1BAYGYujQodiyZQsePXqEvn37KqbfsrKywnfffVfhY71x4wbmzp2rdll4eDhee+01fPvtt+jduzfGjRsHCwsLREVF4fz586hdu7ba9Zo0aYJOnTph48aNEAQBo0ePVukzadIkREVFYerUqTh8+DACAgIgkUhw9+5dHDp0CFZWVjhy5Eip9a9fvx7ffvstXnvtNTRs2BDm5uaIjo7G/v37MXToUL5BjsiIMcgSkcGbMmUKli9fjuTkZJVlb775JpKSkvDDDz9gypQpqF+/Pj7//HOYmJgoPS2va61atcLixYvx6aefYuXKlZBIJJgwYQK++uorpV+D29vb48SJE1i0aBG2bt2K3bt3w8zMDHXr1kWnTp1UxoVqY9OmTWjVqhXWrl2LDz74ADVq1EDXrl0xZ84cNGvWrMLbv3r1qsb/kXjnnXfQsWNH/P7775gzZw5mzJgBa2tr9OjRA9HR0Yo5YdUZN24cjh8/jm7duqk8gAcA5ubm+OOPP7B8+XJs2LABM2fOBAC4ubmhbdu2S
jMclMTf3x9///039u7di+TkZJiamsLLywvffPMNx8cSGTlB1PYpBiIiogrYunUrhg0bhs2bN+ONN97QdzlEZIQYZImISC+6du2KhIQE/Pvvv7C0tNR3OURkhDi0gIiIXpi0tDQcOnQIx44dQ0xMDObNm8cQS0Ra4x1ZIiJ6YY4ePYpu3bqhZs2aGD58OL777juYm5vruywiMlIMskRERERklDiPLBEREREZJQZZIiIiIjJKDLJEREREZJQYZImIiIjIKDHIEhEREZFRYpAlIiIiIqPEIEtERERERolBloiIiIiM0v8DTxwo2HVHzXgAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 800x600 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "from matplotlib.ticker import MaxNLocator, FormatStrFormatter\n",
    "\n",
    "# Data from the table\n",
    "models = ['eqV2-S', 'eqV2-M', 'eqV2-L'] \n",
    "layers = [2, 4, 8, 3, 6, 12, 5, 10, 20]\n",
    "original_layers = [8, 12, 20]\n",
    "original_throughput = [9.4, 7.4, 4.9]\n",
    "ours_throughput = [40.4, 28.7, 16.8, 31.6, 22.3, 13.9, 24.1, 15.8, 9.4]\n",
    "\n",
    "# Create the plot\n",
    "fig, ax = plt.subplots(figsize=(8, 6))\n",
    "ax.scatter(original_layers, original_throughput, label='Original')\n",
    "ax.scatter(layers, ours_throughput, label='Ours')\n",
    "ax.set_xscale('log', base=2)\n",
    "ax.set_yscale('log',base=2)\n",
    "ax.set_title('Throughput Comparison', fontsize=16)\n",
    "ax.set_xlabel('Number of Layers', fontsize=13)\n",
    "ax.set_ylabel('Throughput (samples/sec)', fontsize=13)\n",
    "ax.legend(fontsize=12)\n",
    "\n",
    "# Set the tick locator and formatter to show integer values\n",
    "ax.xaxis.set_major_locator(MaxNLocator(integer=True))\n",
    "ax.xaxis.set_major_formatter(FormatStrFormatter('%.0f'))\n",
    "ax.yaxis.set_major_locator(MaxNLocator(integer=True))\n",
    "ax.yaxis.set_major_formatter(FormatStrFormatter('%.0f'))\n",
    "ax.grid(True)\n",
    "\n",
    "plt.show()"
   ]
  },
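  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A small sanity check on the numbers plotted above (added sketch; assumes the\n",
    "# 'Ours' points at 8, 12 and 20 layers line up with the three 'Original'\n",
    "# models). Reuses the lists defined in the plotting cell.\n",
    "ours_at = dict(zip(layers, ours_throughput))\n",
    "for n, base in zip(original_layers, original_throughput):\n",
    "    speedup = ours_at[n] / base\n",
    "    print(f'{n:2d} layers: {ours_at[n]:4.1f} vs {base:.1f} samples/sec -> {speedup:.2f}x')"
   ]
  }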
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "lavt",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.21"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}