Delete scienceqa.log
scienceqa.log +0 -174
scienceqa.log
DELETED
@@ -1,174 +0,0 @@
+ CHECKPOINT=work_dirs/InternVL2-2B
+ DATASET=scienceqa
++ pwd
+ CHECKPOINT=/mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev/work_dirs/InternVL2-2B
++ pwd
+ export PYTHONPATH=/mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev:/mnt/petrelfs/wangweiyun/workspace_wwy/pkgs/petrel-oss-sdk-2.3.14:/mnt/petrelfs/share_data/wangweiyun/share_pkgs/petrel-oss-sdk-2.3.12:
+ PYTHONPATH=/mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev:/mnt/petrelfs/wangweiyun/workspace_wwy/pkgs/petrel-oss-sdk-2.3.14:/mnt/petrelfs/share_data/wangweiyun/share_pkgs/petrel-oss-sdk-2.3.12:
+ echo 'CHECKPOINT: /mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev/work_dirs/InternVL2-2B'
CHECKPOINT: /mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev/work_dirs/InternVL2-2B
+ MASTER_PORT=63669
+ PORT=63665
+ GPUS=8
+ GPUS_PER_NODE=8
+ NODES=1
+ export MASTER_PORT=63669
+ MASTER_PORT=63669
+ export PORT=63665
+ PORT=63665
+ ARGS=("$@")
+ [[ 5 -gt 0 ]]
+ case "$1" in
+ shift
+ [[ 4 -gt 0 ]]
+ case "$1" in
+ shift
+ [[ 3 -gt 0 ]]
+ case "$1" in
+ shift
+ [[ 2 -gt 0 ]]
+ case "$1" in
+ shift
+ [[ 1 -gt 0 ]]
+ case "$1" in
+ shift
+ [[ 0 -gt 0 ]]
GPUS: 8
+ echo 'GPUS: 8'
+ [[ /mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev/work_dirs/InternVL2-2B == */ ]]
+ '[' scienceqa == mme ']'
+ '[' scienceqa == caption ']'
+ '[' scienceqa == caption-coco ']'
+ '[' scienceqa == caption-flickr30k ']'
+ '[' scienceqa == caption-nocaps ']'
+ '[' scienceqa == vqa ']'
+ '[' scienceqa == vqa-okvqa-val ']'
+ '[' scienceqa == vqa-textvqa-val ']'
+ '[' scienceqa == vqa-textvqa-val-ocr ']'
+ '[' scienceqa == vqa-vizwiz-val ']'
+ '[' scienceqa == vqa-vizwiz-test ']'
+ '[' scienceqa == vqa-vqav2-testdev ']'
+ '[' scienceqa == vqa-ai2d-test ']'
+ '[' scienceqa == vqa-vqav2-val ']'
+ '[' scienceqa == vqa-gqa-testdev ']'
+ '[' scienceqa == vqa-docvqa-val ']'
+ '[' scienceqa == vqa-docvqa-test ']'
+ '[' scienceqa == vqa-chartqa-test ']'
+ '[' scienceqa == vqa-infovqa-val ']'
+ '[' scienceqa == vqa-infovqa-test ']'
+ '[' scienceqa == vqa-chartqa-test-human ']'
+ '[' scienceqa == vqa-chartqa-test-augmented ']'
+ '[' scienceqa == vqa-ocrvqa-val ']'
+ '[' scienceqa == vqa-ocrvqa-test ']'
+ '[' scienceqa == refcoco ']'
+ '[' scienceqa == refcoco-val ']'
+ '[' scienceqa == llava-bench ']'
+ '[' scienceqa == pope ']'
+ '[' scienceqa == tiny_lvlm ']'
+ '[' scienceqa == mmvet ']'
+ '[' scienceqa == cmmmu ']'
+ '[' scienceqa == mmbench-dev-en ']'
+ '[' scienceqa == mmbench-dev-cn ']'
+ '[' scienceqa == mmbench-test-en ']'
+ '[' scienceqa == mmbench-test-cn ']'
+ '[' scienceqa == ccbench-dev ']'
+ '[' scienceqa == scienceqa ']'
+ torchrun --nnodes=1 --node_rank=0 --master_addr=127.0.0.1 --nproc_per_node=8 --master_port=63669 eval/scienceqa/evaluate_scienceqa.py --checkpoint /mnt/petrelfs/wangweiyun/workspace_zyc/VLM-Dev/work_dirs/InternVL2-2B --datasets sqa_test --dynamic --max-num 6
[2024-08-07 21:55:38,022] torch.distributed.run: [WARNING]
[2024-08-07 21:55:38,022] torch.distributed.run: [WARNING] *****************************************
[2024-08-07 21:55:38,022] torch.distributed.run: [WARNING] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
[2024-08-07 21:55:38,022] torch.distributed.run: [WARNING] *****************************************
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
datasets: ['sqa_test']
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
[test] total_params: 2.205754368B, use num_beams: 1[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448[test] image_size: 448
[test] template: internlm2-chat[test] template: internlm2-chat
[test] dynamic_image_size: True[test] dynamic_image_size: True
[test] use_thumbnail: True[test] use_thumbnail: True
[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448
[test] template: internlm2-chat
[test] dynamic_image_size: True
[test] use_thumbnail: True
[test] total_params: 2.205754368B, use num_beams: 1[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448
[test] image_size: 448
[test] template: internlm2-chat[test] template: internlm2-chat
[test] dynamic_image_size: True[test] dynamic_image_size: True
[test] use_thumbnail: True[test] use_thumbnail: True
[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448
[test] template: internlm2-chat
[test] dynamic_image_size: True
[test] use_thumbnail: True
[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448
[test] template: internlm2-chat
[test] dynamic_image_size: True
[test] use_thumbnail: True
[test] total_params: 2.205754368B, use num_beams: 1
[test] image_size: 448
[test] template: internlm2-chat
[test] dynamic_image_size: True
[test] use_thumbnail: True
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
/mnt/petrelfs/wangweiyun/miniconda3/envs/internvl/lib/python3.10/site-packages/transformers/generation/configuration_utils.py:392: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(

Evaluating sqa_test ...
Results saved to results/sqa_test_240807215618.jsonl
Acc@1: 0.9400099157164105
+ '[' scienceqa == mmmu-dev ']'
+ '[' scienceqa == mmmu-val ']'
+ '[' scienceqa == mmmu-test ']'
+ '[' scienceqa == mmmu-dev-cot ']'
+ '[' scienceqa == mmmu-val-cot ']'
+ '[' scienceqa == mmmu-test-cot ']'
+ '[' scienceqa == mmvp ']'
+ '[' scienceqa == mathvista-testmini ']'
+ '[' scienceqa == mathvista-test ']'
+ '[' scienceqa == seed ']'
+ '[' scienceqa == mvbench ']'
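Note: the deleted log records a ScienceQA evaluation of InternVL2-2B. A minimal sketch of the equivalent invocation, taken directly from the torchrun line traced above and assuming the same single-node, 8-GPU setup launched from the VLM-Dev repository root (the relative CHECKPOINT path is the shorthand the script expands against pwd):

# assumes: run from the VLM-Dev checkout, 8 local GPUs, checkpoint under work_dirs/
CHECKPOINT=work_dirs/InternVL2-2B
torchrun --nnodes=1 --node_rank=0 --master_addr=127.0.0.1 --nproc_per_node=8 --master_port=63669 \
  eval/scienceqa/evaluate_scienceqa.py \
  --checkpoint "$CHECKPOINT" --datasets sqa_test --dynamic --max-num 6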