Upload infer_.py with huggingface_hub
infer_.py
CHANGED
@@ -51,7 +51,7 @@ for batch_idx in tqdm(range(begin, end, batch_size)):
     input_text_list = []
     data_list = []
     save_data = []
-
+    sd_ans = []
     # while True:
     for idx, i in enumerate(batch):
         save_ = {
@@ -95,6 +95,7 @@ for batch_idx in tqdm(range(begin, end, batch_size)):
         save_['content'][1]['image'] = image_path
         save_['content'][2]['text'] = question
         save_['answer'] = answer
+        sd_ans.append(answer)
         data_list.append(messages)
         save_data.append(save_)

@@ -119,8 +120,9 @@ for batch_idx in tqdm(range(begin, end, batch_size)):
     output_text = processor.batch_decode(
         generated_ids_trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False
     )
-    for x in output_text:
-        print(x)
+    for idx, x in enumerate(output_text):
+        print('model_output: ', x)
+        print('standard_output: ', sd_ans[idx])

     save_["answer"] = output_text
     if output_text == answer: