Spaces: Running on A10G

bug fix

finetune.py CHANGED  +39 -38
@@ -3,6 +3,7 @@ import sys
 from pathlib import Path
 import shutil
 import os
+import zipfile
 
 from espnet2.tasks.s2t import S2TTask
 from espnet2.text.sentencepiece_tokenizer import SentencepiecesTokenizer
@@ -18,24 +19,9 @@ import gradio as gr
 import librosa
 
 
-class Logger:
-    def __init__(self, filename):
-        self.terminal = sys.stdout
-        self.log = open(filename, "w")
-
-    def write(self, message):
-        self.terminal.write(message)
-        self.log.write(message)
-
-    def flush(self):
-        self.terminal.flush()
-        self.log.flush()
-
-    def isatty(self):
-        return False
-
-
-sys.stdout = Logger("output.log")
+def log(temp_dir, text):
+    with open(f"{temp_dir}/output.log", "a") as f:
+        f.write(text + "\n")
 
 
 def count_parameters(model):
@@ -128,11 +114,11 @@ class CustomFinetuneModel(ESPnetS2TModel):
 def finetune_model(lang, task, tempdir_path, log_every, max_epoch, scheduler, warmup_steps, optimizer, learning_rate, weight_decay):
     """Main function for finetuning the model."""
 
-
+    log(tempdir_path, "Start generating baseline...")
     gr.Info("Start generating baseline...")
-    baseline_model(lang, task, tempdir_path)
+    ref, base = baseline_model(lang, task, tempdir_path)
 
-
+    log(tempdir_path, "Start generating hypothesis...")
     gr.Info("Start Fine-tuning process...")
     if len(tempdir_path) == 0:
         raise gr.Error("Please upload a zip file first.")
@@ -153,11 +139,11 @@ def finetune_model(lang, task, tempdir_path, log_every, max_epoch, scheduler, wa
 
     # load dataset and define data_info
     train_dataset, test_dataset, test_list = get_dataset(tempdir_path, data_info)
-
+    log(tempdir_path, "Loading dataset...")
    gr.Info("Loaded dataset.")
 
     # load and update configuration
-
+    log(tempdir_path, "Setting up the training configuration...")
     pretrain_config = ez.config.from_yaml(
         "s2t",
         "assets/owsm_ebf_v3.1_base/config.yaml",
@@ -197,24 +183,38 @@ def finetune_model(lang, task, tempdir_path, log_every, max_epoch, scheduler, wa
         ngpu=1
     )
     gr.Info("start collect stats")
-
+    log(tempdir_path, "Start collect stats process...")
     trainer.collect_stats()
+
     gr.Info("Finished collect stats, starting training.")
-
+    log(tempdir_path, "Finished collect stats, starting training...")
     trainer.train()
     gr.Info("Finished Fine-tuning! Archiving experiment files...")
-    print("Finished fine-tuning.")
-    print("Start archiving experiment files...")
-    print("Create zip file for the following files into `finetune.zip`:")
-    for f in glob.glob(f"{tempdir_path}/exp/finetune/*"):
-        print(f.replace(tempdir_path, ""))
 
-
+    log(tempdir_path, "Finished fine-tuning.")
+    log(tempdir_path, "Start archiving experiment files...")
+    log(tempdir_path, "Create zip file for the following files into `finetune.zip`:")
+    log(tempdir_path, "exp/s2t_stats_raw_bpe50000")
+    log(tempdir_path, "exp/finetune/tensorboard")
+    log(tempdir_path, "exp/finetune/images")
+    log(tempdir_path, "exp/finetune/train.log")
+    log(tempdir_path, "exp/finetune/config.yaml")
+    log(tempdir_path, "exp/finetune/valid.acc.ave.pth")
+
+    finetune_zip = zipfile.ZipFile(f"{tempdir_path}/finetune.zip", "w", zipfile.ZIP_DEFLATED)
+    finetune_zip.write(f"{tempdir_path}/exp/s2t_stats_raw_bpe50000")
+    finetune_zip.write(f"{tempdir_path}/exp/finetune/tensorboard")
+    finetune_zip.write(f"{tempdir_path}/exp/finetune/images")
+    finetune_zip.write(f"{tempdir_path}/exp/finetune/train.log")
+    finetune_zip.write(f"{tempdir_path}/exp/finetune/config.yaml")
+    finetune_zip.write(f"{tempdir_path}/exp/finetune/valid.acc.ave.pth")
+    finetune_zip.close()
+
     gr.Info("Finished generating result file in zip!")
-
+    log(tempdir_path, "Finished generating result file in zip!")
 
-    print("Start generating test result...")
     gr.Info("Start generating output for test set!")
+    log(tempdir_path, "Start generating output for test set!")
 
     del trainer
     model = Speech2Text(
@@ -224,7 +224,7 @@ def finetune_model(lang, task, tempdir_path, log_every, max_epoch, scheduler, wa
         token_type="bpe",
         bpemodel="assets/owsm_ebf_v3.1_base/bpe.model",
         beam_size=5,
-        ctc_weight=0.
+        ctc_weight=0.0,
         lang_sym=f"<{lang}>",
         task_sym=f"<{task}>",
     )
@@ -240,12 +240,13 @@ def finetune_model(lang, task, tempdir_path, log_every, max_epoch, scheduler, wa
             f_hyp.write(out + '\n')
             hyp += out + '\n'
 
-    return [f"{tempdir_path}/finetune.zip", f"{tempdir_path}/ref.txt", f"{tempdir_path}/base.txt", f"{tempdir_path}/hyp.txt"], hyp
+    return [f"{tempdir_path}/finetune.zip", f"{tempdir_path}/ref.txt", f"{tempdir_path}/base.txt", f"{tempdir_path}/hyp.txt"], ref, base, hyp
 
 
 def baseline_model(lang, task, tempdir_path):
-
+    log(tempdir_path, "Start loading dataset...")
     if len(tempdir_path) == 0:
+        log(tempdir_path, "Please upload a zip file first.")
         raise gr.Error("Please upload a zip file first.")
 
     # define tokenizer
@@ -264,11 +265,11 @@ def baseline_model(lang, task, tempdir_path):
 
     # load dataset and define data_info
     train_dataset, test_dataset, test_list = get_dataset(tempdir_path, data_info)
-
+    log(tempdir_path, "Loaded dataset.")
     gr.Info("Loaded dataset.")
 
-    print("Loading pretrained model...")
     gr.Info("Loading pretrained model...")
+    log(tempdir_path, "Loading pretrained model...")
 
     model = Speech2Text(
         "assets/owsm_ebf_v3.1_base/config.yaml",
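
For context, a minimal self-contained sketch of the pattern this commit switches to: an append-only log helper writing to output.log inside the working directory, and zipfile collecting selected experiment artifacts into finetune.zip. The archive_results helper, the demo_tmp directory, and the artifact list below are illustrative placeholders, not part of the Space's code.

# Sketch only: standalone illustration of the log + zipfile pattern above.
import zipfile
from pathlib import Path


def log(temp_dir, text):
    # Append one progress line to <temp_dir>/output.log (created on first call).
    with open(f"{temp_dir}/output.log", "a") as f:
        f.write(text + "\n")


def archive_results(temp_dir, artifacts):
    # Hypothetical helper: zip the listed experiment artifacts into finetune.zip.
    log(temp_dir, "Create zip file for the following files into `finetune.zip`:")
    with zipfile.ZipFile(f"{temp_dir}/finetune.zip", "w", zipfile.ZIP_DEFLATED) as zf:
        for rel_path in artifacts:
            log(temp_dir, rel_path)
            zf.write(f"{temp_dir}/{rel_path}")


if __name__ == "__main__":
    tmp = "demo_tmp"  # placeholder working directory
    Path(tmp, "exp/finetune").mkdir(parents=True, exist_ok=True)
    Path(tmp, "exp/finetune/train.log").write_text("dummy\n")
    archive_results(tmp, ["exp/finetune/train.log"])
    print(Path(tmp, "output.log").read_text())

Because the helper opens the file in append mode, repeated calls from finetune_model and baseline_model accumulate in the same output.log rather than overwriting it.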