Upload 6 files
- checkpoints/.DS_Store +0 -0
- checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/config.yaml +44 -0
- checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/last_best_checkpoint.pt +3 -0
- checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/last_checkpoint.pt +3 -0
- checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/log_2024-10-02(16:13:54).txt +803 -0
- checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/tensorboard/events.out.tfevents.1727856848.dsw-106518-965b74ddc-cdclg.3549648.0 +3 -0
checkpoints/.DS_Store
ADDED
Binary file (6.15 kB).
checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/config.yaml
ADDED
@@ -0,0 +1,44 @@
## Config file

# Log
seed: 777
use_cuda: 1 # 1 for True, 0 for False

# dataset
speaker_no: 2
mix_lst_path: ./data/wsj0_2mix/
audio_direc: /mnt/nas_sg/wulanchabu/zexu.pan/datasets/
reference_direc: /mnt/nas_sg/wulanchabu/zexu.pan/datasets/
audio_sr: 8000
ref_sr: 8000

# dataloader
num_workers: 4
batch_size: 4 # per GPU; 2-GPU training gives a total effective batch size of 8
accu_grad: 0
effec_batch_size: 4 # per GPU, only used if accu_grad is set to 1; must be a multiple of batch_size
max_length: 4 # truncate utterances in the dataloader, in seconds

# network settings
init_from: None # 'None' or a log name such as 'log_2024-07-22(18:12:13)'
causal: 0 # 1 for True, 0 for False
network_reference:
  cue: speech # lip, speech, gesture, or EEG
network_audio:
  backbone: SpEx-plus
  L: 20
  N: 256
  X: 8
  R: 4
  B: 256
  H: 512
  P: 3
  norm: gLN
  non_linear: relu
  speakers: 101 # 101 speakers in the wsj0-2mix training set

# optimizer
loss_type: SpEx-plus # SpEx+ multi-task loss from the paper
init_learning_rate: 0.001
max_epoch: 200
clip_grad_norm: 5
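For anyone reusing this config outside the original training scripts: the YAML above can be loaded into nested attribute-style namespaces, much like the namespace(...) dump in the training log below. A minimal sketch with PyYAML follows; the repo's own code appears to use yamlargparse instead (per that dump), so the `to_namespace` helper here is an illustration, not the repo's loader.

```python
# Minimal sketch: load the YAML config into nested attribute-style
# namespaces, similar to the namespace(...) dump in the training log.
from types import SimpleNamespace

import yaml  # pip install pyyaml


def to_namespace(obj):
    """Recursively convert dicts parsed from YAML into SimpleNamespace."""
    if isinstance(obj, dict):
        return SimpleNamespace(**{k: to_namespace(v) for k, v in obj.items()})
    return obj


with open("checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/config.yaml") as f:
    args = to_namespace(yaml.safe_load(f))

print(args.network_audio.backbone)  # SpEx-plus
print(args.network_audio.N)         # 256
```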
checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/last_best_checkpoint.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5ffcf87a45f46ece3fa43db5b4d7f9779a73392933fed0b563f0ead9bd9b492f
size 134255410
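Both .pt entries in this commit are Git LFS pointer stubs, not the weights themselves; the sha256 oid and size describe the real ~134 MB checkpoint that `git lfs pull` downloads. A minimal sketch for fetching and inspecting one; the dictionary layout is something to verify, not a documented format of this repo.

```python
# Minimal sketch: fetch the LFS-tracked weights, then inspect the
# checkpoint before assuming anything about its layout.
#
#   git lfs install
#   git lfs pull   # replaces the pointer stubs with the real .pt files
import torch

ckpt_path = "checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/last_best_checkpoint.pt"
checkpoint = torch.load(ckpt_path, map_location="cpu")

# How the training script packs state (e.g. 'model'/'optimizer'/'epoch'
# keys) is an assumption here; print the keys to discover the layout.
if isinstance(checkpoint, dict):
    print(checkpoint.keys())
```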
checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/last_checkpoint.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2399b66b4eb8eab7c4c31482a20c24cfb92f0c49682400b97c5d5eb8d6b8b69f
size 134246515
checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/log_2024-10-02(16:13:54).txt
ADDED
@@ -0,0 +1,803 @@
## Config file

# Log
seed: 777
use_cuda: 1 # 1 for True, 0 for False

# dataset
speaker_no: 2
mix_lst_path: ./data/wsj0_2mix/
audio_direc: /mnt/nas_sg/wulanchabu/zexu.pan/datasets/
reference_direc: /mnt/nas_sg/wulanchabu/zexu.pan/datasets/
audio_sr: 8000
ref_sr: 8000

# dataloader
num_workers: 4
batch_size: 4 # per GPU; 2-GPU training gives a total effective batch size of 8
accu_grad: 0
effec_batch_size: 4 # per GPU, only used if accu_grad is set to 1; must be a multiple of batch_size
max_length: 4 # truncate utterances in the dataloader, in seconds

# network settings
init_from: None # 'None' or a log name such as 'log_2024-07-22(18:12:13)'
causal: 0 # 1 for True, 0 for False
network_reference:
  cue: speech # lip, speech, gesture, or EEG
network_audio:
  backbone: SpEx-plus
  L: 20
  N: 256
  X: 8
  R: 4
  B: 256
  H: 512
  P: 3
  norm: gLN
  non_linear: relu
  speakers: 101 # 101 speakers in the wsj0-2mix training set

# optimizer
loss_type: SpEx-plus # SpEx+ multi-task loss from the paper
init_learning_rate: 0.001
max_epoch: 200
clip_grad_norm: 5
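The accu_grad / effec_batch_size pair in the config implies optional gradient accumulation: with accu_grad: 1, one optimizer step would span effec_batch_size / batch_size micro-batches, with gradients clipped to clip_grad_norm (5) before each step. A minimal sketch of that loop under those assumptions; the linear model and random data are stand-ins, not the repo's actual objects.

```python
# Gradient-accumulation sketch matching the config semantics.
import torch

batch_size, effec_batch_size = 4, 8           # illustrative values
accum_steps = effec_batch_size // batch_size  # must divide evenly

model = torch.nn.Linear(16, 1)                # placeholder module
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
loader = [(torch.randn(batch_size, 16), torch.randn(batch_size, 1))
          for _ in range(8)]                  # placeholder data

optimizer.zero_grad()
for i, (x, y) in enumerate(loader):
    loss = torch.nn.functional.mse_loss(model(x), y)
    (loss / accum_steps).backward()           # average over micro-batches
    if (i + 1) % accum_steps == 0:
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=5)
        optimizer.step()
        optimizer.zero_grad()
```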
W1002 16:13:58.402247 140563653224256 torch/distributed/run.py:779]
W1002 16:13:58.402247 140563653224256 torch/distributed/run.py:779] *****************************************
W1002 16:13:58.402247 140563653224256 torch/distributed/run.py:779] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
W1002 16:13:58.402247 140563653224256 torch/distributed/run.py:779] *****************************************
[W1002 16:14:02.131922970 Utils.hpp:135] Warning: Environment variable NCCL_ASYNC_ERROR_HANDLING is deprecated; use TORCH_NCCL_ASYNC_ERROR_HANDLING instead (function operator())
[W1002 16:14:02.132797365 Utils.hpp:135] Warning: Environment variable NCCL_ASYNC_ERROR_HANDLING is deprecated; use TORCH_NCCL_ASYNC_ERROR_HANDLING instead (function operator())
started on checkpoints/log_2024-10-02(16:13:54)

namespace(accu_grad=0, audio_direc='/mnt/nas_sg/wulanchabu/zexu.pan/datasets/', audio_sr=8000, batch_size=4, causal=0, checkpoint_dir='checkpoints/log_2024-10-02(16:13:54)', clip_grad_norm=5.0, config=[<yamlargparse.Path object at 0x7feab35cbd00>], device=device(type='cuda'), distributed=True, effec_batch_size=4, init_from='None', init_learning_rate=0.001, local_rank=0, loss_type='SpEx-plus', lr_warmup=0, max_epoch=200, max_length=4, mix_lst_path='./data/wsj0_2mix/', network_audio=namespace(B=256, H=512, L=20, N=256, P=3, R=4, X=8, backbone='SpEx-plus', non_linear='relu', norm='gLN', speakers=101), network_reference=namespace(cue='speech'), num_workers=4, ref_sr=8000, reference_direc='/mnt/nas_sg/wulanchabu/zexu.pan/datasets/', seed=777, speaker_no=2, train_from_last_checkpoint=0, use_cuda=1, world_size=2)
network_wrapper(
  (sep_network): SpEx_plus(
    (encoder_1d_short): Conv1D(1, 256, kernel_size=(20,), stride=(10,))
    (encoder_1d_middle): Conv1D(1, 256, kernel_size=(80,), stride=(10,))
    (encoder_1d_long): Conv1D(1, 256, kernel_size=(160,), stride=(10,))
    (ln): ChannelWiseLayerNorm((768,), eps=1e-05, elementwise_affine=True)
    (proj): Conv1D(768, 256, kernel_size=(1,), stride=(1,))
    (conv_block_1): Conv1DBlock_v2(
      (conv1x1): Conv1D(512, 512, kernel_size=(1,), stride=(1,))
      (prelu1): PReLU(num_parameters=1)
      (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(1,), groups=512)
      (prelu2): PReLU(num_parameters=1)
      (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
    )
    (conv_block_1_other): Sequential(
      (0): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(2,), dilation=(2,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (1): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(4,), dilation=(4,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (2): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(8,), dilation=(8,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (3): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(16,), dilation=(16,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (4): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(32,), dilation=(32,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (5): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(64,), dilation=(64,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (6): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(128,), dilation=(128,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
    )
    (conv_block_2): Conv1DBlock_v2(
      (conv1x1): Conv1D(512, 512, kernel_size=(1,), stride=(1,))
      (prelu1): PReLU(num_parameters=1)
      (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(1,), groups=512)
      (prelu2): PReLU(num_parameters=1)
      (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
    )
    (conv_block_2_other): Sequential(
      (0): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(2,), dilation=(2,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (1): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(4,), dilation=(4,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (2): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(8,), dilation=(8,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (3): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(16,), dilation=(16,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (4): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(32,), dilation=(32,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (5): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(64,), dilation=(64,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (6): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(128,), dilation=(128,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
    )
    (conv_block_3): Conv1DBlock_v2(
      (conv1x1): Conv1D(512, 512, kernel_size=(1,), stride=(1,))
      (prelu1): PReLU(num_parameters=1)
      (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(1,), groups=512)
      (prelu2): PReLU(num_parameters=1)
      (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
    )
    (conv_block_3_other): Sequential(
      (0): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(2,), dilation=(2,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (1): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(4,), dilation=(4,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (2): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(8,), dilation=(8,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (3): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(16,), dilation=(16,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (4): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(32,), dilation=(32,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (5): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(64,), dilation=(64,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (6): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(128,), dilation=(128,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
    )
    (conv_block_4): Conv1DBlock_v2(
      (conv1x1): Conv1D(512, 512, kernel_size=(1,), stride=(1,))
      (prelu1): PReLU(num_parameters=1)
      (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(1,), groups=512)
      (prelu2): PReLU(num_parameters=1)
      (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
      (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
    )
    (conv_block_4_other): Sequential(
      (0): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(2,), dilation=(2,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (1): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(4,), dilation=(4,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (2): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(8,), dilation=(8,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (3): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(16,), dilation=(16,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (4): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(32,), dilation=(32,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (5): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(64,), dilation=(64,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
      (6): Conv1DBlock(
        (conv1x1): Conv1D(256, 512, kernel_size=(1,), stride=(1,))
        (prelu1): PReLU(num_parameters=1)
        (lnorm1): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (dconv): Conv1d(512, 512, kernel_size=(3,), stride=(1,), padding=(128,), dilation=(128,), groups=512)
        (prelu2): PReLU(num_parameters=1)
        (lnorm2): GlobalChannelLayerNorm(512, eps=1e-05, elementwise_affine=True)
        (sconv): Conv1d(512, 256, kernel_size=(1,), stride=(1,))
      )
    )
    (mask1): Conv1D(256, 256, kernel_size=(1,), stride=(1,))
    (mask2): Conv1D(256, 256, kernel_size=(1,), stride=(1,))
    (mask3): Conv1D(256, 256, kernel_size=(1,), stride=(1,))
    (decoder_1d_1): ConvTrans1D(256, 1, kernel_size=(20,), stride=(10,))
    (decoder_1d_2): ConvTrans1D(256, 1, kernel_size=(80,), stride=(10,))
    (decoder_1d_3): ConvTrans1D(256, 1, kernel_size=(160,), stride=(10,))
    (aux_enc3): Sequential(
      (0): ChannelWiseLayerNorm((768,), eps=1e-05, elementwise_affine=True)
      (1): Conv1D(768, 256, kernel_size=(1,), stride=(1,))
      (2): ResBlock(
        (conv1): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        (conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,), bias=False)
        (batch_norm1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (batch_norm2): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (prelu1): PReLU(num_parameters=1)
        (prelu2): PReLU(num_parameters=1)
        (mp): MaxPool1d(kernel_size=3, stride=3, padding=0, dilation=1, ceil_mode=False)
      )
      (3): ResBlock(
        (conv1): Conv1d(256, 512, kernel_size=(1,), stride=(1,), bias=False)
        (conv2): Conv1d(512, 512, kernel_size=(1,), stride=(1,), bias=False)
        (batch_norm1): SyncBatchNorm(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (batch_norm2): SyncBatchNorm(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (prelu1): PReLU(num_parameters=1)
        (prelu2): PReLU(num_parameters=1)
        (mp): MaxPool1d(kernel_size=3, stride=3, padding=0, dilation=1, ceil_mode=False)
        (conv_downsample): Conv1d(256, 512, kernel_size=(1,), stride=(1,), bias=False)
      )
      (4): ResBlock(
        (conv1): Conv1d(512, 512, kernel_size=(1,), stride=(1,), bias=False)
        (conv2): Conv1d(512, 512, kernel_size=(1,), stride=(1,), bias=False)
        (batch_norm1): SyncBatchNorm(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (batch_norm2): SyncBatchNorm(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (prelu1): PReLU(num_parameters=1)
        (prelu2): PReLU(num_parameters=1)
        (mp): MaxPool1d(kernel_size=3, stride=3, padding=0, dilation=1, ceil_mode=False)
      )
      (5): Conv1D(512, 256, kernel_size=(1,), stride=(1,))
    )
    (pred_linear): Linear(in_features=256, out_features=101, bias=True)
  )
)
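The separator printed above is the familiar TCN recipe from the config: R: 4 repeats of X: 8 blocks each (one Conv1DBlock_v2 whose 512-channel input concatenates the audio features with the speaker embedding, then seven Conv1DBlocks with dilations 2 through 128). A minimal sketch of one plain dilated block follows; nn.GroupNorm(1, C) stands in for the repo's GlobalChannelLayerNorm (it normalizes over channels and time jointly, matching gLN's behaviour), and the residual add is assumed from the standard Conv-TasNet design, since module printouts do not show it.

```python
# Sketch of one dilated TCN block as printed in the log:
# 1x1 conv -> PReLU -> gLN -> depthwise dilated conv -> PReLU -> gLN
# -> 1x1 bottleneck back to 256 channels, with an assumed residual add.
import torch
import torch.nn as nn


class Conv1DBlockSketch(nn.Module):
    def __init__(self, in_ch=256, hid_ch=512, kernel=3, dilation=2):
        super().__init__()
        pad = dilation * (kernel - 1) // 2  # 'same' padding, non-causal
        self.net = nn.Sequential(
            nn.Conv1d(in_ch, hid_ch, 1),
            nn.PReLU(),
            nn.GroupNorm(1, hid_ch),        # stand-in for gLN
            nn.Conv1d(hid_ch, hid_ch, kernel, padding=pad,
                      dilation=dilation, groups=hid_ch),  # depthwise
            nn.PReLU(),
            nn.GroupNorm(1, hid_ch),
            nn.Conv1d(hid_ch, in_ch, 1),
        )

    def forward(self, x):                   # x: (batch, 256, frames)
        return x + self.net(x)              # assumed residual connection


block = Conv1DBlockSketch()
print(block(torch.randn(2, 256, 100)).shape)  # torch.Size([2, 256, 100])
```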

Total number of parameters: 11138734


Total number of trainable parameters: 11138734

Initialised Softmax Loss
Initialised Softmax Loss
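The two counts above (about 11.1 M parameters, all of them trainable) are the standard numel sums over the module's parameters. Once the LFS weights are pulled and the network is instantiated, a check along these lines should reproduce them; the placeholder module below stands in for the repo's network_wrapper, whose import path is not documented here.

```python
# Sketch: reproduce the two parameter counts printed in the log.
import torch.nn as nn

model = nn.Linear(256, 101)  # placeholder for the instantiated network_wrapper

total = sum(p.numel() for p in model.parameters())
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(f"Total number of parameters: {total}")
print(f"Total number of trainable parameters: {trainable}")
```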
dsw-106518-965b74ddc-cdclg:3549648:3549648 [0] NCCL INFO Bootstrap : Using net0:10.32.15.154<0>
dsw-106518-965b74ddc-cdclg:3549648:3549648 [0] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
dsw-106518-965b74ddc-cdclg:3549648:3549648 [0] NCCL INFO cudaDriverVersion 11040
NCCL version 2.20.5+cuda11.8
dsw-106518-965b74ddc-cdclg:3549649:3549649 [1] NCCL INFO cudaDriverVersion 11040
dsw-106518-965b74ddc-cdclg:3549649:3549649 [1] NCCL INFO Bootstrap : Using net0:10.32.15.154<0>
dsw-106518-965b74ddc-cdclg:3549649:3549649 [1] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Failed to open libibverbs.so[.1]
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Failed to open libibverbs.so[.1]
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO NET/Socket : Using [0]net0:10.32.15.154<0> [1]eth0:22.5.146.138<0>
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO NET/Socket : Using [0]net0:10.32.15.154<0> [1]eth0:22.5.146.138<0>
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Using non-device net plugin version 0
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Using non-device net plugin version 0
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Using network Socket
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Using network Socket
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO comm 0x78839c0 rank 0 nranks 2 cudaDev 0 nvmlDev 0 busId 10 commId 0x646868210b4fe2c6 - Init START
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO comm 0x8e72ac0 rank 1 nranks 2 cudaDev 1 nvmlDev 1 busId 20 commId 0x646868210b4fe2c6 - Init START
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Setting affinity for GPU 0 to ffffff
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Setting affinity for GPU 1 to ffffff
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO comm 0x78839c0 rank 0 nRanks 2 nNodes 1 localRanks 2 localRank 0 MNNVL 0
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO comm 0x8e72ac0 rank 1 nRanks 2 nNodes 1 localRanks 2 localRank 1 MNNVL 0
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO NCCL_MAX_NCHANNELS set by environment to 2.
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO NCCL_MIN_NCHANNELS set by environment to 2.
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO NCCL_MAX_NCHANNELS set by environment to 2.
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO NCCL_MIN_NCHANNELS set by environment to 2.
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Channel 00/02 : 0 1
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Channel 01/02 : 0 1
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Trees [0] -1/-1/-1->1->0 [1] 0/-1/-1->1->-1
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Trees [0] 1/-1/-1->0->-1 [1] -1/-1/-1->0->1
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO P2P Chunksize set to 524288
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO P2P Chunksize set to 524288
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Channel 00/0 : 1[1] -> 0[0] via P2P/IPC/read
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Channel 00/0 : 0[0] -> 1[1] via P2P/IPC/read
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Channel 01/0 : 1[1] -> 0[0] via P2P/IPC/read
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Channel 01/0 : 0[0] -> 1[1] via P2P/IPC/read
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Connected all rings
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO Connected all trees
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO threadThresholds 8/8/64 | 16/8/64 | 512 | 512
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO 2 coll channels, 0 collnet channels, 0 nvls channels, 2 p2p channels, 2 p2p channels per peer
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Connected all rings
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO Connected all trees
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO threadThresholds 8/8/64 | 16/8/64 | 512 | 512
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO 2 coll channels, 0 collnet channels, 0 nvls channels, 2 p2p channels, 2 p2p channels per peer
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO NCCL_LAUNCH_MODE set by environment to PARALLEL
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO NCCL_LAUNCH_MODE set by environment to PARALLEL
dsw-106518-965b74ddc-cdclg:3549648:3549739 [0] NCCL INFO comm 0x78839c0 rank 0 nranks 2 cudaDev 0 nvmlDev 0 busId 10 commId 0x646868210b4fe2c6 - Init COMPLETE
dsw-106518-965b74ddc-cdclg:3549649:3549740 [1] NCCL INFO comm 0x8e72ac0 rank 1 nranks 2 cudaDev 1 nvmlDev 1 busId 20 commId 0x646868210b4fe2c6 - Init COMPLETE
[rank0]:[W1002 16:14:09.364378929 Utils.hpp:110] Warning: Environment variable NCCL_ASYNC_ERROR_HANDLING is deprecated; use TORCH_NCCL_ASYNC_ERROR_HANDLING instead (function operator())
Start new training from scratch
[rank1]:[W1002 16:14:09.364850197 Utils.hpp:110] Warning: Environment variable NCCL_ASYNC_ERROR_HANDLING is deprecated; use TORCH_NCCL_ASYNC_ERROR_HANDLING instead (function operator())
[rank0]:[W1002 16:14:16.105722831 reducer.cpp:1400] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
[rank1]:[W1002 16:14:16.124629939 reducer.cpp:1400] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
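The launcher lines above come from torchrun with world_size=2 on a single node, and the reducer warning notes that find_unused_parameters=True was passed to the DDP constructor but never needed. A hedged sketch of that setup follows; the script name and command line are assumptions, only the torch calls themselves are standard.

```python
# Sketch of the 2-GPU DDP setup implied by the log. Launch would be
# something like (train.py is a hypothetical script name):
#   torchrun --nproc_per_node=2 train.py --config config.yaml
import os

import torch
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel as DDP

dist.init_process_group(backend="nccl")          # NCCL init seen in the log
local_rank = int(os.environ["LOCAL_RANK"])        # set by torchrun
torch.cuda.set_device(local_rank)

model = torch.nn.Linear(16, 1).cuda(local_rank)  # placeholder module
# Per the log's warning, find_unused_parameters=True was set but no
# unused parameters were found, so False avoids the extra graph walk.
model = DDP(model, device_ids=[local_rank], find_unused_parameters=False)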
Train Summary | End of Epoch 1 | Time 1938.77s | Train Loss -5.137
Valid Summary | End of Epoch 1 | Time 127.26s | Valid Loss -8.814
Test Summary | End of Epoch 1 | Time 77.56s | Test Loss -9.161
Found new best model, dict saved
Train Summary | End of Epoch 2 | Time 1938.88s | Train Loss -10.133
Valid Summary | End of Epoch 2 | Time 126.07s | Valid Loss -9.820
Test Summary | End of Epoch 2 | Time 75.90s | Test Loss -10.366
Found new best model, dict saved
Train Summary | End of Epoch 3 | Time 1937.57s | Train Loss -11.814
Valid Summary | End of Epoch 3 | Time 125.85s | Valid Loss -11.866
Test Summary | End of Epoch 3 | Time 76.05s | Test Loss -11.050
Found new best model, dict saved
Train Summary | End of Epoch 4 | Time 1931.60s | Train Loss -12.824
Valid Summary | End of Epoch 4 | Time 126.25s | Valid Loss -12.386
Test Summary | End of Epoch 4 | Time 75.53s | Test Loss -11.700
Found new best model, dict saved
Train Summary | End of Epoch 5 | Time 1934.21s | Train Loss -13.589
Valid Summary | End of Epoch 5 | Time 126.16s | Valid Loss -13.496
Test Summary | End of Epoch 5 | Time 75.61s | Test Loss -12.582
Found new best model, dict saved
Train Summary | End of Epoch 6 | Time 1935.56s | Train Loss -14.183
Valid Summary | End of Epoch 6 | Time 126.10s | Valid Loss -13.982
Test Summary | End of Epoch 6 | Time 75.56s | Test Loss -13.381
Found new best model, dict saved
Train Summary | End of Epoch 7 | Time 1933.10s | Train Loss -14.677
Valid Summary | End of Epoch 7 | Time 126.04s | Valid Loss -14.077
Test Summary | End of Epoch 7 | Time 75.73s | Test Loss -13.426
Found new best model, dict saved
Train Summary | End of Epoch 8 | Time 1933.57s | Train Loss -15.064
Valid Summary | End of Epoch 8 | Time 126.47s | Valid Loss -14.802
Test Summary | End of Epoch 8 | Time 76.01s | Test Loss -13.831
Found new best model, dict saved
Train Summary | End of Epoch 9 | Time 1936.63s | Train Loss -15.402
Valid Summary | End of Epoch 9 | Time 125.85s | Valid Loss -15.042
Test Summary | End of Epoch 9 | Time 75.88s | Test Loss -14.217
Found new best model, dict saved
Train Summary | End of Epoch 10 | Time 1933.70s | Train Loss -15.696
Valid Summary | End of Epoch 10 | Time 125.93s | Valid Loss -15.002
Test Summary | End of Epoch 10 | Time 75.76s | Test Loss -13.852
Train Summary | End of Epoch 11 | Time 1934.22s | Train Loss -15.956
Valid Summary | End of Epoch 11 | Time 125.82s | Valid Loss -15.403
Test Summary | End of Epoch 11 | Time 75.53s | Test Loss -14.515
Found new best model, dict saved
Train Summary | End of Epoch 12 | Time 1934.42s | Train Loss -16.151
Valid Summary | End of Epoch 12 | Time 125.82s | Valid Loss -15.502
Test Summary | End of Epoch 12 | Time 75.47s | Test Loss -14.775
Found new best model, dict saved
Train Summary | End of Epoch 13 | Time 1935.17s | Train Loss -16.377
Valid Summary | End of Epoch 13 | Time 125.95s | Valid Loss -15.679
Test Summary | End of Epoch 13 | Time 75.78s | Test Loss -14.593
Found new best model, dict saved
Train Summary | End of Epoch 14 | Time 1933.74s | Train Loss -16.557
Valid Summary | End of Epoch 14 | Time 126.33s | Valid Loss -15.069
Test Summary | End of Epoch 14 | Time 76.02s | Test Loss -14.342
Train Summary | End of Epoch 15 | Time 1932.98s | Train Loss -16.746
Valid Summary | End of Epoch 15 | Time 125.66s | Valid Loss -16.075
Test Summary | End of Epoch 15 | Time 75.63s | Test Loss -14.788
Found new best model, dict saved
Train Summary | End of Epoch 16 | Time 1935.18s | Train Loss -16.889
Valid Summary | End of Epoch 16 | Time 125.97s | Valid Loss -16.026
Test Summary | End of Epoch 16 | Time 75.36s | Test Loss -14.844
Train Summary | End of Epoch 17 | Time 1937.49s | Train Loss -17.054
Valid Summary | End of Epoch 17 | Time 126.31s | Valid Loss -16.395
Test Summary | End of Epoch 17 | Time 75.79s | Test Loss -15.114
Found new best model, dict saved
Train Summary | End of Epoch 18 | Time 1934.17s | Train Loss -17.182
Valid Summary | End of Epoch 18 | Time 125.78s | Valid Loss -16.482
Test Summary | End of Epoch 18 | Time 75.59s | Test Loss -15.295
Found new best model, dict saved
Train Summary | End of Epoch 19 | Time 1932.89s | Train Loss -17.311
Valid Summary | End of Epoch 19 | Time 125.77s | Valid Loss -16.488
Test Summary | End of Epoch 19 | Time 75.90s | Test Loss -15.158
Found new best model, dict saved
Train Summary | End of Epoch 20 | Time 1935.75s | Train Loss -17.433
Valid Summary | End of Epoch 20 | Time 125.57s | Valid Loss -16.730
Test Summary | End of Epoch 20 | Time 75.65s | Test Loss -15.259
Found new best model, dict saved
Train Summary | End of Epoch 21 | Time 1933.86s | Train Loss -17.535
Valid Summary | End of Epoch 21 | Time 125.81s | Valid Loss -16.474
Test Summary | End of Epoch 21 | Time 76.08s | Test Loss -15.619
Train Summary | End of Epoch 22 | Time 1358.25s | Train Loss -17.638
Valid Summary | End of Epoch 22 | Time 68.66s | Valid Loss -16.645
Test Summary | End of Epoch 22 | Time 38.26s | Test Loss -15.025
Train Summary | End of Epoch 23 | Time 815.45s | Train Loss -17.745
Valid Summary | End of Epoch 23 | Time 62.66s | Valid Loss -16.885
Test Summary | End of Epoch 23 | Time 38.78s | Test Loss -15.387
Found new best model, dict saved
Train Summary | End of Epoch 24 | Time 812.33s | Train Loss -17.824
Valid Summary | End of Epoch 24 | Time 62.43s | Valid Loss -16.871
Test Summary | End of Epoch 24 | Time 38.44s | Test Loss -15.098
Train Summary | End of Epoch 25 | Time 812.85s | Train Loss -17.911
Valid Summary | End of Epoch 25 | Time 62.76s | Valid Loss -16.963
Test Summary | End of Epoch 25 | Time 38.86s | Test Loss -15.535
Found new best model, dict saved
Train Summary | End of Epoch 26 | Time 812.48s | Train Loss -18.000
Valid Summary | End of Epoch 26 | Time 63.16s | Valid Loss -17.169
Test Summary | End of Epoch 26 | Time 38.82s | Test Loss -15.996
Found new best model, dict saved
Train Summary | End of Epoch 27 | Time 813.20s | Train Loss -18.068
Valid Summary | End of Epoch 27 | Time 62.98s | Valid Loss -17.208
Test Summary | End of Epoch 27 | Time 38.97s | Test Loss -15.804
Found new best model, dict saved
Train Summary | End of Epoch 28 | Time 812.99s | Train Loss -18.156
Valid Summary | End of Epoch 28 | Time 62.74s | Valid Loss -17.093
Test Summary | End of Epoch 28 | Time 38.68s | Test Loss -15.791
Train Summary | End of Epoch 29 | Time 813.21s | Train Loss -18.218
Valid Summary | End of Epoch 29 | Time 62.76s | Valid Loss -17.131
Test Summary | End of Epoch 29 | Time 38.63s | Test Loss -16.079
Train Summary | End of Epoch 30 | Time 812.77s | Train Loss -18.288
Valid Summary | End of Epoch 30 | Time 62.89s | Valid Loss -17.246
Test Summary | End of Epoch 30 | Time 39.09s | Test Loss -15.575
Found new best model, dict saved
Train Summary | End of Epoch 31 | Time 813.24s | Train Loss -18.363
Valid Summary | End of Epoch 31 | Time 62.55s | Valid Loss -16.372
Test Summary | End of Epoch 31 | Time 38.43s | Test Loss -15.847
Train Summary | End of Epoch 32 | Time 812.31s | Train Loss -18.428
Valid Summary | End of Epoch 32 | Time 63.25s | Valid Loss -17.360
Test Summary | End of Epoch 32 | Time 38.70s | Test Loss -15.936
Found new best model, dict saved
Train Summary | End of Epoch 33 | Time 812.89s | Train Loss -18.476
Valid Summary | End of Epoch 33 | Time 62.54s | Valid Loss -17.510
Test Summary | End of Epoch 33 | Time 38.51s | Test Loss -16.210
Found new best model, dict saved
Train Summary | End of Epoch 34 | Time 812.46s | Train Loss -18.518
Valid Summary | End of Epoch 34 | Time 63.26s | Valid Loss -17.510
Test Summary | End of Epoch 34 | Time 39.20s | Test Loss -16.004
Found new best model, dict saved
Train Summary | End of Epoch 35 | Time 813.37s | Train Loss -18.586
Valid Summary | End of Epoch 35 | Time 63.13s | Valid Loss -17.287
Test Summary | End of Epoch 35 | Time 38.75s | Test Loss -16.044
Train Summary | End of Epoch 36 | Time 813.24s | Train Loss -18.644
Valid Summary | End of Epoch 36 | Time 62.65s | Valid Loss -17.716
Test Summary | End of Epoch 36 | Time 38.10s | Test Loss -16.221
Found new best model, dict saved
Train Summary | End of Epoch 37 | Time 812.83s | Train Loss -18.700
Valid Summary | End of Epoch 37 | Time 62.28s | Valid Loss -15.359
Test Summary | End of Epoch 37 | Time 38.34s | Test Loss -15.573
Train Summary | End of Epoch 38 | Time 813.20s | Train Loss -18.734
Valid Summary | End of Epoch 38 | Time 62.85s | Valid Loss -17.653
Test Summary | End of Epoch 38 | Time 38.57s | Test Loss -16.252
Train Summary | End of Epoch 39 | Time 812.63s | Train Loss -18.778
Valid Summary | End of Epoch 39 | Time 62.37s | Valid Loss -17.796
Test Summary | End of Epoch 39 | Time 38.66s | Test Loss -16.310
Found new best model, dict saved
Train Summary | End of Epoch 40 | Time 812.81s | Train Loss -18.837
Valid Summary | End of Epoch 40 | Time 62.47s | Valid Loss -17.680
Test Summary | End of Epoch 40 | Time 38.50s | Test Loss -16.250
Train Summary | End of Epoch 41 | Time 813.08s | Train Loss -18.883
Valid Summary | End of Epoch 41 | Time 62.47s | Valid Loss -16.569
Test Summary | End of Epoch 41 | Time 38.17s | Test Loss -15.858
Train Summary | End of Epoch 42 | Time 813.25s | Train Loss -18.922
Valid Summary | End of Epoch 42 | Time 63.14s | Valid Loss -16.997
Test Summary | End of Epoch 42 | Time 38.78s | Test Loss -15.927
Train Summary | End of Epoch 43 | Time 814.01s | Train Loss -18.962
Valid Summary | End of Epoch 43 | Time 62.87s | Valid Loss -16.562
Test Summary | End of Epoch 43 | Time 38.42s | Test Loss -16.005
Train Summary | End of Epoch 44 | Time 813.39s | Train Loss -18.992
Valid Summary | End of Epoch 44 | Time 62.70s | Valid Loss -17.942
Test Summary | End of Epoch 44 | Time 38.64s | Test Loss -16.475
Found new best model, dict saved
Train Summary | End of Epoch 45 | Time 812.79s | Train Loss -19.024
Valid Summary | End of Epoch 45 | Time 62.52s | Valid Loss -17.861
Test Summary | End of Epoch 45 | Time 38.57s | Test Loss -16.471
Train Summary | End of Epoch 46 | Time 813.03s | Train Loss -19.068
Valid Summary | End of Epoch 46 | Time 62.68s | Valid Loss -17.831
Test Summary | End of Epoch 46 | Time 38.32s | Test Loss -16.308
Train Summary | End of Epoch 47 | Time 813.93s | Train Loss -19.106
Valid Summary | End of Epoch 47 | Time 120.20s | Valid Loss -11.629
Test Summary | End of Epoch 47 | Time 85.00s | Test Loss -15.160
Train Summary | End of Epoch 48 | Time 4301.71s | Train Loss -19.142
Valid Summary | End of Epoch 48 | Time 144.13s | Valid Loss -18.040
Test Summary | End of Epoch 48 | Time 79.20s | Test Loss -16.315
Found new best model, dict saved
Train Summary | End of Epoch 49 | Time 1939.32s | Train Loss -19.162
Valid Summary | End of Epoch 49 | Time 126.16s | Valid Loss -18.018
Test Summary | End of Epoch 49 | Time 75.46s | Test Loss -16.465
Train Summary | End of Epoch 50 | Time 1939.72s | Train Loss -19.214
Valid Summary | End of Epoch 50 | Time 126.05s | Valid Loss -18.083
Test Summary | End of Epoch 50 | Time 75.15s | Test Loss -16.414
Found new best model, dict saved
Train Summary | End of Epoch 51 | Time 1933.84s | Train Loss -19.243
Valid Summary | End of Epoch 51 | Time 125.93s | Valid Loss -18.084
Test Summary | End of Epoch 51 | Time 75.75s | Test Loss -16.608
Found new best model, dict saved
Train Summary | End of Epoch 52 | Time 1939.58s | Train Loss -19.278
Valid Summary | End of Epoch 52 | Time 126.17s | Valid Loss -17.739
Test Summary | End of Epoch 52 | Time 75.92s | Test Loss -16.018
Train Summary | End of Epoch 53 | Time 1939.34s | Train Loss -19.297
Valid Summary | End of Epoch 53 | Time 126.20s | Valid Loss -18.189
Test Summary | End of Epoch 53 | Time 75.89s | Test Loss -16.645
Found new best model, dict saved
Train Summary | End of Epoch 54 | Time 1942.88s | Train Loss -19.345
Valid Summary | End of Epoch 54 | Time 126.39s | Valid Loss -18.121
Test Summary | End of Epoch 54 | Time 75.22s | Test Loss -16.255
Train Summary | End of Epoch 55 | Time 1940.59s | Train Loss -19.363
Valid Summary | End of Epoch 55 | Time 125.52s | Valid Loss -18.270
Test Summary | End of Epoch 55 | Time 75.28s | Test Loss -16.622
Found new best model, dict saved
Train Summary | End of Epoch 56 | Time 1935.52s | Train Loss -19.391
Valid Summary | End of Epoch 56 | Time 126.14s | Valid Loss -17.902
Test Summary | End of Epoch 56 | Time 75.28s | Test Loss -16.636
Train Summary | End of Epoch 57 | Time 1947.40s | Train Loss -19.403
Valid Summary | End of Epoch 57 | Time 141.21s | Valid Loss -18.258
Test Summary | End of Epoch 57 | Time 81.67s | Test Loss -16.704
Train Summary | End of Epoch 58 | Time 1937.52s | Train Loss -19.442
Valid Summary | End of Epoch 58 | Time 125.90s | Valid Loss -18.183
Test Summary | End of Epoch 58 | Time 76.09s | Test Loss -16.447
Train Summary | End of Epoch 59 | Time 1938.74s | Train Loss -19.460
Valid Summary | End of Epoch 59 | Time 125.72s | Valid Loss -18.221
Test Summary | End of Epoch 59 | Time 75.65s | Test Loss -16.484
Train Summary | End of Epoch 60 | Time 1938.40s | Train Loss -19.510
Valid Summary | End of Epoch 60 | Time 126.54s | Valid Loss -18.143
Test Summary | End of Epoch 60 | Time 76.00s | Test Loss -16.491
reload weights and optimizer from last best checkpoint
Learning rate adjusted to: 0.000500
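The two lines above show the schedule around epoch 60: when validation loss stops improving, the best checkpoint is reloaded and the learning rate is halved (0.001 -> 0.0005). A minimal sketch of that policy follows; the patience window and the checkpoint key names are assumptions, not the repo's actual code.

```python
# Sketch of the reload-and-halve policy suggested by the log lines
# "reload weights and optimizer from last best checkpoint" and
# "Learning rate adjusted to: 0.000500".
import torch


def maybe_reload_and_halve(model, optimizer, val_losses, best_ckpt, patience=10):
    """If validation loss has not hit a new best within `patience`
    epochs, restore the best checkpoint and halve the learning rate.
    The patience value and 'model'/'optimizer' keys are assumptions."""
    if min(val_losses[-patience:]) > min(val_losses):  # no recent best
        state = torch.load(best_ckpt, map_location="cpu")
        model.load_state_dict(state["model"])
        optimizer.load_state_dict(state["optimizer"])
        for group in optimizer.param_groups:
            group["lr"] *= 0.5
        print("reload weights and optimizer from last best checkpoint")
        print(f"Learning rate adjusted to: {optimizer.param_groups[0]['lr']:.6f}")
```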
Train Summary | End of Epoch 61 | Time 1938.53s | Train Loss -19.653
Valid Summary | End of Epoch 61 | Time 125.69s | Valid Loss -18.524
Test Summary | End of Epoch 61 | Time 75.75s | Test Loss -16.811
Found new best model, dict saved
Train Summary | End of Epoch 62 | Time 1935.76s | Train Loss -19.721
Valid Summary | End of Epoch 62 | Time 125.88s | Valid Loss -18.546
Test Summary | End of Epoch 62 | Time 75.65s | Test Loss -16.786
Found new best model, dict saved
Train Summary | End of Epoch 63 | Time 1938.29s | Train Loss -19.752
Valid Summary | End of Epoch 63 | Time 126.20s | Valid Loss -18.549
Test Summary | End of Epoch 63 | Time 75.38s | Test Loss -16.896
Found new best model, dict saved
Train Summary | End of Epoch 64 | Time 1938.66s | Train Loss -19.794
Valid Summary | End of Epoch 64 | Time 125.45s | Valid Loss -18.592
Test Summary | End of Epoch 64 | Time 75.49s | Test Loss -16.868
Found new best model, dict saved
Train Summary | End of Epoch 65 | Time 1937.49s | Train Loss -19.819
Valid Summary | End of Epoch 65 | Time 125.89s | Valid Loss -18.524
Test Summary | End of Epoch 65 | Time 75.28s | Test Loss -16.894
Train Summary | End of Epoch 66 | Time 1936.33s | Train Loss -19.840
Valid Summary | End of Epoch 66 | Time 125.93s | Valid Loss -18.600
Test Summary | End of Epoch 66 | Time 75.42s | Test Loss -16.922
Found new best model, dict saved
Train Summary | End of Epoch 67 | Time 1938.10s | Train Loss -19.870
Valid Summary | End of Epoch 67 | Time 125.96s | Valid Loss -18.605
Test Summary | End of Epoch 67 | Time 75.25s | Test Loss -16.973
Found new best model, dict saved
Train Summary | End of Epoch 68 | Time 1937.18s | Train Loss -19.883
Valid Summary | End of Epoch 68 | Time 125.45s | Valid Loss -18.623
Test Summary | End of Epoch 68 | Time 75.31s | Test Loss -17.031
Found new best model, dict saved
Train Summary | End of Epoch 69 | Time 1934.23s | Train Loss -19.902
Valid Summary | End of Epoch 69 | Time 125.81s | Valid Loss -18.522
Test Summary | End of Epoch 69 | Time 75.71s | Test Loss -16.899
Train Summary | End of Epoch 70 | Time 1937.90s | Train Loss -19.928
Valid Summary | End of Epoch 70 | Time 125.90s | Valid Loss -18.610
Test Summary | End of Epoch 70 | Time 75.16s | Test Loss -16.751
Train Summary | End of Epoch 71 | Time 1939.63s | Train Loss -19.949
Valid Summary | End of Epoch 71 | Time 126.40s | Valid Loss -18.579
Test Summary | End of Epoch 71 | Time 75.48s | Test Loss -16.833
Train Summary | End of Epoch 72 | Time 1939.05s | Train Loss -19.958
Valid Summary | End of Epoch 72 | Time 125.71s | Valid Loss -18.654
Test Summary | End of Epoch 72 | Time 75.39s | Test Loss -16.874
Found new best model, dict saved
Train Summary | End of Epoch 73 | Time 1935.47s | Train Loss -19.978
Valid Summary | End of Epoch 73 | Time 126.41s | Valid Loss -18.638
Test Summary | End of Epoch 73 | Time 75.85s | Test Loss -16.873
Train Summary | End of Epoch 74 | Time 1937.77s | Train Loss -19.988
Valid Summary | End of Epoch 74 | Time 126.00s | Valid Loss -18.645
Test Summary | End of Epoch 74 | Time 75.82s | Test Loss -16.738
Train Summary | End of Epoch 75 | Time 1938.83s | Train Loss -20.012
Valid Summary | End of Epoch 75 | Time 125.59s | Valid Loss -18.645
Test Summary | End of Epoch 75 | Time 75.37s | Test Loss -16.910
Train Summary | End of Epoch 76 | Time 1937.47s | Train Loss -20.013
Valid Summary | End of Epoch 76 | Time 126.25s | Valid Loss -18.678
Test Summary | End of Epoch 76 | Time 73.25s | Test Loss -17.020
Found new best model, dict saved
Train Summary | End of Epoch 77 | Time 1934.53s | Train Loss -20.033
Valid Summary | End of Epoch 77 | Time 125.89s | Valid Loss -18.653
Test Summary | End of Epoch 77 | Time 75.59s | Test Loss -16.875
Train Summary | End of Epoch 78 | Time 1939.31s | Train Loss -20.049
Valid Summary | End of Epoch 78 | Time 125.67s | Valid Loss -18.647
Test Summary | End of Epoch 78 | Time 75.40s | Test Loss -16.805
Train Summary | End of Epoch 79 | Time 1939.14s | Train Loss -20.066
+
Valid Summary | End of Epoch 79 | Time 125.73s | Valid Loss -18.706
|
| 739 |
+
Test Summary | End of Epoch 79 | Time 75.69s | Test Loss -16.929
|
| 740 |
+
Fund new best model, dict saved
|
| 741 |
+
Train Summary | End of Epoch 80 | Time 1934.85s | Train Loss -20.078
|
| 742 |
+
Valid Summary | End of Epoch 80 | Time 125.68s | Valid Loss -18.632
|
| 743 |
+
Test Summary | End of Epoch 80 | Time 75.14s | Test Loss -16.703
|
| 744 |
+
Train Summary | End of Epoch 81 | Time 1937.54s | Train Loss -20.091
|
| 745 |
+
Valid Summary | End of Epoch 81 | Time 125.79s | Valid Loss -18.727
|
| 746 |
+
Test Summary | End of Epoch 81 | Time 75.17s | Test Loss -16.949
|
| 747 |
+
Fund new best model, dict saved
|
| 748 |
+
Train Summary | End of Epoch 82 | Time 1940.97s | Train Loss -20.106
|
| 749 |
+
Valid Summary | End of Epoch 82 | Time 125.99s | Valid Loss -18.659
|
| 750 |
+
Test Summary | End of Epoch 82 | Time 75.62s | Test Loss -16.828
|
| 751 |
+
Train Summary | End of Epoch 83 | Time 1940.60s | Train Loss -20.111
|
| 752 |
+
Valid Summary | End of Epoch 83 | Time 125.86s | Valid Loss -18.713
|
| 753 |
+
Test Summary | End of Epoch 83 | Time 75.38s | Test Loss -16.985
|
| 754 |
+
Train Summary | End of Epoch 84 | Time 1934.57s | Train Loss -20.121
|
| 755 |
+
Valid Summary | End of Epoch 84 | Time 126.02s | Valid Loss -18.723
|
| 756 |
+
Test Summary | End of Epoch 84 | Time 75.73s | Test Loss -16.886
|
| 757 |
+
Train Summary | End of Epoch 85 | Time 1939.74s | Train Loss -20.136
|
| 758 |
+
Valid Summary | End of Epoch 85 | Time 138.39s | Valid Loss -18.691
|
| 759 |
+
Test Summary | End of Epoch 85 | Time 80.10s | Test Loss -16.823
|
| 760 |
+
Train Summary | End of Epoch 86 | Time 1937.70s | Train Loss -20.150
|
| 761 |
+
Valid Summary | End of Epoch 86 | Time 125.96s | Valid Loss -18.710
|
| 762 |
+
Test Summary | End of Epoch 86 | Time 75.33s | Test Loss -16.945
|
| 763 |
+
reload weights and optimizer from last best checkpoint
|
| 764 |
+
Learning rate adjusted to: 0.000250
|
| 765 |
+
Train Summary | End of Epoch 87 | Time 1937.52s | Train Loss -20.204
|
| 766 |
+
Valid Summary | End of Epoch 87 | Time 125.28s | Valid Loss -18.809
|
| 767 |
+
Test Summary | End of Epoch 87 | Time 75.78s | Test Loss -16.972
|
| 768 |
+
Fund new best model, dict saved
|
| 769 |
+
Train Summary | End of Epoch 88 | Time 1938.42s | Train Loss -20.240
|
| 770 |
+
Valid Summary | End of Epoch 88 | Time 125.41s | Valid Loss -18.818
|
| 771 |
+
Test Summary | End of Epoch 88 | Time 75.98s | Test Loss -17.035
|
| 772 |
+
Fund new best model, dict saved
|
| 773 |
+
Train Summary | End of Epoch 89 | Time 1939.54s | Train Loss -20.249
|
| 774 |
+
Valid Summary | End of Epoch 89 | Time 126.38s | Valid Loss -18.812
|
| 775 |
+
Test Summary | End of Epoch 89 | Time 76.28s | Test Loss -16.910
|
| 776 |
+
Train Summary | End of Epoch 90 | Time 1945.55s | Train Loss -20.267
|
| 777 |
+
Valid Summary | End of Epoch 90 | Time 126.46s | Valid Loss -18.806
|
| 778 |
+
Test Summary | End of Epoch 90 | Time 75.42s | Test Loss -16.897
|
| 779 |
+
Train Summary | End of Epoch 91 | Time 1949.44s | Train Loss -20.279
|
| 780 |
+
Valid Summary | End of Epoch 91 | Time 124.58s | Valid Loss -18.810
|
| 781 |
+
Test Summary | End of Epoch 91 | Time 75.95s | Test Loss -16.901
|
| 782 |
+
Train Summary | End of Epoch 92 | Time 1941.30s | Train Loss -20.293
|
| 783 |
+
Valid Summary | End of Epoch 92 | Time 124.42s | Valid Loss -18.809
|
| 784 |
+
Test Summary | End of Epoch 92 | Time 75.64s | Test Loss -16.954
|
| 785 |
+
Train Summary | End of Epoch 93 | Time 1941.58s | Train Loss -20.297
|
| 786 |
+
Valid Summary | End of Epoch 93 | Time 125.85s | Valid Loss -18.787
|
| 787 |
+
Test Summary | End of Epoch 93 | Time 75.74s | Test Loss -16.888
|
| 788 |
+
reload weights and optimizer from last best checkpoint
|
| 789 |
+
Learning rate adjusted to: 0.000125
|
| 790 |
+
Train Summary | End of Epoch 94 | Time 1942.52s | Train Loss -20.291
Valid Summary | End of Epoch 94 | Time 273.14s | Valid Loss -18.839
Test Summary | End of Epoch 94 | Time 162.06s | Test Loss -17.012
Found new best model, dict saved
Train Summary | End of Epoch 95 | Time 1985.76s | Train Loss -20.312
Valid Summary | End of Epoch 95 | Time 228.44s | Valid Loss -18.840
Test Summary | End of Epoch 95 | Time 155.77s | Test Loss -17.009
Found new best model, dict saved
Train Summary | End of Epoch 96 | Time 2972.21s | Train Loss -20.316
Valid Summary | End of Epoch 96 | Time 210.59s | Valid Loss -9.387
Start evaluation
Avg SISNRi: tensor([17.1080], device='cuda:0')
Avg SNRi: 17.45552202765349
Avg STOIi: 0.21840867744715423
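The closing metrics are improvements over the unprocessed mixture: SI-SNRi (scale-invariant SNR improvement, in dB), SNRi, and STOIi. A minimal sketch of how SI-SNRi is typically computed, assuming 1-D waveform tensors for the estimate, the target speaker, and the mixture; this is illustrative, not the repository's actual evaluation code:

import torch

def si_snr(est, ref, eps=1e-8):
    # Scale-invariant SNR in dB: project the estimate onto the reference,
    # treat the residual as noise.
    est = est - est.mean(dim=-1, keepdim=True)
    ref = ref - ref.mean(dim=-1, keepdim=True)
    proj = (est * ref).sum(dim=-1, keepdim=True) * ref \
           / (ref.pow(2).sum(dim=-1, keepdim=True) + eps)
    noise = est - proj
    return 10 * torch.log10(proj.pow(2).sum(dim=-1)
                            / (noise.pow(2).sum(dim=-1) + eps))

def si_snr_improvement(est, target, mixture):
    # SI-SNRi: gain of the separated estimate over the raw mixture.
    return si_snr(est, target) - si_snr(mixture, target)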
checkpoints/log_wsj0-2mix_speech_SpEx-plus_2spk/tensorboard/events.out.tfevents.1727856848.dsw-106518-965b74ddc-cdclg.3549648.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1932d6a2ebf5f030cf4bc8a3367376ccb0346d292c57480ab66f1c59043454f4
size 14148
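Like the checkpoints, this TensorBoard event file is stored as a Git LFS pointer: the repository keeps only the spec version, the SHA-256 of the real payload, and its byte size. A small illustrative helper (not part of this repo) for checking a downloaded file against such a pointer:

import hashlib
import os

def verify_lfs_pointer(pointer_path, file_path):
    # A git-lfs v1 pointer holds three "key value" lines: version, oid, size.
    with open(pointer_path) as f:
        fields = dict(line.strip().split(' ', 1) for line in f if line.strip())
    expected_oid = fields['oid'].split(':', 1)[1]  # drop the "sha256:" prefix
    assert os.path.getsize(file_path) == int(fields['size']), 'size mismatch'
    h = hashlib.sha256()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    assert h.hexdigest() == expected_oid, 'sha256 mismatch'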