# Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import lightning.pytorch as pl
import torch
from lightning.pytorch import seed_everything
from omegaconf import OmegaConf

from nemo.collections.asr.models import EncDecSpeakerLabelModel
from nemo.core.config import hydra_runner
from nemo.utils import logging
from nemo.utils.exp_manager import exp_manager

seed_everything(42)


@hydra_runner(config_path="conf", config_name="titanet-finetune.yaml")
def main(cfg):
    logging.info(f'Hydra config: {OmegaConf.to_yaml(cfg)}')

    trainer = pl.Trainer(**cfg.trainer)
    log_dir = exp_manager(trainer, cfg.get("exp_manager", None))

    # Build the speaker-label model and initialize it from a pretrained
    # checkpoint if one is specified in the config.
    speaker_model = EncDecSpeakerLabelModel(cfg=cfg.model, trainer=trainer)
    speaker_model.maybe_init_from_pretrained_checkpoint(cfg)

    # Save the class labels next to the experiment logs so predictions can be
    # mapped back to speaker names later.
    if log_dir is not None:
        with open(os.path.join(log_dir, 'labels.txt'), 'w') as f:
            if speaker_model.labels is not None:
                for label in speaker_model.labels:
                    f.write(f'{label}\n')

    trainer.fit(speaker_model)

    # Tear down the distributed process group (if one was created) so a fresh
    # single-device Trainer can be constructed for evaluation. The guard keeps
    # single-GPU/CPU runs from raising an error here.
    if torch.distributed.is_available() and torch.distributed.is_initialized():
        torch.distributed.destroy_process_group()

    # Run evaluation on a single device from the global-zero rank only.
    if hasattr(cfg.model, 'test_ds') and cfg.model.test_ds.manifest_filepath is not None:
        if trainer.is_global_zero:
            trainer = pl.Trainer(devices=1, accelerator=cfg.trainer.accelerator, strategy=cfg.trainer.strategy)
            if speaker_model.prepare_test(trainer):
                trainer.test(speaker_model)


if __name__ == '__main__':
    main()
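
# Example invocation (a sketch: the manifest paths and override values below
# are illustrative assumptions, not defaults shipped with this script; any
# field in conf/titanet-finetune.yaml can be overridden the same way via
# Hydra's command-line syntax):
#
#   python speaker_reco_finetune.py \
#       model.train_ds.manifest_filepath=/data/train_manifest.json \
#       model.validation_ds.manifest_filepath=/data/dev_manifest.json \
#       trainer.devices=1 \
#       trainer.max_epochs=10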