Update modules/diarize/diarizer.py
modules/diarize/diarizer.py  +11 -0  CHANGED
@@ -4,6 +4,7 @@ from typing import List, Union, BinaryIO, Optional
 import numpy as np
 import time
 import logging
+import gc
 
 from modules.utils.paths import DIARIZATION_MODELS_DIR
 from modules.diarize.diarize_pipeline import DiarizationPipeline, assign_word_speakers
@@ -115,6 +116,16 @@ class Diarizer:
         )
         logger.disabled = False
 
+    def offload(self):
+        """Offload the model and free up the memory"""
+        if self.pipe is not None:
+            del self.pipe
+            self.pipe = None
+        if self.device == "cuda":
+            torch.cuda.empty_cache()
+            torch.cuda.reset_max_memory_allocated()
+        gc.collect()
+
     @staticmethod
     def get_device():
         if torch.cuda.is_available():