import torch
from modules import devices, shared

# No-op placeholders: this module never moves model components between
# devices, so every entry point returns immediately or reports False.
module_in_gpu = None
cpu = torch.device("cpu")


def send_everything_to_cpu():
    return


def is_needed(sd_model):
    return False


def apply(sd_model):
    return


def setup_for_low_vram(sd_model, use_medvram):
    return


def is_enabled(sd_model):
    return False