Update app.py
app.py
CHANGED
@@ -9,7 +9,7 @@ import numpy as np
 from PIL import Image
 import spaces
 import torch
-from diffusers import StableDiffusionXLPipeline
+from diffusers import StableDiffusionXLPipeline, KDPM2AncestralDiscreteScheduler, AutoencoderKL
 
 DESCRIPTION = """
 # Proteus V0.1
@@ -37,6 +37,11 @@ if torch.cuda.is_available():
     if ENABLE_CPU_OFFLOAD:
         pipe.enable_model_cpu_offload()
     else:
+        vae = AutoencoderKL.from_pretrained(
+            "madebyollin/sdxl-vae-fp16-fix",
+            torch_dtype=torch.float16
+        )
+        pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
         pipe.to(device)
         print("Loaded on Device!")
     pipe.load_lora_weights("stabilityai/stable-diffusion-xl-base-1.0", weight_name="sd_xl_offset_example-lora_1.0.safetensors")
@@ -168,7 +173,7 @@ with gr.Blocks(title="Proteus V0.1", css=css) as demo:
                 minimum=0.1,
                 maximum=20,
                 step=0.1,
-                value=
+                value=7.0,
             )
 
         gr.Examples(
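For context, a minimal sketch of how the pieces introduced in this commit are typically wired together with diffusers: the fp16-safe SDXL VAE and the KDPM2 ancestral scheduler. The base checkpoint id, passing the VAE via vae=, and the device handling below are assumptions for illustration; the hunks above only construct the VAE object and swap the scheduler.

# Sketch only: illustrates the diffusers calls this commit introduces.
# Assumptions (not shown in the diff): the base checkpoint id and attaching
# the VAE to the pipeline with vae=.
import torch
from diffusers import StableDiffusionXLPipeline, KDPM2AncestralDiscreteScheduler, AutoencoderKL

# fp16-safe SDXL VAE, same repo id as in the diff
vae = AutoencoderKL.from_pretrained(
    "madebyollin/sdxl-vae-fp16-fix",
    torch_dtype=torch.float16,
)

# Assumed checkpoint for illustration; the Space's actual model id is not visible in these hunks
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    vae=vae,
    torch_dtype=torch.float16,
)

# Replace the default scheduler with the ancestral KDPM2 variant, as the commit does
pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)

if torch.cuda.is_available():
    pipe.to("cuda")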