root committed
Commit · f76cb2b
1 Parent(s): 8e82a6c
only key alt and alt_m9
app.py CHANGED
@@ -31,13 +31,13 @@ maximum_concepts = 3
 
 #Pre download the files
 if(is_gpu_associated):
-    model_v1 = snapshot_download(repo_id="multimodalart/sd-fine-tunable")
-    model_v2 = snapshot_download(repo_id="stabilityai/stable-diffusion-2")
-    model_v2_512 = snapshot_download(repo_id="stabilityai/stable-diffusion-2-base")
+    # model_v1 = snapshot_download(repo_id="multimodalart/sd-fine-tunable")
+    # model_v2 = snapshot_download(repo_id="stabilityai/stable-diffusion-2")
+    # model_v2_512 = snapshot_download(repo_id="stabilityai/stable-diffusion-2-base")
     model_alt = snapshot_download(repo_id="BAAI/AltDiffusion")
     model_alt_m9 = snapshot_download(repo_id="BAAI/AltDiffusion-m9")
     safety_checker = snapshot_download(repo_id="multimodalart/sd-sc")
-    model_to_load =
+    model_to_load = model_alt_m9
 
 with zipfile.ZipFile("mix.zip", 'r') as zip_ref:
     zip_ref.extractall(".")
@@ -67,16 +67,18 @@ def swap_text(option, base):
 def swap_base_model(selected_model):
     if(is_gpu_associated):
         global model_to_load
-        if(selected_model == "v1-5"):
-            model_to_load = model_v1
-        elif(selected_model == "v2-768"):
-            model_to_load = model_v2
-        elif(selected_model == "alt"):
+        # if(selected_model == "v1-5"):
+        #     model_to_load = model_v1
+        # elif(selected_model == "v2-768"):
+        #     model_to_load = model_v2
+        # elif(selected_model == "alt"):
+        #     model_to_load = model_alt
+        # elif(selected_model == "alt_m9"):
+        #     model_to_load = model_alt_m9
+        # else:
+        #     model_to_load = model_v2_512
+        if(selected_model == "alt"):
             model_to_load = model_alt
-        elif(selected_model == "alt_m9"):
-            model_to_load = model_alt_m9
-        else:
-            model_to_load = model_v2_512
 
 def count_files(*inputs):
     file_counter = 0