Upload 7 files
app.py
CHANGED
@@ -108,11 +108,14 @@ def run_lora(prompt, cfg_scale, steps, selected_index, randomize_seed, seed, wid
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
 
+    progress(1, desc="Preparing Inference.")
+
     image = generate_image(prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale, progress)
     pipe.to("cpu")
-    if selected_index is not None:
+    if selected_index is not None: pipe.unload_lora_weights()
+    if is_valid_lora(lora_json):
+        pipe.unfuse_lora()
         pipe.unload_lora_weights()
-    if is_valid_lora(lora_json): pipe.unfuse_lora()
     clear_cache()
     return image, seed
 
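Note on the new cleanup order: a LoRA that has been fused into the base weights has to be unfused before the adapter is unloaded, otherwise the fused deltas stay baked into the model. A minimal sketch of that lifecycle with the diffusers LoRA API; the checkpoint and adapter ids below are placeholders, not the ones this Space uses:

import torch
from diffusers import FluxPipeline

# Placeholder ids for illustration only.
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16).to("cuda")
pipe.load_lora_weights("some-user/some-flux-lora")  # hypothetical adapter repo
pipe.fuse_lora(lora_scale=1.0)                      # bake the adapter into the base weights

image = pipe("a prompt", num_inference_steps=28).images[0]

pipe.unfuse_lora()          # restore the original base weights first
pipe.unload_lora_weights()  # then drop the adapter before the next request
pipe.to("cpu")              # mirror the hunk: park the pipeline on CPU between calls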
@@ -214,9 +217,10 @@ with gr.Blocks(theme=gr.themes.Soft(), fill_width=True, css=css) as app:
     lora_search_civitai_desc = gr.Markdown(value="", visible=False)
     lora_download_url = gr.Textbox(label="URL", placeholder="http://...my_lora_url.safetensors", lines=1)
     with gr.Row():
-        lora_download =
-
-
+        lora_download = [None] * num_loras
+        for i in range(num_loras):
+            lora_download[i] = gr.Button(f"Get and set LoRA to {int(i+1)}")
+
     gallery.select(
         update_selection,
         inputs=[width, height],
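The per-slot buttons above follow the common Gradio pattern of building components in a Python list so each one can be wired by index later. A small self-contained sketch of that pattern; num_slots and the handler are illustrative stand-ins, not names from this Space:

import gradio as gr

num_slots = 3  # the Space derives its count from num_loras

def set_slot(url: str) -> str:
    # Stand-in for download_my_lora: just echo the URL into the slot.
    return url

with gr.Blocks() as demo:
    url = gr.Textbox(label="URL")
    slots = [gr.Textbox(label=f"LoRA {i + 1}") for i in range(num_slots)]
    buttons = [gr.Button(f"Get and set LoRA to {i + 1}") for i in range(num_slots)]
    for i in range(num_slots):
        buttons[i].click(set_slot, inputs=[url], outputs=[slots[i]])

demo.launch()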
@@ -248,22 +252,22 @@ with gr.Blocks(theme=gr.themes.Soft(), fill_width=True, css=css) as app:
     )
     lora_search_civitai_json.change(search_civitai_lora_json, [lora_search_civitai_query, lora_search_civitai_basemodel], [lora_search_civitai_json], queue=True, show_api=True) # fn for api
     lora_search_civitai_result.change(select_civitai_lora, [lora_search_civitai_result], [lora_download_url, lora_search_civitai_desc], scroll_to_output=True, queue=False, show_api=False)
-    gr.on(
-        triggers=[lora_download.click, lora_download_url.submit],
-        fn=download_my_lora,
-        inputs=[lora_download_url, lora_repo[0]],
-        outputs=[lora_repo[0]],
-        scroll_to_output=True,
-        queue=True,
-        show_api=False,
-    )
 
     for i, l in enumerate(lora_repo):
+        gr.on(
+            triggers=[lora_download[i].click],
+            fn=download_my_lora,
+            inputs=[lora_download_url, lora_repo[i]],
+            outputs=[lora_repo[i]],
+            scroll_to_output=True,
+            queue=True,
+            show_api=False,
+        )
         gr.on(
             triggers=[lora_repo[i].change, lora_wt[i].change],
             fn=update_loras,
             inputs=[prompt, lora_repo[i], lora_wt[i]],
-            outputs=[prompt, lora_repo[i], lora_wt[i], lora_info[i],
+            outputs=[prompt, lora_repo[i], lora_wt[i], lora_info[i], lora_md[i]],
             queue=False,
             trigger_mode="once",
             show_api=False,
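For reference, gr.on binds several triggers to one handler, and .success chains a follow-up step that only runs when the previous one finished without raising, which is the wiring style used throughout this hunk. A minimal sketch with illustrative component and function names:

import gradio as gr

def clean(text: str) -> str:
    return text.strip()

def report(text: str) -> str:
    return f"updated: {text}"

with gr.Blocks() as demo:
    box = gr.Textbox(label="Value")
    apply_btn = gr.Button("Apply")
    status = gr.Textbox(label="Status")
    # One handler bound to two triggers, then a chained step that fires only on success.
    gr.on(
        triggers=[apply_btn.click, box.submit],
        fn=clean,
        inputs=[box],
        outputs=[box],
        queue=False,
    ).success(report, [box], [status], queue=False)

demo.launch()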
@@ -271,6 +275,7 @@ with gr.Blocks(theme=gr.themes.Soft(), fill_width=True, css=css) as app:
         ).success(apply_lora_prompt, [lora_info[i]], [lora_trigger[i]], queue=False, show_api=False
         ).success(compose_lora_json, [lora_repo_json, lora_num[i], lora_repo[i], lora_wt[i], lora_weights[i], lora_trigger[i]], [lora_repo_json], queue=False, show_api=False)
 
+
     tagger_generate_from_image.click(
         lambda: ("", "", ""), None, [v2_series, v2_character, prompt], queue=False, show_api=False,
     ).success(
flux.py
CHANGED
@@ -286,7 +286,7 @@ def update_loras(prompt, lora, lora_wt):
     output_prompt = ", ".join(list_uniq(output_prompts + lora_prompts))
     choices = get_all_lora_tupled_list()
     return gr.update(value=output_prompt), gr.update(value=lora, choices=choices), gr.update(value=lora_wt),\
-        gr.update(value=tag, label=label, visible=on), gr.update(
+        gr.update(value=tag, label=label, visible=on), gr.update(value=md, visible=on)
 
 
 def search_civitai_lora(query, base_model):
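The fixed return statement hands back one gr.update per output component, matched positionally to the outputs list of whichever event calls update_loras. A minimal sketch of that contract, with illustrative names rather than the ones used in flux.py:

import gradio as gr

def toggle(show: bool):
    # One gr.update per output component, in the same order as outputs=[info, note].
    return gr.update(visible=show), gr.update(value="details" if show else "", visible=show)

with gr.Blocks() as demo:
    show = gr.Checkbox(label="Show details")
    info = gr.Textbox(label="Info", visible=False)
    note = gr.Markdown("")
    show.change(toggle, inputs=[show], outputs=[info, note])

demo.launch()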