Your computer must not enter into standby mode
app.py
CHANGED
@@ -1,6 +1,3 @@
-from diffusers import StableDiffusionXLInpaintPipeline
-from PIL import Image, ImageFilter
-
 import gradio as gr
 import numpy as np
 import time
@@ -9,11 +6,15 @@ import random
 import imageio
 import torch
 
+from diffusers import StableDiffusionXLInpaintPipeline
+from PIL import Image, ImageFilter
+
 max_64_bit_int = 2**63 - 1
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 floatType = torch.float16 if torch.cuda.is_available() else torch.float32
 variant = "fp16" if torch.cuda.is_available() else None
+
 pipe = StableDiffusionXLInpaintPipeline.from_pretrained("diffusers/stable-diffusion-xl-1.0-inpainting-0.1", torch_dtype = floatType, variant = variant)
 pipe = pipe.to(device)
 
@@ -283,7 +284,7 @@ with gr.Blocks() as interface:
 <li>To modify <b>anything else</b> on your image, I recommend to use <i>Instruct Pix2Pix</i>.</li>
 </ul>
 <br/>
-🐌 Slow process... ~1 hour.<br/>
+🐌 Slow process... ~1 hour. Your computer must not enter into standby mode.<br/>You can duplicate this space on a free account, it works on CPU and should also run on CUDA.<br/>
 <a href='https://huggingface.co/spaces/Fabrice-TIERCELIN/Uncrop?duplicate=true'><img src='https://img.shields.io/badge/-Duplicate%20Space-blue?labelColor=white&style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAP5JREFUOE+lk7FqAkEURY+ltunEgFXS2sZGIbXfEPdLlnxJyDdYB62sbbUKpLbVNhyYFzbrrA74YJlh9r079973psed0cvUD4A+4HoCjsA85X0Dfn/RBLBgBDxnQPfAEJgBY+A9gALA4tcbamSzS4xq4FOQAJgCDwV2CPKV8tZAJcAjMMkUe1vX+U+SMhfAJEHasQIWmXNN3abzDwHUrgcRGmYcgKe0bxrblHEB4E/pndMazNpSZGcsZdBlYJcEL9Afo75molJyM2FxmPgmgPqlWNLGfwZGG6UiyEvLzHYDmoPkDDiNm9JR9uboiONcBXrpY1qmgs21x1QwyZcpvxt9NS09PlsPAAAAAElFTkSuQmCC&logoWidth=14'></a>
 <br/>
 ⚖️ You can use, modify and share the generated images but not for commercial uses.
@@ -391,6 +392,7 @@ with gr.Blocks() as interface:
 ], scroll_to_output = True)
 
 gr.Examples(
+fn = uncrop,
 inputs = [
 input_image,
 enlarge_top,
@@ -405,7 +407,6 @@ with gr.Blocks() as interface:
 image_guidance_scale,
 strength,
 denoising_steps,
-randomize_seed,
 seed,
 debug_mode
 ],
@@ -431,7 +432,6 @@ with gr.Blocks() as interface:
 1.5,
 0.99,
 1000,
-True,
 42,
 False
 ],
@@ -449,7 +449,6 @@ with gr.Blocks() as interface:
 1.5,
 0.99,
 1000,
-True,
 42,
 False
 ],
@@ -467,7 +466,6 @@ with gr.Blocks() as interface:
 1.5,
 0.99,
 1000,
-True,
 42,
 False
 ],
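For context on the gr.Examples hunks above: each example row must supply exactly one value per component listed in inputs, in the same order, which is why removing randomize_seed from the inputs list also removes the corresponding True entry from every example row; fn ties the examples to the function that produces their outputs (used, for instance, when example outputs are cached). Below is a minimal, self-contained sketch of that wiring, with hypothetical placeholder components and a stub standing in for the Space's real uncrop function.

```python
# Minimal sketch, not the Space's actual layout: uncrop_stub and the
# components here are simplified placeholders for illustration only.
import gradio as gr

def uncrop_stub(prompt, steps, seed, debug):
    # Placeholder for the real uncrop() defined in app.py.
    return f"prompt={prompt!r}, steps={steps}, seed={seed}, debug={debug}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    steps = gr.Slider(1, 1000, value=1000, label="Denoising steps")
    seed_box = gr.Number(value=42, label="Seed")
    debug = gr.Checkbox(value=False, label="Debug mode")
    result = gr.Textbox(label="Result")

    gr.Button("Run").click(uncrop_stub, inputs=[prompt, steps, seed_box, debug], outputs=result)

    gr.Examples(
        fn=uncrop_stub,                                     # function the examples belong to
        examples=[["A beach at sunset", 1000, 42, False]],  # one value per input component, same order
        inputs=[prompt, steps, seed_box, debug],
        outputs=result,
    )

if __name__ == "__main__":
    demo.launch()
```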