Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -12,7 +12,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
| 12 |
MAX_SEED = np.iinfo(np.int32).max
|
| 13 |
MAX_IMAGE_SIZE = 2048
|
| 14 |
|
| 15 |
-
@spaces.GPU()
|
| 16 |
def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
|
| 17 |
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16, revision="refs/pr/3").to("cuda")
|
| 18 |
if randomize_seed:
|
|
|
|
| 12 |
MAX_SEED = np.iinfo(np.int32).max
|
| 13 |
MAX_IMAGE_SIZE = 2048
|
| 14 |
|
| 15 |
+
@spaces.GPU(duration=190)
|
| 16 |
def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
|
| 17 |
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16, revision="refs/pr/3").to("cuda")
|
| 18 |
if randomize_seed:
|