Spaces: Running on Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -36,7 +36,7 @@ MAX_IMAGE_SIZE = 2048
|
|
| 36 |
|
| 37 |
#pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(pipe)
|
| 38 |
|
| 39 |
-
@spaces.GPU
|
| 40 |
def infer_flux(prompt, seed=42, randomize_seed=True, width=1024, height=1024, guidance_scale=3.5, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
|
| 41 |
if randomize_seed:
|
| 42 |
seed = random.randint(0, MAX_SEED)
|
|
@@ -269,6 +269,7 @@ def infer(image_input):
|
|
| 269 |
print(clipi_result)
|
| 270 |
|
| 271 |
llama_q = clipi_result
|
|
|
|
| 272 |
|
| 273 |
gr.Info('Calling Llama2 ...')
|
| 274 |
result = llama_gen_fragrance(llama_q)
|
|
|
|
| 36 |
|
| 37 |
#pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(pipe)
|
| 38 |
|
| 39 |
+
@spaces.GPU(duration=75)
|
| 40 |
def infer_flux(prompt, seed=42, randomize_seed=True, width=1024, height=1024, guidance_scale=3.5, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
|
| 41 |
if randomize_seed:
|
| 42 |
seed = random.randint(0, MAX_SEED)
|
|
|
|
| 269 |
print(clipi_result)
|
| 270 |
|
| 271 |
llama_q = clipi_result
|
| 272 |
+
yield None, None, None
|
| 273 |
|
| 274 |
gr.Info('Calling Llama2 ...')
|
| 275 |
result = llama_gen_fragrance(llama_q)
|