fffiloni committed on
Commit fcb3dcb · verified · 1 Parent(s): e36f65d

Update app.py

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -36,7 +36,7 @@ MAX_IMAGE_SIZE = 2048
 
 #pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(pipe)
 
-@spaces.GPU
+@spaces.GPU(duration=75)
 def infer_flux(prompt, seed=42, randomize_seed=True, width=1024, height=1024, guidance_scale=3.5, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
@@ -269,6 +269,7 @@ def infer(image_input):
     print(clipi_result)
 
     llama_q = clipi_result
+    yield None, None, None
 
     gr.Info('Calling Llama2 ...')
     result = llama_gen_fragrance(llama_q)
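
For context, a minimal sketch of the two patterns this commit touches, assuming a ZeroGPU Space using the Hugging Face `spaces` package and a Gradio generator handler wired to three output components; everything here other than the decorator, the `yield None, None, None` line, and `llama_gen_fragrance(llama_q)` is illustrative scaffolding, not taken from app.py:

import random

import gradio as gr
import spaces  # Hugging Face ZeroGPU decorator package

MAX_SEED = 2**32 - 1

# duration=75 requests up to ~75 seconds of GPU time per call on ZeroGPU
# instead of the default allocation.
@spaces.GPU(duration=75)
def infer_flux(prompt, seed=42, randomize_seed=True):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    # ... run the Flux pipeline here ...
    return seed

def llama_gen_fragrance(q):
    # Stub standing in for the Llama 2 helper already defined in app.py.
    return f"fragrance description for: {q}"

def infer(image_input):
    clipi_result = "a bottle of perfume on a wooden table"  # placeholder caption
    llama_q = clipi_result
    # Yield one placeholder per output component before the slow LLM call,
    # so the UI updates immediately while generation runs (generator-style
    # Gradio handler; infer is assumed to feed three outputs).
    yield None, None, None
    gr.Info('Calling Llama2 ...')
    result = llama_gen_fragrance(llama_q)
    yield clipi_result, result, None  # illustrative final yield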