cbensimon (HF Staff) committed
Commit bf738a2 · 1 parent: a5c23d1

FLUX.1-dev

Files changed (1)
  1. app.py +9 -9
app.py

@@ -8,26 +8,26 @@ from diffusers import FluxPipeline
 from optimization import optimize_pipeline_
 
 
-pipeline = FluxPipeline.from_pretrained('black-forest-labs/FLUX.1-schnell', torch_dtype=torch.bfloat16).to('cuda')
+pipeline = FluxPipeline.from_pretrained('black-forest-labs/FLUX.1-dev', torch_dtype=torch.bfloat16).to('cuda')
 optimize_pipeline_(pipeline, "prompt")
 
 
 @spaces.GPU
-def generate_image(prompt: str):
+def generate_image(prompt: str, progress=gr.Progress(track_tqdm=True)):
     generator = torch.Generator(device='cuda').manual_seed(42)
     t0 = datetime.now()
-    images = []
-    for _ in range(9):
-        image = pipeline(prompt, num_inference_steps=4, generator=generator).images[0]
-        elapsed = -(t0 - (t0 := datetime.now()))
-        images += [(image, f'{elapsed.total_seconds():.2f}s')]
-        yield images
+    output = pipeline(
+        prompt=prompt,
+        num_inference_steps=28,
+        generator=generator,
+    )
+    return (output.images[0], f'{(datetime.now() - t0).total_seconds():.2f}s')
 
 
 gr.Interface(
     fn=generate_image,
     inputs=gr.Text(label="Prompt"),
-    outputs=gr.Gallery(rows=3, columns=3, height='60vh'),
+    outputs=gr.Image(),
     examples=["A cat playing with a ball of yarn"],
     cache_examples=False,
 ).launch()
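
The hunk starts at line 8, so the import block above it is not shown in this diff. For reference, a minimal sketch of what app.py plausibly looks like after this commit: only the FluxPipeline import is confirmed by the hunk header; the remaining imports (datetime, gradio, spaces, torch, and the local optimization module) are assumptions inferred from how those names are used in the changed lines.

# Sketch of the post-commit app.py; imports other than FluxPipeline are assumed.
from datetime import datetime

import gradio as gr
import spaces
import torch
from diffusers import FluxPipeline

from optimization import optimize_pipeline_


# FLUX.1-dev replaces FLUX.1-schnell in this commit.
pipeline = FluxPipeline.from_pretrained('black-forest-labs/FLUX.1-dev', torch_dtype=torch.bfloat16).to('cuda')
optimize_pipeline_(pipeline, "prompt")


@spaces.GPU
def generate_image(prompt: str, progress=gr.Progress(track_tqdm=True)):
    # Fixed seed so repeated runs of the same prompt are reproducible.
    generator = torch.Generator(device='cuda').manual_seed(42)
    t0 = datetime.now()
    output = pipeline(
        prompt=prompt,
        num_inference_steps=28,  # dev uses 28 steps here, versus 4 for schnell
        generator=generator,
    )
    # As committed: returns the image plus the wall-clock generation time.
    return (output.images[0], f'{(datetime.now() - t0).total_seconds():.2f}s')


gr.Interface(
    fn=generate_image,
    inputs=gr.Text(label="Prompt"),
    outputs=gr.Image(),
    examples=["A cat playing with a ball of yarn"],
    cache_examples=False,
).launch()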