WiNE-iNEFF committed on
Commit
88972dd
·
1 Parent(s): edeab3f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -7,7 +7,6 @@ from time import time, ctime
7
  from PIL import Image, ImageColor
8
  from diffusers import DDPMPipeline
9
  from diffusers import DDIMScheduler
10
- from tqdm import tqdm
11
 
12
  device = (
13
  "mps"
@@ -44,10 +43,11 @@ def generate():
44
  return show_images_save(x)
45
 
46
  def generate_g(color, guidance_loss_scale):
 
47
  target_color = ImageColor.getcolor(color, "RGBA") # Target color as RGB
48
  target_color = [a / 255 for a in target_color] # Rescale from (0, 255) to (0, 1)
49
  x = torch.randn(8, 4, 64, 64).to(device)
50
- for i, t in tqdm(enumerate(scheduler.timesteps)):
51
  model_input = scheduler.scale_model_input(x, t)
52
  with torch.no_grad():
53
  noise_pred = image_pipe.unet(model_input, t)["sample"]
 
7
  from PIL import Image, ImageColor
8
  from diffusers import DDPMPipeline
9
  from diffusers import DDIMScheduler
 
10
 
11
  device = (
12
  "mps"
 
43
  return show_images_save(x)
44
 
45
  def generate_g(color, guidance_loss_scale):
46
+ print('--V2--')
47
  target_color = ImageColor.getcolor(color, "RGBA") # Target color as RGB
48
  target_color = [a / 255 for a in target_color] # Rescale from (0, 255) to (0, 1)
49
  x = torch.randn(8, 4, 64, 64).to(device)
50
+ for i, t in enumerate(scheduler.timesteps):
51
  model_input = scheduler.scale_model_input(x, t)
52
  with torch.no_grad():
53
  noise_pred = image_pipe.unet(model_input, t)["sample"]