oyly committed on
Commit
ff5529d
·
1 Parent(s): 2400b90
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -187,6 +187,8 @@ def edit(brush_canvas, source_prompt, inversion_guidance,
187
  print(f'optimizing & editing noise, {target_prompt} with seed {seed}, noise_scale {noise_scale}, training_epochs {training_epochs}')
188
  model.to(device)
189
  if training_epochs != 0:
 
 
190
  torch.set_grad_enabled(True)
191
  inp_optim["img"] = z0
192
  _, info, _, _, trainable_noise_list = denoise_with_noise_optim(model,**inp_optim,token_ids=token_ids,source_mask=source_mask,training_steps=1,training_epochs=training_epochs,learning_rate=0.01,seed=seed,noise_scale=noise_scale,timesteps=timesteps,info=info,guidance=denoise_guidance)
@@ -233,8 +235,8 @@ def edit(brush_canvas, source_prompt, inversion_guidance,
233
  Image.fromarray(binary_mask, mode="L").save(output_path.replace(target_object,f'{target_object}_mask'))
234
  t1 = time.perf_counter()
235
  print(f"Done in {t1 - t0:.1f}s.", f'Saving {output_path} .' if save else 'No saving files.')
236
- t5.to(device)
237
- clip.to(device)
238
 
239
  return img
240
 
 
187
  print(f'optimizing & editing noise, {target_prompt} with seed {seed}, noise_scale {noise_scale}, training_epochs {training_epochs}')
188
  model.to(device)
189
  if training_epochs != 0:
190
+ t5.to('cpu')
191
+ clip.to('cpu')
192
  torch.set_grad_enabled(True)
193
  inp_optim["img"] = z0
194
  _, info, _, _, trainable_noise_list = denoise_with_noise_optim(model,**inp_optim,token_ids=token_ids,source_mask=source_mask,training_steps=1,training_epochs=training_epochs,learning_rate=0.01,seed=seed,noise_scale=noise_scale,timesteps=timesteps,info=info,guidance=denoise_guidance)
 
235
  Image.fromarray(binary_mask, mode="L").save(output_path.replace(target_object,f'{target_object}_mask'))
236
  t1 = time.perf_counter()
237
  print(f"Done in {t1 - t0:.1f}s.", f'Saving {output_path} .' if save else 'No saving files.')
238
+ t5.to(device)
239
+ clip.to(device)
240
 
241
  return img
242