Update app.py
app.py
CHANGED
@@ -40,8 +40,6 @@ def infer(
     ip_adapter_image=None,
     progress=gr.Progress(track_tqdm=True),
 ):
-    generator = torch.Generator(device).manual_seed(seed)
-
     ckpt_dir='./model_output'
     unet_sub_dir = os.path.join(ckpt_dir, "unet")
     text_encoder_sub_dir = os.path.join(ckpt_dir, "text_encoder")
@@ -49,6 +47,7 @@ def infer(
     if model_id is None:
         raise ValueError("Please specify the base model name or path")
 
+    generator = torch.Generator(device).manual_seed(seed)
     params = {'prompt': prompt,
               'negative_prompt': negative_prompt,
               'guidance_scale': guidance_scale,
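For context, a minimal, hypothetical sketch of how a seeded generator like the one relocated in this commit is typically passed into a diffusers text-to-image call. The model id, prompts, and seed below are placeholders, not values taken from this Space's app.py:

```python
import torch
from diffusers import StableDiffusionPipeline

# Placeholder base model and inputs -- illustrative only, not the Space's actual values.
model_id = "runwayml/stable-diffusion-v1-5"
device = "cuda" if torch.cuda.is_available() else "cpu"

pipe = StableDiffusionPipeline.from_pretrained(model_id).to(device)

# Seeding the generator right before inference (as the commit does, after the
# model_id check) ties the seed to this specific call for reproducible outputs.
seed = 42
generator = torch.Generator(device).manual_seed(seed)

image = pipe(
    prompt="a photo of a red vintage car",
    negative_prompt="blurry, low quality",
    guidance_scale=7.5,
    generator=generator,
).images[0]
image.save("output.png")
```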