linoyts (HF Staff) committed · verified
Commit 26eb536 · 1 Parent(s): fe70d6a

Update app.py

Files changed (1): app.py (+10 -10)
app.py CHANGED
@@ -32,16 +32,16 @@ MAX_DURATION = round(MAX_FRAMES_MODEL/FIXED_FPS,1)
 
 vae = AutoencoderKLWan.from_pretrained("Wan-AI/Wan2.2-T2V-A14B-Diffusers", subfolder="vae", torch_dtype=torch.float32)
 pipe = WanPipeline.from_pretrained(MODEL_ID,
-    transformer=WanTransformer3DModel.from_pretrained('cbensimon/Wan2.2-I2V-A14B-bf16-Diffusers',
-        subfolder='transformer',
-        torch_dtype=torch.bfloat16,
-        device_map='cuda',
-    ),
-    transformer_2=WanTransformer3DModel.from_pretrained('cbensimon/Wan2.2-I2V-A14B-bf16-Diffusers',
-        subfolder='transformer_2',
-        torch_dtype=torch.bfloat16,
-        device_map='cuda',
-    ),
+    # transformer=WanTransformer3DModel.from_pretrained('cbensimon/Wan2.2-I2V-A14B-bf16-Diffusers',
+    #     subfolder='transformer',
+    #     torch_dtype=torch.bfloat16,
+    #     device_map='cuda',
+    # ),
+    # transformer_2=WanTransformer3DModel.from_pretrained('cbensimon/Wan2.2-I2V-A14B-bf16-Diffusers',
+    #     subfolder='transformer_2',
+    #     torch_dtype=torch.bfloat16,
+    #     device_map='cuda',
+    # ),
     vae=vae,
     torch_dtype=torch.bfloat16,
 ).to('cuda')
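
Net effect of the commit, as a minimal sketch: with the explicit transformer and transformer_2 overrides commented out, WanPipeline.from_pretrained falls back to loading both transformer stages from MODEL_ID itself; only the fp32 VAE override remains. MODEL_ID is defined elsewhere in app.py and its value is not shown in this diff, so the repo id below is an assumption.

# Sketch of the loading path after this change (assumptions marked in comments).
import torch
from diffusers import AutoencoderKLWan, WanPipeline

MODEL_ID = "Wan-AI/Wan2.2-T2V-A14B-Diffusers"  # assumed value; defined earlier in app.py

# fp32 VAE override, unchanged by this commit
vae = AutoencoderKLWan.from_pretrained(MODEL_ID, subfolder="vae", torch_dtype=torch.float32)

# With transformer/transformer_2 no longer passed explicitly, from_pretrained
# loads both stages from MODEL_ID's own subfolders (in bf16, via torch_dtype)
# instead of the cbensimon/Wan2.2-I2V-A14B-bf16-Diffusers checkpoints, and the
# whole pipeline is moved to the GPU with .to("cuda") rather than per-module
# device_map placement.
pipe = WanPipeline.from_pretrained(
    MODEL_ID,
    vae=vae,
    torch_dtype=torch.bfloat16,
).to("cuda")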