AlekseyCalvin committed
Commit 694df49 · verified · 1 parent: 7716209

Update app.py

Files changed (1)
  1. app.py +11 -0
app.py CHANGED
@@ -28,6 +28,11 @@ os.environ["HF_HOME"] = cache_path
 
 torch.set_float32_matmul_precision("high")
 
+torch._inductor.config.conv_1x1_as_mm = True
+torch._inductor.config.coordinate_descent_tuning = True
+torch._inductor.config.epilogue_fusion = False
+torch._inductor.config.coordinate_descent_check_all_directions = True
+
 # Load LoRAs from JSON file
 with open('loras.json', 'r') as f:
     loras = json.load(f)
@@ -64,6 +69,12 @@ pipe.text_encoder = clip_model.text_model
 pipe.tokenizer_max_length = maxtokens
 pipe.text_encoder.dtype = torch.bfloat16
 
+pipe.transformer.to(memory_format=torch.channels_last)
+pipe.vae.to(memory_format=torch.channels_last)
+
+pipe.transformer = torch.compile(pipe.transformer, mode="max-autotune", fullgraph=True)
+pipe.vae.decode = torch.compile(pipe.vae.decode, mode="max-autotune", fullgraph=True)
+
 MAX_SEED = 2**32-1
 
 class calculateDuration:
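
The added lines follow the usual PyTorch inductor / torch.compile speed-up recipe for diffusers pipelines: compiler tuning flags, channels-last memory format for the transformer and VAE, and compiled forward passes for both. A minimal, self-contained sketch of the same recipe is below; the pipeline class, model id, and prompt are placeholders, not the ones actually used by this app.py (which also swaps in a custom text encoder and loads LoRAs).

# Minimal sketch of the recipe applied in this commit (model id and prompt are placeholders).
import torch
from diffusers import DiffusionPipeline

# Inductor settings set in this commit; they trade longer first-run compile time
# for faster steady-state kernels.
torch._inductor.config.conv_1x1_as_mm = True
torch._inductor.config.coordinate_descent_tuning = True
torch._inductor.config.epilogue_fusion = False
torch._inductor.config.coordinate_descent_check_all_directions = True

# Placeholder model id; any diffusers pipeline exposing `transformer` and `vae` works the same way.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
).to("cuda")

# channels_last layout plus compiled transformer / VAE decode; the first generation
# triggers autotuning, later calls reuse the cached kernels.
pipe.transformer.to(memory_format=torch.channels_last)
pipe.vae.to(memory_format=torch.channels_last)
pipe.transformer = torch.compile(pipe.transformer, mode="max-autotune", fullgraph=True)
pipe.vae.decode = torch.compile(pipe.vae.decode, mode="max-autotune", fullgraph=True)

image = pipe("a placeholder prompt", num_inference_steps=4).images[0]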