KingNish commited on
Commit
b14b18e
·
verified ·
1 Parent(s): a1318fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -9
app.py CHANGED
@@ -51,15 +51,14 @@ pipe_edit.to("cuda")
51
 
52
  # Image Generator
53
  # Keep the models loaded globally for reuse
54
- if torch.cuda.is_available():
55
- pipe = StableDiffusionXLPipeline.from_pretrained(
56
  "fluently/Fluently-XL-v4",
57
  torch_dtype=torch.float16,
58
  use_safetensors=True,
59
  ).to("cuda")
60
- pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
61
- pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
62
- pipe.set_adapters("dalle")
63
 
64
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
65
  if randomize_seed:
@@ -87,7 +86,6 @@ def king(type = "Image Generation",
87
  text_cfg_scale = text_cfg_scale
88
  image_cfg_scale = image_cfg_scale
89
  input_image = input_image
90
-
91
  steps=steps
92
  generator = torch.manual_seed(seed)
93
  output_image = pipe_edit(
@@ -97,7 +95,7 @@ def king(type = "Image Generation",
97
  return seed, output_image
98
  else :
99
  seed = int(randomize_seed_fn(seed, randomize_seed))
100
- generator = torch.Generator(device="cuda").manual_seed(seed) # Move generator to cuda for speed
101
 
102
  options = {
103
  "prompt":instruction,
@@ -110,8 +108,7 @@ def king(type = "Image Generation",
110
  "output_type":"pil",
111
  }
112
 
113
- with torch.autocast("cuda"): # Use autocast for faster inference
114
- output_image = pipe(**options).images[0]
115
  return seed, output_image
116
 
117
  # Prompt classifier
 
51
 
52
  # Image Generator
53
  # Keep the models loaded globally for reuse
54
+ pipe = StableDiffusionXLPipeline.from_pretrained(
 
55
  "fluently/Fluently-XL-v4",
56
  torch_dtype=torch.float16,
57
  use_safetensors=True,
58
  ).to("cuda")
59
+ pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
60
+ pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
61
+ pipe.set_adapters("dalle")
62
 
63
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
64
  if randomize_seed:
 
86
  text_cfg_scale = text_cfg_scale
87
  image_cfg_scale = image_cfg_scale
88
  input_image = input_image
 
89
  steps=steps
90
  generator = torch.manual_seed(seed)
91
  output_image = pipe_edit(
 
95
  return seed, output_image
96
  else :
97
  seed = int(randomize_seed_fn(seed, randomize_seed))
98
+ generator = torch.Generator().manual_seed(seed) # Move generator to cuda for speed
99
 
100
  options = {
101
  "prompt":instruction,
 
108
  "output_type":"pil",
109
  }
110
 
111
+ output_image = pipe(**options).images[0]
 
112
  return seed, output_image
113
 
114
  # Prompt classifier