karimbenharrak committed (verified)
Commit 5f4e1b1 · 1 Parent(s): 27eaf2b

Update handler.py

Files changed (1)
  1. handler.py +5 -4
handler.py CHANGED
@@ -24,6 +24,7 @@ class EndpointHandler():
         # )
         # self.smooth_pipe.to("cuda")
 
+        """
         self.controlnet = ControlNetModel.from_pretrained(
             "lllyasviel/control_v11p_sd15_inpaint", torch_dtype=torch.float16
         )
@@ -35,8 +36,8 @@ class EndpointHandler():
         self.pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(self.pipe.scheduler.config)
         self.pipe.enable_model_cpu_offload()
         self.pipe.enable_xformers_memory_efficient_attention()
-
         """
+
         # load StableDiffusionInpaintPipeline pipeline
         self.pipe = AutoPipelineForInpainting.from_pretrained(
             "runwayml/stable-diffusion-inpainting",
@@ -62,7 +63,6 @@ class EndpointHandler():
         self.pipe3 = AutoPipelineForImage2Image.from_pipe(self.pipe2)
         #self.pipe3.enable_model_cpu_offload()
         self.pipe3.enable_xformers_memory_efficient_attention()
-        """
 
 
     def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
@@ -114,7 +114,7 @@ class EndpointHandler():
         """
 
         #pipe = AutoPipelineForInpainting.from_pretrained("diffusers/stable-diffusion-xl-1.0-inpainting-0.1", torch_dtype=torch.float16, variant="fp16").to("cuda")
-        """
+
         # run inference pipeline
         out = self.pipe(prompt=prompt, negative_prompt=negative_prompt, image=image, mask_image=mask_image)
 
@@ -149,8 +149,8 @@ class EndpointHandler():
 
         # return first generate PIL image
         return image2
+
         """
-
         control_image = self.make_inpaint_condition(image, mask_image)
 
         # generate image
@@ -167,6 +167,7 @@ class EndpointHandler():
         ).images[0]
 
         return image
+        """
 
     # helper to decode input image
     def decode_base64_image(self, image_string):
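Note: the ControlNet path above builds its conditioning input with control_image = self.make_inpaint_condition(image, mask_image), but the helper itself lies outside the diff context. As a point of reference, here is a minimal sketch of what such a helper typically looks like, modeled on the diffusers control_v11p_sd15_inpaint example; the actual implementation in handler.py may differ.

import numpy as np
import torch

def make_inpaint_condition(image, image_mask):
    # convert the RGB image and its single-channel mask to float arrays in [0, 1]
    image = np.array(image.convert("RGB")).astype(np.float32) / 255.0
    image_mask = np.array(image_mask.convert("L")).astype(np.float32) / 255.0
    assert image.shape[0:2] == image_mask.shape[0:2], "image and mask must have the same size"

    # flag masked pixels with -1 so the inpaint ControlNet treats them as the region to fill
    image[image_mask > 0.5] = -1.0

    # HWC -> NCHW tensor, the layout expected for the pipeline's control image input
    image = np.expand_dims(image, 0).transpose(0, 3, 1, 2)
    return torch.from_numpy(image)

The masked region is encoded as -1 rather than cropped out, which matches the convention used in the diffusers inpaint ControlNet example for lllyasviel/control_v11p_sd15_inpaint.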