ngohel58 committed on
Commit b29a12f · verified · 1 Parent(s): 8f71061

Update app.py

Files changed (1):
  1. app.py +225 -40
app.py CHANGED
@@ -1,6 +1,6 @@
  import gradio as gr
  import numpy as np
- from PIL import Image
  from transformers import pipeline

  try:
@@ -8,17 +8,110 @@ try:
  except ImportError:
      cv2 = None

- # Load depth estimation model once
  depth_pipe = pipeline("depth-estimation", model="depth-anything/Depth-Anything-V2-Large-hf")

- # Global state
  current_original_image = None
  current_depth_norm = None
  current_depth_map_pil = None


  def preprocess_depth(depth_norm, smoothing_radius):
-     """Smooth the depth map using bilateral filtering if radius > 0 and cv2 is available."""
      if smoothing_radius > 0 and cv2 is not None:
          depth_uint8 = (depth_norm * 255.0).astype(np.uint8)
          sigma = max(smoothing_radius * 10.0, 1.0)
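The bilateral-filter call itself sits in the unchanged lines between this hunk and the next, so it is not shown here. Going by the removed docstring, the elided body presumably resembles the following sketch (an assumption for illustration, not lines from the commit; d=9 is an arbitrary kernel diameter):

    # Hypothetical continuation of preprocess_depth (the real lines fall outside the hunk):
    # bilateral filtering smooths flat regions while preserving depth discontinuities.
    filtered = cv2.bilateralFilter(depth_uint8, d=9, sigmaColor=sigma, sigmaSpace=sigma)
    return filtered.astype(np.float32) / 255.0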
@@ -27,44 +120,64 @@ def preprocess_depth(depth_norm, smoothing_radius):
      return depth_norm


  def apply_effect(threshold, depth_scale, feather, red_brightness, blue_brightness, gamma,
-                  black_level_percent, white_level_percent, smoothing_percent):
      """
-     Apply chromostereopsis effect using adjustable parameters.
-     threshold: percentage [0,100] controlling blend midpoint.
-     depth_scale: percentage [0,100] controlling steepness of logistic curve.
-     feather: percentage [0,100] affecting the smoothness of the transition.
-     red_brightness, blue_brightness: percentages [0,100] controlling channel intensities.
-     gamma: percentage [0,100] mapped to gamma range [0.1, 3.0].
-     black_level_percent, white_level_percent: percentages mapped to 0..255 levels.
-     smoothing_percent: percentage [0,100] mapped to bilateral filter radius.
      """
      global current_original_image, current_depth_norm
      if current_original_image is None or current_depth_norm is None:
          return None

-     # Levels adjustment
      black_level = black_level_percent * 2.55
      white_level = white_level_percent * 2.55
      gray = np.array(current_original_image.convert("L"), dtype=np.float32)
      denom = max(white_level - black_level, 1e-6)
-     adjusted_gray = (gray - black_level) / denom
-     adjusted_gray = np.clip(adjusted_gray, 0.0, 1.0)

-     # Gamma correction
      gamma_val = 0.1 + (gamma / 100.0) * 2.9
      adjusted_gray = np.clip(adjusted_gray ** gamma_val, 0.0, 1.0)

-     # Smooth depth map
      smoothing_radius = smoothing_percent / 10.0
      depth_smoothed = preprocess_depth(current_depth_norm, smoothing_radius)

-     # Compute blend factor using logistic function
      threshold_norm = threshold / 100.0
      steepness = max(depth_scale, 1e-3)
      feather_norm = feather / 100.0
      steepness_adj = steepness / (feather_norm * 10.0 + 1.0)
-     blend = 1.0 / (1.0 + np.exp(-steepness_adj * (depth_smoothed - threshold_norm)))

      # Map brightness to factors (0-2)
      red_factor = red_brightness / 50.0
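To make the docstring's percentage mappings concrete, a few representative slider values work out as follows (plain arithmetic from the lines above, added for illustration):

    # gamma = 50           -> gamma_val = 0.1 + 0.5 * 2.9 = 1.55
    # black level = 10 %   -> black_level = 10 * 2.55 = 25.5 on the 0..255 gray scale
    # red_brightness = 50  -> red_factor = 50 / 50.0 = 1.0 (range 0..2)
    # depth_scale = 50, feather = 10 -> steepness_adj = 50 / (0.1 * 10 + 1.0) = 25.0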
@@ -81,11 +194,43 @@ def apply_effect(threshold, depth_scale, feather, red_brightness, blue_brightnes
      output[..., 0] = red_img
      output[..., 1] = 0
      output[..., 2] = blue_img
-
      return Image.fromarray(output, mode="RGB")


- def generate_depth_map(input_image):
      """Generate normalized depth map and initial effect image."""
      global current_original_image, current_depth_norm, current_depth_map_pil
      if input_image is None:
@@ -93,8 +238,9 @@ def generate_depth_map(input_image):
          current_depth_norm = None
          current_depth_map_pil = None
          return None, None
      current_original_image = input_image
-     # Run depth estimation
      result = depth_pipe(input_image)
      depth = np.array(result["depth"], dtype=np.float32)
      depth -= depth.min()
@@ -103,6 +249,11 @@
      depth /= max_val
      current_depth_norm = depth
      current_depth_map_pil = Image.fromarray((depth * 255.0).astype(np.uint8), mode="L")
      # Default effect parameters
      effect = apply_effect(
          threshold=50,
@@ -114,13 +265,20 @@
          black_level_percent=0,
          white_level_percent=100,
          smoothing_percent=0,
      )
      return current_depth_map_pil.convert("RGB"), effect


  def update_effect(threshold, depth_scale, feather, red_brightness, blue_brightness,
-                    gamma, black_level, white_level, smoothing):
-     """Update the effect when any slider changes."""
      return apply_effect(
          threshold=threshold,
          depth_scale=depth_scale,
@@ -131,54 +289,81 @@ def update_effect(threshold, depth_scale, feather, red_brightness, blue_brightne
          black_level_percent=black_level,
          white_level_percent=white_level,
          smoothing_percent=smoothing,
      )


  def clear_results():
      """Reset global state and clear outputs."""
      global current_original_image, current_depth_norm, current_depth_map_pil
      current_original_image = None
      current_depth_norm = None
      current_depth_map_pil = None
      return None, None


  with gr.Blocks(title="ChromoStereoizer Enhanced", theme=gr.themes.Soft()) as demo:
-     gr.Markdown("# ChromoStereoizer Enhanced")
      with gr.Row():
          with gr.Column(scale=1):
              input_image = gr.Image(label="Upload Image", type="pil", height=400)
              generate_btn = gr.Button("Generate Depth Map", variant="primary", size="lg")
          with gr.Column(scale=1):
              gr.Markdown("**Depth Map**")
              depth_output = gr.Image(type="pil", height=400, interactive=False, show_download_button=True, show_label=False)
              gr.Markdown("**ChromoStereoizer Result**")
              chromo_output = gr.Image(type="pil", height=400, interactive=False, show_download_button=True, show_label=False)
      gr.Markdown("## Controls")
-     threshold_slider = gr.Slider(minimum=0, maximum=100, value=50, step=1, label="Threshold (%)")
-     depth_scale_slider = gr.Slider(minimum=0, maximum=100, value=50, step=1, label="Depth Scale (Steepness)")
-     feather_slider = gr.Slider(minimum=0, maximum=100, value=10, step=1, label="Feather (%)")
-     red_slider = gr.Slider(minimum=0, maximum=100, value=50, step=1, label="Red Brightness")
-     blue_slider = gr.Slider(minimum=0, maximum=100, value=50, step=1, label="Blue Brightness")
-     gamma_slider = gr.Slider(minimum=0, maximum=100, value=50, step=1, label="Gamma")
-     black_slider = gr.Slider(minimum=0, maximum=100, value=0, step=1, label="Black Level (%)")
-     white_slider = gr.Slider(minimum=0, maximum=100, value=100, step=1, label="White Level (%)")
-     smoothing_slider = gr.Slider(minimum=0, maximum=100, value=0, step=1, label="Smoothing (%)")
      clear_btn = gr.Button("Clear", variant="secondary")
-     # Event bindings
      generate_btn.click(
          fn=generate_depth_map,
-         inputs=[input_image],
          outputs=[depth_output, chromo_output],
          show_progress=True,
      )
-     for slider in [threshold_slider, depth_scale_slider, feather_slider, red_slider, blue_slider, gamma_slider, black_slider, white_slider, smoothing_slider]:
-         slider.change(
              fn=update_effect,
-             inputs=[threshold_slider, depth_scale_slider, feather_slider, red_slider, blue_slider, gamma_slider, black_slider, white_slider, smoothing_slider],
              outputs=chromo_output,
              show_progress=False,
          )
      clear_btn.click(
          fn=clear_results,
          inputs=[],
@@ -1,6 +1,6 @@
  import gradio as gr
  import numpy as np
+ from PIL import Image, ImageFilter
  from transformers import pipeline

  try:
@@ -8,17 +8,110 @@ try:
  except ImportError:
      cv2 = None

+ # --- Models ---
  depth_pipe = pipeline("depth-estimation", model="depth-anything/Depth-Anything-V2-Large-hf")

+ # A robust, widely available panoptic segmentation model in transformers
+ try:
+     seg_pipe = pipeline("image-segmentation", model="facebook/detr-resnet-50-panoptic")
+ except Exception:
+     seg_pipe = None  # We'll handle gracefully
+
+ # --- Global state ---
  current_original_image = None
  current_depth_norm = None
  current_depth_map_pil = None
+ current_near_softmask = None  # float32 0..1, soft foreground mask
+ current_edge_band = None      # float32 0..1, where to snap edges
+
+
+ # ---------- Utilities ----------
+ def _np_from_pil_gray(img_pil):
+     return np.array(img_pil.convert("L"), dtype=np.float32) / 255.0
+
+ def _ensure_float01(x):
+     x = x.astype(np.float32)
+     if x.max() > 1.0 or x.min() < 0.0:
+         x = (x - x.min()) / max(x.max() - x.min(), 1e-6)
+     return x
+
+ def _soften_mask(mask01, sigma_px=2.0):
+     """Gaussian blur softening for a binary mask in [0,1]."""
+     if cv2 is not None:
+         return _ensure_float01(cv2.GaussianBlur(mask01, (0, 0), sigmaX=max(sigma_px, 1e-6)))
+     # PIL fallback
+     pil = Image.fromarray((mask01 * 255).astype(np.uint8), mode="L").filter(ImageFilter.GaussianBlur(radius=float(sigma_px)))
+     return np.array(pil, dtype=np.float32) / 255.0
+
+ def _edge_band_from_mask(mask01, band_px=6):
+     """Thin band around mask boundary where we enforce snapping."""
+     m = (mask01 > 0.5).astype(np.uint8)
+     if cv2 is not None:
+         k = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
+         edge = cv2.morphologyEx(m, cv2.MORPH_GRADIENT, k)  # 1px outline
+         if band_px > 1:
+             edge = cv2.dilate(edge, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (int(band_px), int(band_px))))
+         return edge.astype(np.float32)
+     # PIL fallback: crude edge + dilation
+     pil = Image.fromarray(m * 255, mode="L").filter(ImageFilter.FIND_EDGES)
+     arr = (np.array(pil) > 0).astype(np.uint8)
+     # simple max filter dilation
+     for _ in range(max(int(band_px // 2), 1)):
+         arr = np.maximum.reduce([
+             np.pad(arr, ((1, 0), (0, 0)))[:-1, :],
+             np.pad(arr, ((0, 1), (0, 0)))[1:, :],
+             np.pad(arr, ((0, 0), (1, 0)))[:, :-1],
+             np.pad(arr, ((0, 0), (0, 1)))[:, 1:],
+             arr
+         ])
+     return arr.astype(np.float32)
+
+ def _pick_nearest_segment_mask(segments, depth01, invert_depth=False):
+     """
+     Given pipeline outputs (each with a 'mask' PIL) and normalized depth map,
+     choose the segment with smallest median *distance to camera*.
+     If invert_depth=True, we treat 1-depth as distance.
+     """
+     if not segments:
+         return None
+
+     h, w = depth01.shape
+     best_mask = None
+     best_score = None
+
+     # Interpret "nearness": if invert_depth True, higher values mean near
+     near_map = (1.0 - depth01) if invert_depth else depth01
+     # If your model outputs "larger = farther", set invert_depth=True via UI
+
+     for seg in segments:
+         m = np.array(seg["mask"].resize((w, h)).convert("L")) > 127
+         if m.sum() < 50:  # ignore tiny segments
+             continue
+         # Higher median(nearness) = nearer
+         score = float(np.median(near_map[m]))
+         if (best_score is None) or (score > best_score):
+             best_score = score
+             best_mask = m.astype(np.float32)
+
+     if best_mask is None:
+         return None
+     return _ensure_float01(best_mask)

+ def _depth_only_near_mask(depth01, near_percent=15, invert_depth=False):
+     """
+     Fallback: use top N% nearest pixels (by depth) as a binary mask.
+     near_percent in [1..40] typical.
+     """
+     flat = depth01.flatten()
+     if invert_depth:
+         flat = 1.0 - flat
+     q = np.quantile(flat, 1.0 - (near_percent / 100.0))
+     m = ((1.0 - depth01) if invert_depth else depth01) >= q
+     return m.astype(np.float32)
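A minimal sketch of how these mask helpers combine on a synthetic depth map (illustrative only; the toy array and parameter values are assumptions, not part of the commit):

    # Toy 4x4 depth map whose right column is nearest (highest values).
    toy_depth = np.array([[0.1, 0.2, 0.8, 0.9]] * 4, dtype=np.float32)
    near = _depth_only_near_mask(toy_depth, near_percent=25)  # keeps roughly the nearest quarter
    soft = _soften_mask(near, sigma_px=1.0)                   # feathered 0..1 foreground mask
    band = _edge_band_from_mask(near, band_px=2)              # thin band hugging the mask boundary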

+
+ # ---------- Preprocess depth ----------
  def preprocess_depth(depth_norm, smoothing_radius):
      if smoothing_radius > 0 and cv2 is not None:
          depth_uint8 = (depth_norm * 255.0).astype(np.uint8)
          sigma = max(smoothing_radius * 10.0, 1.0)
@@ -27,44 +120,64 @@ def preprocess_depth(depth_norm, smoothing_radius):
      return depth_norm


+ # ---------- Effect ----------
  def apply_effect(threshold, depth_scale, feather, red_brightness, blue_brightness, gamma,
+                  black_level_percent, white_level_percent, smoothing_percent,
+                  use_segmentation, edge_snap_strength, edge_band_px,
+                  invert_depth, near_percent):
      """
+     Adds segmentation-assisted edge snapping:
+     - use_segmentation: run / use segmentation-assisted mask if available
+     - edge_snap_strength: 0..100 weight of snapping in edge band
+     - edge_band_px: band width around boundary to force crisp transition
+     - invert_depth: flip near/far interpretation if needed
+     - near_percent: fallback mask when segmentation is off/failed
      """
      global current_original_image, current_depth_norm
+     global current_near_softmask, current_edge_band
+
      if current_original_image is None or current_depth_norm is None:
          return None

+     # Levels adjustment (same as yours)
      black_level = black_level_percent * 2.55
      white_level = white_level_percent * 2.55
      gray = np.array(current_original_image.convert("L"), dtype=np.float32)
      denom = max(white_level - black_level, 1e-6)
+     adjusted_gray = np.clip((gray - black_level) / denom, 0.0, 1.0)

+     # Gamma
      gamma_val = 0.1 + (gamma / 100.0) * 2.9
      adjusted_gray = np.clip(adjusted_gray ** gamma_val, 0.0, 1.0)

+     # Depth smoothing
      smoothing_radius = smoothing_percent / 10.0
      depth_smoothed = preprocess_depth(current_depth_norm, smoothing_radius)
+     depth_for_blend = (1.0 - depth_smoothed) if invert_depth else depth_smoothed

+     # Logistic blend from depth
      threshold_norm = threshold / 100.0
      steepness = max(depth_scale, 1e-3)
      feather_norm = feather / 100.0
      steepness_adj = steepness / (feather_norm * 10.0 + 1.0)
+     blend = 1.0 / (1.0 + np.exp(-steepness_adj * (depth_for_blend - threshold_norm)))
+
+     # Edge snapping: mix blend with (soft) near-object mask ONLY within edge band
+     snap_w = np.clip(edge_snap_strength / 100.0, 0.0, 1.0)
+
+     if current_near_softmask is None:
+         # Build a fallback mask so snapping can still help
+         fallback = _depth_only_near_mask(current_depth_norm, near_percent=near_percent, invert_depth=invert_depth)
+         current_near_softmask_local = _soften_mask(fallback, sigma_px=max(edge_band_px / 2.0, 1.0))
+         current_edge_band_local = _edge_band_from_mask(fallback, band_px=max(int(edge_band_px), 1))
+     else:
+         current_near_softmask_local = current_near_softmask
+         current_edge_band_local = current_edge_band
+
+     if snap_w > 0.0:
+         # Per-pixel alpha only near edges
+         per_pixel_alpha = snap_w * _ensure_float01(current_edge_band_local)
+         blend = (1.0 - per_pixel_alpha) * blend + per_pixel_alpha * _ensure_float01(current_near_softmask_local)

      # Map brightness to factors (0-2)
      red_factor = red_brightness / 50.0
@@ -81,11 +194,43 @@ def apply_effect(threshold, depth_scale, feather, red_brightness, blue_brightnes
      output[..., 0] = red_img
      output[..., 1] = 0
      output[..., 2] = blue_img
      return Image.fromarray(output, mode="RGB")
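Numerically, with Threshold = 50 and Depth Scale = 50 at Feather = 10 (so threshold_norm = 0.5 and steepness_adj = 25), the logistic blend and the edge-snap mix behave roughly like this (worked values, added for illustration):

    # depth 0.30 -> blend = 1 / (1 + exp(-25 * (0.30 - 0.50))) ≈ 0.007
    # depth 0.50 -> blend = 0.5   (the midpoint set by Threshold)
    # depth 0.70 -> blend ≈ 0.993
    # Inside the edge band, Edge Snap Strength = 60 pulls the value 60% of the way
    # toward the soft near-object mask: blend' = 0.4 * blend + 0.6 * near_softmask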


+ # ---------- Pipeline steps ----------
+ def _compute_segmentation_assist(img_pil, depth01, invert_depth, edge_band_px, near_percent):
+     """
+     Build current_near_softmask and current_edge_band using segmentation if enabled;
+     else fallback to depth-only near mask.
+     """
+     global current_near_softmask, current_edge_band
+
+     h, w = depth01.shape
+     near_mask = None
+
+     if seg_pipe is not None:
+         try:
+             segs = seg_pipe(img_pil)
+             # Some models return dict with 'segments'; normalize to list
+             if isinstance(segs, dict) and "segments_info" in segs and "segmentation" in segs:
+                 # Panoptic map; fall back to simple depth-based mask
+                 segs = []  # transformers panoptic map formats vary; keep generic path below
+             # segs should be a list of dicts, each with a 'mask' PIL
+             candidates = [s for s in segs if isinstance(s.get("mask", None), Image.Image)]
+             near_mask = _pick_nearest_segment_mask(candidates, depth01, invert_depth=invert_depth)
+         except Exception:
+             near_mask = None
+
+     if near_mask is None:
+         # Fallback: depth-only near mask
+         near_mask = _depth_only_near_mask(depth01, near_percent=near_percent, invert_depth=invert_depth)
+
+     # Build soft mask + edge band
+     current_near_softmask = _soften_mask(near_mask, sigma_px=max(edge_band_px / 2.0, 1.0))
+     current_edge_band = _edge_band_from_mask(near_mask, band_px=max(int(edge_band_px), 1))
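A small sketch of the selection step this function delegates to _pick_nearest_segment_mask: with hand-built masks standing in for seg_pipe output (the shapes and labels below are assumptions), the segment whose pixels have the highest median nearness wins:

    # 100x100 toy depth map: the right half is near (0.8), the left half far (0.2).
    toy_depth = np.full((100, 100), 0.2, dtype=np.float32)
    toy_depth[:, 50:] = 0.8
    left_arr = np.zeros((100, 100), dtype=np.uint8)
    left_arr[:, :50] = 255
    right_arr = np.zeros((100, 100), dtype=np.uint8)
    right_arr[:, 50:] = 255
    segments = [
        {"label": "far thing", "mask": Image.fromarray(left_arr, mode="L")},
        {"label": "near thing", "mask": Image.fromarray(right_arr, mode="L")},
    ]
    near_mask = _pick_nearest_segment_mask(segments, toy_depth)  # returns the right-half mask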
+
+
+ def generate_depth_map(input_image, use_segmentation, edge_band_px, invert_depth, near_percent):
      """Generate normalized depth map and initial effect image."""
      global current_original_image, current_depth_norm, current_depth_map_pil
      if input_image is None:
@@ -93,8 +238,9 @@ def generate_depth_map(input_image):
          current_depth_norm = None
          current_depth_map_pil = None
          return None, None
+
      current_original_image = input_image
+     # Depth estimation
      result = depth_pipe(input_image)
      depth = np.array(result["depth"], dtype=np.float32)
      depth -= depth.min()
@@ -103,6 +249,11 @@
      depth /= max_val
      current_depth_norm = depth
      current_depth_map_pil = Image.fromarray((depth * 255.0).astype(np.uint8), mode="L")
+
+     # Build segmentation assist (or fallback)
+     if use_segmentation or True:  # we compute once so UI changes can immediately work
+         _compute_segmentation_assist(input_image, current_depth_norm, invert_depth, edge_band_px, near_percent)
+
      # Default effect parameters
      effect = apply_effect(
          threshold=50,
@@ -114,13 +265,20 @@
          black_level_percent=0,
          white_level_percent=100,
          smoothing_percent=0,
+         use_segmentation=use_segmentation,
+         edge_snap_strength=60,
+         edge_band_px=edge_band_px,
+         invert_depth=invert_depth,
+         near_percent=near_percent,
      )
      return current_depth_map_pil.convert("RGB"), effect
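For a quick check outside the Gradio UI, the generate step can be exercised directly; the image path below is a placeholder and both models are downloaded on first use:

    # Hypothetical headless run of the new two-output generate step.
    img = Image.open("example.jpg").convert("RGB")
    depth_vis, anaglyph = generate_depth_map(
        img, use_segmentation=True, edge_band_px=6, invert_depth=False, near_percent=15
    )
    depth_vis.save("depth_map.png")
    anaglyph.save("chromo_result.png")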


  def update_effect(threshold, depth_scale, feather, red_brightness, blue_brightness,
+                    gamma, black_level, white_level, smoothing,
+                    use_segmentation, edge_snap_strength, edge_band_px,
+                    invert_depth, near_percent):
+     """Update the effect when any control changes."""
      return apply_effect(
          threshold=threshold,
          depth_scale=depth_scale,
@@ -131,54 +289,81 @@ def update_effect(threshold, depth_scale, feather, red_brightness, blue_brightne
          black_level_percent=black_level,
          white_level_percent=white_level,
          smoothing_percent=smoothing,
+         use_segmentation=use_segmentation,
+         edge_snap_strength=edge_snap_strength,
+         edge_band_px=edge_band_px,
+         invert_depth=invert_depth,
+         near_percent=near_percent,
      )


  def clear_results():
      """Reset global state and clear outputs."""
      global current_original_image, current_depth_norm, current_depth_map_pil
+     global current_near_softmask, current_edge_band
      current_original_image = None
      current_depth_norm = None
      current_depth_map_pil = None
+     current_near_softmask = None
+     current_edge_band = None
      return None, None


+ # ---------- UI ----------
  with gr.Blocks(title="ChromoStereoizer Enhanced", theme=gr.themes.Soft()) as demo:
+     gr.Markdown("# ChromoStereoizer Enhanced (Segmentation-Assisted)")
      with gr.Row():
          with gr.Column(scale=1):
              input_image = gr.Image(label="Upload Image", type="pil", height=400)
+             with gr.Accordion("Segmentation & Mask Options", open=False):
+                 use_segmentation = gr.Checkbox(value=True, label="Use segmentation-assisted edge snapping (falls back to depth-only)")
+                 edge_band_px = gr.Slider(1, 20, value=6, step=1, label="Edge Band Width (px)")
+                 invert_depth = gr.Checkbox(value=False, label="Invert depth (toggle if near/far feels flipped)")
+                 near_percent = gr.Slider(1, 40, value=15, step=1, label="Fallback: top N% nearest pixels")
              generate_btn = gr.Button("Generate Depth Map", variant="primary", size="lg")
+
          with gr.Column(scale=1):
              gr.Markdown("**Depth Map**")
              depth_output = gr.Image(type="pil", height=400, interactive=False, show_download_button=True, show_label=False)
              gr.Markdown("**ChromoStereoizer Result**")
              chromo_output = gr.Image(type="pil", height=400, interactive=False, show_download_button=True, show_label=False)
      gr.Markdown("## Controls")
+     threshold_slider = gr.Slider(0, 100, value=50, step=1, label="Threshold (%)")
+     depth_scale_slider = gr.Slider(0, 100, value=50, step=1, label="Depth Scale (Steepness)")
+     feather_slider = gr.Slider(0, 100, value=10, step=1, label="Feather (%)")
+     red_slider = gr.Slider(0, 100, value=50, step=1, label="Red Brightness")
+     blue_slider = gr.Slider(0, 100, value=50, step=1, label="Blue Brightness")
+     gamma_slider = gr.Slider(0, 100, value=50, step=1, label="Gamma")
+     black_slider = gr.Slider(0, 100, value=0, step=1, label="Black Level (%)")
+     white_slider = gr.Slider(0, 100, value=100, step=1, label="White Level (%)")
+     smoothing_slider = gr.Slider(0, 100, value=0, step=1, label="Depth Smoothing (%)")
+     edge_snap_strength = gr.Slider(0, 100, value=60, step=1, label="Edge Snap Strength (%)")
      clear_btn = gr.Button("Clear", variant="secondary")
+
+     # Events
      generate_btn.click(
          fn=generate_depth_map,
+         inputs=[input_image, use_segmentation, edge_band_px, invert_depth, near_percent],
          outputs=[depth_output, chromo_output],
          show_progress=True,
      )
+
+     for ctrl in [
+         threshold_slider, depth_scale_slider, feather_slider, red_slider, blue_slider,
+         gamma_slider, black_slider, white_slider, smoothing_slider,
+         use_segmentation, edge_snap_strength, edge_band_px, invert_depth, near_percent
+     ]:
+         ctrl.change(
              fn=update_effect,
+             inputs=[
+                 threshold_slider, depth_scale_slider, feather_slider, red_slider, blue_slider,
+                 gamma_slider, black_slider, white_slider, smoothing_slider,
+                 use_segmentation, edge_snap_strength, edge_band_px, invert_depth, near_percent
+             ],
              outputs=chromo_output,
              show_progress=False,
          )
+
      clear_btn.click(
          fn=clear_results,
          inputs=[],