NihalGazi committed on
Commit
6a45047
·
verified ·
1 Parent(s): c4cbd8f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +108 -22
app.py CHANGED
@@ -1,34 +1,120 @@
1
  import gradio as gr
2
- from faceflux import process_video
 
 
 
 
 
 
3
 
4
- css = """video { object-fit: contain !important; }"""
5
 
6
- with gr.Blocks(css=css) as iface:
7
- gr.Markdown("# Super Fast Face Swap FACEFLUX")
8
- gr.Markdown(
9
- "**FACEFLUX**: Ultra-lightweight, CPU-only face swap. "
10
- "Ideal for small or distant faces; offline & privacy-preserving. "
11
- "Weakness: large up-close or extreme angles."
12
- )
 
 
13
 
14
- with gr.Row():
15
- vid = gr.Video(label="Input Video")
16
- ref = gr.Image(type="numpy", label="Reference Image")
 
 
 
 
 
 
 
 
 
17
 
18
- with gr.Row():
19
- res = gr.Dropdown([256,384,512,768], value=512, label="Resolution")
20
- quality = gr.Slider(1,4, value=1, step=1, label="Swap Quality")
21
- feather = gr.Slider(0.12, 0.24, value=0.12, step=0.01, label="Feather (%)")
22
- strength = gr.Slider(-0.35, -0.15, value=-0.25, step=0.05, label="Strength")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
- btn = gr.Button("Generate Morph 🚀")
25
- out_vid = gr.Video(label="Morphed Video")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
 
27
  btn.click(
28
  fn=process_video,
29
- inputs=[vid, ref, strength, res, quality, feather, 0.24],
30
- outputs=[out_vid],
31
  show_progress=True
32
  )
33
 
34
- iface.queue().launch(debug=True)
 
 
 
 
1
  import gradio as gr
2
+ import cv2
3
+ import numpy as np
4
+ import mediapipe as mp
5
+ import time
6
+ import tempfile
7
+ import os
8
+ import faceflux
9
 
 
10
 
11
def process_video(
    video_path, ref_img, trans, res, step, feather_pct, padding_pct,
    progress=gr.Progress()
):
    """Morph the face in each frame of *video_path* toward the face in *ref_img*.

    Parameters
    ----------
    video_path : str — path to the input video file.
    ref_img : np.ndarray — reference image in RGB (as delivered by gr.Image).
    trans : float — transition level in [-1, 1]; mapped to morph alpha in [0, 1].
    res : int — working resolution for the face crop (square).
    step : int — landmark sub-sampling step passed through to morph_faces.
    feather_pct, padding_pct : float — relative mask feather/padding percentages.
    progress : gr.Progress — Gradio progress reporter.

    Returns
    -------
    (video_path, first_crop_rgb, masked_ref_rgb, masked_morph_rgb), or
    (None, None, None, None) when no face is found in the reference image.

    NOTE(review): `get_face_mask_box` and `morph_faces` are called unqualified;
    they must be defined elsewhere in this module (the `import faceflux` at the
    top is otherwise unused here) — confirm against the full file.
    """
    cap = cv2.VideoCapture(video_path)
    try:
        fps = cap.get(cv2.CAP_PROP_FPS) or 24  # some containers report 0 fps
        total = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
        progress(0.0, desc="Initializing")

        # --- Prepare masked reference ---
        ref_bgr = cv2.cvtColor(ref_img, cv2.COLOR_RGB2BGR)
        mask_ref, ref_box = get_face_mask_box(ref_bgr, feather_pct, padding_pct)
        if mask_ref is None:
            progress(None)  # hide progress bar on error
            # BUG FIX: original leaked the VideoCapture on this early return;
            # the enclosing try/finally now guarantees cap.release().
            return None, None, None, None
        x_r, y_r, w_r, h_r = ref_box
        ref_cut = ref_bgr[y_r:y_r + h_r, x_r:x_r + w_r]
        mask_ref_norm = mask_ref.astype(np.float32)[..., None] / 255.0
        ref_masked = (ref_cut.astype(np.float32) * mask_ref_norm).astype(np.uint8)
        ref_morph = cv2.resize(ref_masked, (res, res))
        progress(0.1, desc="Reference ready")

        # --- Output setup ---
        w_o = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        h_o = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        tmp_vid = tempfile.NamedTemporaryFile(delete=False, suffix='.mp4').name
        out_vid = cv2.VideoWriter(tmp_vid, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w_o, h_o))

        first_crop = first_ref = first_mask = first_morphed = None

        # Guard: CAP_PROP_FRAME_COUNT can legitimately be 0 for some streams.
        denom = max(total, 1)

        # --- Frame-by-frame processing ---
        try:
            for i in range(total):
                ret, frame = cap.read()
                if not ret:
                    break
                progress(0.1 + 0.8 * (i / denom), desc=f"Processing frame {i+1}/{total}")

                mask_roi, box = get_face_mask_box(frame, feather_pct, padding_pct)
                if mask_roi is None:
                    # No face in this frame: pass it through untouched.
                    out_vid.write(frame)
                    continue
                x, y, w, h = box
                crop = frame[y:y + h, x:x + w]
                crop_resized = cv2.resize(crop, (res, res))
                # Map trans in [-1, 1] onto morph alpha in [0, 1].
                alpha = float(np.clip((trans + 1) / 2, 0, 1))
                mor = morph_faces(crop_resized, ref_morph, alpha, res, step)

                if i == 0:
                    # Keep first-frame intermediates for the preview outputs.
                    first_crop = crop_resized.copy()
                    first_ref = ref_morph.copy()
                    first_mask = cv2.resize(mask_roi, (res, res), interpolation=cv2.INTER_LINEAR)
                    first_morphed = mor.copy()

                # Composite the morphed face back into the frame via the mask.
                mor_back = cv2.resize(mor, (w, h))
                mask_n = mask_roi.astype(np.float32)[..., None] / 255.0
                region = frame[y:y + h, x:x + w].astype(np.float32)
                blended = region * (1 - mask_n) + mor_back.astype(np.float32) * mask_n
                frame[y:y + h, x:x + w] = blended.astype(np.uint8)
                out_vid.write(frame)
        finally:
            out_vid.release()  # BUG FIX: also released if processing raises
    finally:
        cap.release()  # BUG FIX: released on every path, including errors

    # --- First-frame preview outputs ---
    if first_morphed is not None and first_mask is not None:
        mask_n0 = first_mask.astype(np.float32)[..., None] / 255.0
        first_morphed = (first_morphed.astype(np.float32) * mask_n0).astype(np.uint8)
    else:
        # BUG FIX: original used `first_crop or zero`, which raises
        # "truth value of an array is ambiguous" whenever the operand is a
        # NumPy array; use explicit None checks instead.
        zero = np.zeros((res, res, 3), dtype=np.uint8)
        first_crop = zero if first_crop is None else first_crop
        first_ref = ref_morph if first_ref is None else first_ref
        first_morphed = zero

    progress(1.0, desc="Done")
    return (
        tmp_vid,
        cv2.cvtColor(first_crop, cv2.COLOR_BGR2RGB),
        cv2.cvtColor(first_ref, cv2.COLOR_BGR2RGB),
        cv2.cvtColor(first_morphed, cv2.COLOR_BGR2RGB),
    )
91
# --- Gradio App ---
# Keep video/image previews letterboxed instead of cropped.
css = """video, img { object-fit: contain !important; }"""
with gr.Blocks(css=css) as iface:
    gr.Markdown("# Morph with Face-Shaped Composite and Padding Percentage")
    # Inputs: the source video and the reference face image.
    with gr.Row():
        vid = gr.Video(label='Input Video')
        ref = gr.Image(type='numpy', label='Reference Image')
    # Tuning controls; feather/padding are percentages relative to the face box.
    with gr.Row():
        res = gr.Dropdown([256,384,512,768], value=512, label='Resolution')
        step = gr.Slider(1,4,value=4,step=1,label='Landmark Sub-sampling')
        feather = gr.Slider(0.0,0.5,value=0.1,step=0.01,label='Feather (%)')
        padding = gr.Slider(0.0,0.5,value=0.24,step=0.01,label='Padding (%)')
        trans = gr.Slider(-1.0,1.0,value=-0.35,step=0.05,label='Transition Level')
    btn = gr.Button('Generate Morph 🚀')
    # Outputs: final video plus three first-frame debug previews.
    out_vid = gr.Video(label='Morphed Video')
    out_crop = gr.Image(label='First Frame Crop')
    out_ref = gr.Image(label='Masked Reference')
    out_morph = gr.Image(label='Masked Morphed First Frame')

    # Wire the button to process_video; input order must match its signature
    # (video_path, ref_img, trans, res, step, feather_pct, padding_pct).
    btn.click(
        fn=process_video,
        inputs=[vid, ref, trans, res, step, feather, padding],
        outputs=[out_vid, out_crop, out_ref, out_morph],
        show_progress=True
    )

    gr.Markdown("---\n*Default values set and feather/padding are now relative percentages.*")

# Enable queueing so progress updates render
iface.queue().launch(debug=True)