Commit · 470cc6a
Parent(s): c215e6f
FER_dinamic_LSTM_IEMOCAP.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0cd1561a72f9de26c315bb857f03e8946635db047e0dbea52bb0276610f19751
+size 11569208
FER_static_ResNet50_AffectNet.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8274190b5be4355bd2f07b59f593fcdb294f9d7c563bfa9ac9e5ea06c10692d2
+size 98562934
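Both .pt entries above are Git LFS pointer files (spec v1): the commit stores only the object hash and byte size, while the actual weights are fetched from LFS storage (e.g., via git lfs pull) when the Space is cloned. Below is a minimal loading sketch, assuming the checkpoints sit at the repository root and deserialize with plain torch.load; whether they contain full modules or state_dicts for the classes in app/model_architectures.py is not visible from the pointers.

import torch

# Paths of the two checkpoints added in this commit (repository root).
STATIC_CKPT = "FER_static_ResNet50_AffectNet.pt"
DYNAMIC_CKPT = "FER_dinamic_LSTM_IEMOCAP.pt"

# Load on CPU first; move to GPU afterwards if one is available.
static_ckpt = torch.load(STATIC_CKPT, map_location="cpu")
dynamic_ckpt = torch.load(DYNAMIC_CKPT, map_location="cpu")

# Inspect what was serialized (full nn.Module vs. state_dict) before
# wiring the weights into the model classes used by app.py.
print(type(static_ckpt), type(dynamic_ckpt))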
app.py
CHANGED
@@ -24,22 +24,22 @@ def clear_dynamic_info():
     )
 
 with gr.Blocks(css="app.css") as demo:
-    with gr.Tab("
+    with gr.Tab("动态视频分析"):
         gr.Markdown(value=DESCRIPTION_DYNAMIC)
         with gr.Row():
             with gr.Column(scale=2):
                 input_video = gr.Video(elem_classes="video1")
                 with gr.Row():
                     clear_btn_dynamic = gr.Button(
-                        value="
+                        value="清楚", interactive=True, scale=1
                     )
                     submit_dynamic = gr.Button(
-                        value="
+                        value="提交", interactive=True, scale=1, elem_classes="提交"
                     )
             with gr.Column(scale=2, elem_classes="dl4"):
                 with gr.Row():
-                    output_video = gr.Video(label="Original video", scale=1, elem_classes="video2")
-                    output_face = gr.Video(label="Pre-processed video", scale=1, elem_classes="video3")
+                    #output_video = gr.Video(label="Original video", scale=1, elem_classes="video2")
+                    #output_face = gr.Video(label="Pre-processed video", scale=1, elem_classes="video3")
                     output_heatmaps = gr.Video(label="Heatmaps", scale=1, elem_classes="video4")
                 output_statistics = gr.Plot(label="Statistics of emotions", elem_classes="stat")
     #gr.Examples(
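The change above swaps the dynamic-tab UI strings for Chinese labels (tab "动态视频分析", buttons "清楚" and "提交") and comments out the original-video and pre-processed-video outputs, leaving only the heatmap video and the emotion-statistics plot. The click handlers are outside this hunk; the following is a minimal, self-contained sketch of how the remaining components would typically be wired in Gradio Blocks, with analyze_video as a hypothetical stand-in for the app's real prediction function.

import gradio as gr

def analyze_video(video_path):
    # Hypothetical stand-in for the app's real handler: it should return a
    # heatmap video path and a figure for the statistics plot.
    return None, None

with gr.Blocks() as demo:  # the real app also passes css="app.css"
    with gr.Tab("动态视频分析"):
        input_video = gr.Video(elem_classes="video1")
        with gr.Row():
            clear_btn_dynamic = gr.Button(value="清楚", interactive=True, scale=1)
            submit_dynamic = gr.Button(value="提交", interactive=True, scale=1)
        output_heatmaps = gr.Video(label="Heatmaps", elem_classes="video4")
        output_statistics = gr.Plot(label="Statistics of emotions", elem_classes="stat")

    # Submit runs the analysis; clear resets the input and both outputs.
    submit_dynamic.click(fn=analyze_video, inputs=input_video,
                         outputs=[output_heatmaps, output_statistics])
    clear_btn_dynamic.click(fn=lambda: (None, None, None), inputs=None,
                            outputs=[input_video, output_heatmaps, output_statistics])

demo.launch()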
app/__pycache__/face_utils.cpython-310.pyc
ADDED
Binary file (2.18 kB)
app/__pycache__/model.cpython-310.pyc
ADDED
Binary file (2.74 kB)
app/__pycache__/model_architectures.cpython-310.pyc
ADDED
Binary file (5.22 kB)
app/__pycache__/plot.cpython-310.pyc
ADDED
Binary file (933 Bytes)