import gradio as gr
import torch
from PIL import Image
import os
import yolov9

HTML_TEMPLATE = """
<style>
body {
    background: linear-gradient(135deg, #1a2a6c, #b21f1f, #fdbb2d);
    font-family: 'Roboto', sans-serif;
    color: #ecf0f1;
    min-height: 100vh;
}
#app-header {
    text-align: center;
    background: rgba(26, 42, 108, 0.8);
    padding: 40px;
    border-radius: 20px;
    box-shadow: 0 15px 30px rgba(0, 0, 0, 0.4);
    position: relative;
    overflow: hidden;
    margin-bottom: 40px;
    backdrop-filter: blur(10px);
}
#app-header::before {
    content: "";
    position: absolute;
    top: -50%;
    left: -50%;
    width: 200%;
    height: 200%;
    background: radial-gradient(circle, rgba(253,187,45,0.2) 0%, rgba(253,187,45,0) 70%);
    animation: shimmer 20s infinite linear;
}
@keyframes shimmer {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
}
#app-header h1 {
    color: #fdbb2d;
    font-size: 3em;
    margin-bottom: 20px;
    text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
}
#app-header p {
    font-size: 1.3em;
    color: #ecf0f1;
}
.feature-container {
    display: flex;
    justify-content: center;
    gap: 40px;
    margin-top: 40px;
    flex-wrap: wrap;
}
.feature {
    position: relative;
    transition: all 0.4s ease;
    border-radius: 15px;
    overflow: hidden;
    background: rgba(178, 31, 31, 0.7);
    box-shadow: 0 8px 20px rgba(0,0,0,0.3);
    width: 180px;
    height: 180px;
    display: flex;
    flex-direction: column;
    justify-content: center;
    align-items: center;
}
.feature:hover {
    transform: translateY(-15px) rotate(5deg) scale(1.05);
    box-shadow: 0 20px 40px rgba(0,0,0,0.4);
    background: rgba(253, 187, 45, 0.8);
}
.feature-icon {
    font-size: 4em;
    color: #ecf0f1;
    margin-bottom: 15px;
    transition: all 0.4s ease;
}
.feature:hover .feature-icon {
    transform: scale(1.2);
}
.feature-description {
    color: #ecf0f1;
    font-size: 1em;
    text-align: center;
    padding: 0 10px;
    transition: all 0.4s ease;
}
.feature:hover .feature-description {
    font-weight: bold;
}
.artifact {
    position: absolute;
    background: radial-gradient(circle, rgba(253,187,45,0.3) 0%, rgba(253,187,45,0) 70%);
    border-radius: 50%;
    opacity: 0.5;
    filter: blur(40px);
}
.artifact.large {
    width: 600px;
    height: 600px;
    top: -200px;
    left: -300px;
    animation: float 30s infinite ease-in-out;
}
.artifact.medium {
    width: 400px;
    height: 400px;
    bottom: -200px;
    right: -200px;
    animation: float 25s infinite ease-in-out reverse;
}
.artifact.small {
    width: 200px;
    height: 200px;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    animation: pulse 8s infinite alternate;
}
@keyframes float {
    0%, 100% { transform: translateY(0) rotate(0deg); }
    50% { transform: translateY(-30px) rotate(15deg); }
}
@keyframes pulse {
    0% { transform: scale(1) translate(-50%, -50%); opacity: 0.5; }
    100% { transform: scale(1.2) translate(-50%, -50%); opacity: 0.8; }
}
</style>
<div id="app-header">
    <div class="artifact large"></div>
    <div class="artifact medium"></div>
    <div class="artifact small"></div>
    <h1>YOLOv9: Manhole Detector</h1>
    <p>Unleash the power of AI to detect manholes with precision</p>
    <div class="feature-container">
        <div class="feature">
            <div class="feature-icon">🎯</div>
            <div class="feature-description">High Precision Detection</div>
        </div>
        <div class="feature">
            <div class="feature-icon">⚡</div>
            <div class="feature-description">Lightning-Fast Processing</div>
        </div>
        <div class="feature">
            <div class="feature-icon">🖼️</div>
            <div class="feature-description">Dynamic Image Resizing</div>
        </div>
        <div class="feature">
            <div class="feature-icon">🔧</div>
            <div class="feature-description">Fine-Tuned Thresholds</div>
        </div>
    </div>
</div>
"""


def yolov9_inference(img_path, image_size, conf_threshold, iou_threshold):
    """Run the YOLOv9 manhole detector on a single image and return the annotated frame."""
    # Load the fine-tuned weights and apply the user-selected thresholds.
    model = yolov9.load('./best.pt')
    model.conf = conf_threshold
    model.iou = iou_threshold
    # Run inference at the requested image size and draw the detections.
    results = model(img_path, size=image_size)
    output = results.render()
    return output[0]
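
# Note: yolov9_inference reloads './best.pt' on every call. If repeated requests
# become slow, one option (an optimization sketch, not part of the original app)
# is to load the weights once at module level, e.g. `MODEL = yolov9.load('./best.pt')`,
# and only update MODEL.conf / MODEL.iou inside the handler.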


def app():
    """Build and return the Gradio Blocks interface."""
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        gr.HTML(HTML_TEMPLATE)

        with gr.Row():
            # Left column: input image and inference settings.
            with gr.Column(scale=1, min_width=300):
                img_path = gr.Image(type="filepath", label="Upload Image")
                image_size = gr.Slider(label="Image Size", minimum=320, maximum=1280, step=32, value=640)
                conf_threshold = gr.Slider(label="Confidence Threshold", minimum=0.1, maximum=1.0, step=0.1, value=0.4)
                iou_threshold = gr.Slider(label="IoU Threshold", minimum=0.1, maximum=1.0, step=0.1, value=0.5)
                detect_button = gr.Button("Detect Manholes", variant="primary")

            # Right column: annotated detection result.
            with gr.Column(scale=1, min_width=300):
                output_numpy = gr.Image(type="numpy", label="Detection Result")

        detect_button.click(
            fn=yolov9_inference,
            inputs=[img_path, image_size, conf_threshold, iou_threshold],
            outputs=[output_numpy]
        )

        # With cache_examples=True, Gradio runs yolov9_inference on these files at
        # startup, so the example images and './best.pt' must be present.
        gr.Examples(
            examples=[
                ["./openmanhole.jpg", 640, 0.4, 0.5],
                ["./images.jpeg", 640, 0.4, 0.5],
            ],
            fn=yolov9_inference,
            inputs=[img_path, image_size, conf_threshold, iou_threshold],
            outputs=[output_numpy],
            cache_examples=True,
        )

    return demo


# Build the Gradio app once; launch it when the file is run as a script.
demo = app()

if __name__ == "__main__":
    demo.launch(debug=True, share=True)
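    # Note: share=True requests a temporary public *.gradio.live link in addition
    # to the local server, and debug=True blocks the main thread so errors are
    # printed to the console; both can be dropped for plain local use.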