Spaces:
Sleeping
Sleeping
Samariddin Kushmuratov
committed on
Commit
·
adaf02d
1
Parent(s):
5029f8d
yangi qoida qoshildi
Browse files- app.py +10 -6
- assets/result_imgs/00028.jpg +0 -0
- classificatsion_video_demo.py +9 -5
- models/classification/128_1_90/args.yaml +95 -0
- models/classification/128_1_90/confusion_matrix.png +0 -0
- models/classification/128_1_90/confusion_matrix_normalized.png +0 -0
- models/classification/128_1_90/results.csv +101 -0
- models/classification/128_1_90/results.png +0 -0
- models/classification/128_1_90/train_batch0.jpg +0 -0
- models/classification/128_1_90/train_batch1.jpg +0 -0
- models/classification/128_1_90/train_batch2.jpg +0 -0
- models/classification/128_1_90/val_batch0_labels.jpg +0 -0
- models/classification/128_1_90/val_batch0_pred.jpg +0 -0
- models/classification/128_1_90/val_batch1_labels.jpg +0 -0
- models/classification/128_1_90/val_batch1_pred.jpg +0 -0
- models/classification/128_1_90/val_batch2_labels.jpg +0 -0
- models/classification/128_1_90/val_batch2_pred.jpg +0 -0
- models/classification/128_1_90/weights/best.pt +3 -0
app.py
CHANGED
@@ -2,7 +2,7 @@ import os
|
|
2 |
import subprocess
|
3 |
import glob
|
4 |
import gradio as gr
|
5 |
-
from gradio.components import Gallery, Video, Textbox
|
6 |
from classificatsion_video_demo import process
|
7 |
|
8 |
|
@@ -25,7 +25,7 @@ def get_car_numbers(result_folder):
|
|
25 |
os.chdir(current_dir)
|
26 |
return 0
|
27 |
|
28 |
-
def predict(video_path):
|
29 |
"""
|
30 |
Gradio interface orqali yuklab olingan videodan problem framelarni ajratib olinadi va resultat sifatida Galleryga chiqariladi
|
31 |
:param video_path:
|
@@ -33,6 +33,10 @@ def predict(video_path):
|
|
33 |
"""
|
34 |
# Your image processing code here
|
35 |
# print(video_path)
|
|
|
|
|
|
|
|
|
36 |
_path_RES = "assets/result_imgs"
|
37 |
images_res = glob.glob(f'{_path_RES}/*.jpg')
|
38 |
for image in images_res:
|
@@ -41,7 +45,7 @@ def predict(video_path):
|
|
41 |
images_res = glob.glob(f'{_path}/*.jpg')
|
42 |
for image in images_res:
|
43 |
os.remove(image)
|
44 |
-
problem, good, result = process(video_path)
|
45 |
|
46 |
get_car_numbers(_path)
|
47 |
images = glob.glob(f'{_path_RES}/*.jpg')
|
@@ -63,12 +67,12 @@ my_description = """
|
|
63 |
all_frame = Textbox(label="Topilgan kadrlar / Umumiy kadrlar")
|
64 |
|
65 |
problem_frames = Gallery(label="Muammoli kadrlar", elem_id="gallery").style(
|
66 |
-
|
67 |
)
|
68 |
input_video = Video(label="Kiruvchi video") # Create input video component
|
69 |
-
|
70 |
gr.Interface(fn=predict,
|
71 |
-
inputs=input_video,
|
72 |
outputs=[all_frame,problem_frames],
|
73 |
title=my_title,
|
74 |
examples=my_example,
|
|
|
2 |
import subprocess
|
3 |
import glob
|
4 |
import gradio as gr
|
5 |
+
from gradio.components import Gallery, Video, Textbox, Radio
|
6 |
from classificatsion_video_demo import process
|
7 |
|
8 |
|
|
|
25 |
os.chdir(current_dir)
|
26 |
return 0
|
27 |
|
28 |
+
def predict(video_path, radio):
|
29 |
"""
|
30 |
Gradio interface orqali yuklab olingan videodan problem framelarni ajratib olinadi va resultat sifatida Galleryga chiqariladi
|
31 |
:param video_path:
|
|
|
33 |
"""
|
34 |
# Your image processing code here
|
35 |
# print(video_path)
|
36 |
+
print("radio: ",radio,type(radio))
|
37 |
+
which_model=True
|
38 |
+
if radio =="128-1":
|
39 |
+
which_model=False
|
40 |
_path_RES = "assets/result_imgs"
|
41 |
images_res = glob.glob(f'{_path_RES}/*.jpg')
|
42 |
for image in images_res:
|
|
|
45 |
images_res = glob.glob(f'{_path}/*.jpg')
|
46 |
for image in images_res:
|
47 |
os.remove(image)
|
48 |
+
problem, good, result = process(video_path,which_model)
|
49 |
|
50 |
get_car_numbers(_path)
|
51 |
images = glob.glob(f'{_path_RES}/*.jpg')
|
|
|
67 |
all_frame = Textbox(label="Topilgan kadrlar / Umumiy kadrlar")
|
68 |
|
69 |
problem_frames = Gallery(label="Muammoli kadrlar", elem_id="gallery").style(
|
70 |
+
grid_cols=[3], height="auto"
|
71 |
)
|
72 |
input_video = Video(label="Kiruvchi video") # Create input video component
|
73 |
+
radio = Radio(choices=["128-1", "128-4"], label="Qoida buzilishi modda boyicha")
|
74 |
gr.Interface(fn=predict,
|
75 |
+
inputs=[input_video, radio],
|
76 |
outputs=[all_frame,problem_frames],
|
77 |
title=my_title,
|
78 |
examples=my_example,
|
assets/result_imgs/00028.jpg
CHANGED
![]() |
![]() |
classificatsion_video_demo.py
CHANGED
@@ -25,7 +25,7 @@ def train():
|
|
25 |
metrics = model.val()
|
26 |
print(metrics.top1) # top1 aniqligi
|
27 |
|
28 |
-
def tekshirish(path2):
|
29 |
"""
|
30 |
test qilish, model va rasmni berishimiz kerak
|
31 |
"""
|
@@ -36,16 +36,20 @@ def tekshirish(path2):
|
|
36 |
"tl-14",
|
37 |
"weights/best.pt"
|
38 |
)
|
|
|
|
|
|
|
|
|
39 |
test_rasm_joyi =(path2)
|
40 |
|
41 |
-
model_custom = YOLO("models/classification/
|
42 |
natijalar = model_custom(test_rasm_joyi) # predict on an image
|
43 |
natija = natijalar[0].names[np.argmax(natijalar[0].probs.cpu().numpy().data)]
|
44 |
return (f"Label natija: {natija}")
|
45 |
|
46 |
|
47 |
|
48 |
-
def process(video_path):
|
49 |
|
50 |
# saqlash_path = video_path.split('/')[-1].split(".")[0]
|
51 |
saqlash_path = "assets/images"
|
@@ -72,11 +76,11 @@ def process(video_path):
|
|
72 |
if ret==True:
|
73 |
#frame = cv2.flip(frame,1)
|
74 |
# print(tekshirish(frame))
|
75 |
-
if tekshirish(frame) == "Label natija: good":
|
76 |
font = cv2.FONT_HERSHEY_COMPLEX
|
77 |
cv2.putText(frame, 'good', (0, 100), font, 2, (255, 255, 255), 3)
|
78 |
good_frame += 1
|
79 |
-
elif tekshirish(frame) == "Label natija: problem":
|
80 |
font = cv2.FONT_HERSHEY_COMPLEX
|
81 |
cv2.putText(frame, 'problem', (0, 100), font, 2, (255, 255, 255), 3)
|
82 |
# out.write(frame)
|
|
|
25 |
metrics = model.val()
|
26 |
print(metrics.top1) # top1 aniqligi
|
27 |
|
28 |
+
def tekshirish(path2,which_mdel=True):
|
29 |
"""
|
30 |
test qilish, model va rasmni berishimiz kerak
|
31 |
"""
|
|
|
36 |
"tl-14",
|
37 |
"weights/best.pt"
|
38 |
)
|
39 |
+
my_model="tl-14"
|
40 |
+
if not which_mdel:
|
41 |
+
my_model="128_1_90"
|
42 |
+
|
43 |
test_rasm_joyi =(path2)
|
44 |
|
45 |
+
model_custom = YOLO("models/classification/"+my_model+"/weights/best.pt")
|
46 |
natijalar = model_custom(test_rasm_joyi) # predict on an image
|
47 |
natija = natijalar[0].names[np.argmax(natijalar[0].probs.cpu().numpy().data)]
|
48 |
return (f"Label natija: {natija}")
|
49 |
|
50 |
|
51 |
|
52 |
+
def process(video_path,which_model):
|
53 |
|
54 |
# saqlash_path = video_path.split('/')[-1].split(".")[0]
|
55 |
saqlash_path = "assets/images"
|
|
|
76 |
if ret==True:
|
77 |
#frame = cv2.flip(frame,1)
|
78 |
# print(tekshirish(frame))
|
79 |
+
if tekshirish(frame,which_model) == "Label natija: good":
|
80 |
font = cv2.FONT_HERSHEY_COMPLEX
|
81 |
cv2.putText(frame, 'good', (0, 100), font, 2, (255, 255, 255), 3)
|
82 |
good_frame += 1
|
83 |
+
elif tekshirish(frame, which_model) == "Label natija: problem":
|
84 |
font = cv2.FONT_HERSHEY_COMPLEX
|
85 |
cv2.putText(frame, 'problem', (0, 100), font, 2, (255, 255, 255), 3)
|
86 |
# out.write(frame)
|
models/classification/128_1_90/args.yaml
ADDED
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
task: classify
|
2 |
+
mode: train
|
3 |
+
model: yolov8n-cls.pt
|
4 |
+
data: /home/cradle/work/git/traffic_laws/scripts/splitted_128_1_90
|
5 |
+
epochs: 100
|
6 |
+
patience: 50
|
7 |
+
batch: 512
|
8 |
+
imgsz: 224
|
9 |
+
save: true
|
10 |
+
save_period: 10
|
11 |
+
cache: false
|
12 |
+
device: cuda:0
|
13 |
+
workers: 8
|
14 |
+
project: null
|
15 |
+
name: null
|
16 |
+
exist_ok: false
|
17 |
+
pretrained: false
|
18 |
+
optimizer: SGD
|
19 |
+
verbose: true
|
20 |
+
seed: 0
|
21 |
+
deterministic: true
|
22 |
+
single_cls: false
|
23 |
+
rect: false
|
24 |
+
cos_lr: false
|
25 |
+
close_mosaic: 0
|
26 |
+
resume: false
|
27 |
+
amp: true
|
28 |
+
overlap_mask: true
|
29 |
+
mask_ratio: 4
|
30 |
+
dropout: 0.0
|
31 |
+
val: true
|
32 |
+
split: val
|
33 |
+
save_json: false
|
34 |
+
save_hybrid: false
|
35 |
+
conf: null
|
36 |
+
iou: 0.7
|
37 |
+
max_det: 300
|
38 |
+
half: false
|
39 |
+
dnn: false
|
40 |
+
plots: true
|
41 |
+
source: null
|
42 |
+
show: false
|
43 |
+
save_txt: false
|
44 |
+
save_conf: false
|
45 |
+
save_crop: false
|
46 |
+
show_labels: true
|
47 |
+
show_conf: true
|
48 |
+
vid_stride: 1
|
49 |
+
line_width: null
|
50 |
+
visualize: false
|
51 |
+
augment: true
|
52 |
+
agnostic_nms: false
|
53 |
+
classes: null
|
54 |
+
retina_masks: false
|
55 |
+
boxes: true
|
56 |
+
format: torchscript
|
57 |
+
keras: false
|
58 |
+
optimize: false
|
59 |
+
int8: false
|
60 |
+
dynamic: false
|
61 |
+
simplify: false
|
62 |
+
opset: null
|
63 |
+
workspace: 4
|
64 |
+
nms: false
|
65 |
+
lr0: 0.01
|
66 |
+
lrf: 0.01
|
67 |
+
momentum: 0.937
|
68 |
+
weight_decay: 0.0005
|
69 |
+
warmup_epochs: 3.0
|
70 |
+
warmup_momentum: 0.8
|
71 |
+
warmup_bias_lr: 0.1
|
72 |
+
box: 7.5
|
73 |
+
cls: 0.5
|
74 |
+
dfl: 1.5
|
75 |
+
pose: 12.0
|
76 |
+
kobj: 1.0
|
77 |
+
label_smoothing: 0.0
|
78 |
+
nbs: 64
|
79 |
+
hsv_h: 0.015
|
80 |
+
hsv_s: 0.7
|
81 |
+
hsv_v: 0.4
|
82 |
+
degrees: 0.0
|
83 |
+
translate: 0.1
|
84 |
+
scale: 0.5
|
85 |
+
shear: 0.0
|
86 |
+
perspective: 0.0
|
87 |
+
flipud: 0.0
|
88 |
+
fliplr: 0.5
|
89 |
+
mosaic: 1.0
|
90 |
+
mixup: 0.0
|
91 |
+
copy_paste: 0.0
|
92 |
+
cfg: null
|
93 |
+
v5loader: false
|
94 |
+
tracker: botsort.yaml
|
95 |
+
save_dir: /home/cradle/work/git/traffic_laws/runs/classify/train15
|
models/classification/128_1_90/confusion_matrix.png
ADDED
![]() |
models/classification/128_1_90/confusion_matrix_normalized.png
ADDED
![]() |
models/classification/128_1_90/results.csv
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
epoch, train/loss, metrics/accuracy_top1, metrics/accuracy_top5, val/loss, lr/pg0, lr/pg1, lr/pg2
|
2 |
+
0, 5.3959, 0.58578, 1, 8.6169, 0.070194, 0.0033118, 0.0033118
|
3 |
+
1, 5.2537, 0.63671, 1, 8.6121, 0.040128, 0.0065794, 0.0065794
|
4 |
+
2, 5.1909, 0.58121, 1, 8.5891, 0.009996, 0.0097809, 0.0097809
|
5 |
+
3, 5.0829, 0.60452, 1, 8.527, 0.009703, 0.009703, 0.009703
|
6 |
+
4, 4.919, 0.7112, 1, 7.7585, 0.009703, 0.009703, 0.009703
|
7 |
+
5, 4.4664, 0.76862, 1, 7.5757, 0.009604, 0.009604, 0.009604
|
8 |
+
6, 4.0698, 0.8186, 1, 6.6486, 0.009505, 0.009505, 0.009505
|
9 |
+
7, 3.8222, 0.83013, 1, 6.3911, 0.009406, 0.009406, 0.009406
|
10 |
+
8, 3.6811, 0.84623, 1, 6.2779, 0.009307, 0.009307, 0.009307
|
11 |
+
9, 3.5939, 0.84791, 1, 6.1614, 0.009208, 0.009208, 0.009208
|
12 |
+
10, 3.5058, 0.85536, 1, 6.0693, 0.009109, 0.009109, 0.009109
|
13 |
+
11, 3.4128, 0.86857, 1, 5.9351, 0.00901, 0.00901, 0.00901
|
14 |
+
12, 3.3802, 0.85896, 1, 6.0703, 0.008911, 0.008911, 0.008911
|
15 |
+
13, 3.31, 0.84959, 1, 6.0569, 0.008812, 0.008812, 0.008812
|
16 |
+
14, 3.2372, 0.78111, 1, 6.8461, 0.008713, 0.008713, 0.008713
|
17 |
+
15, 3.2108, 0.90485, 1, 5.5184, 0.008614, 0.008614, 0.008614
|
18 |
+
16, 3.1682, 0.89716, 1, 5.6549, 0.008515, 0.008515, 0.008515
|
19 |
+
17, 3.1248, 0.88611, 1, 5.6647, 0.008416, 0.008416, 0.008416
|
20 |
+
18, 3.0998, 0.89284, 1, 5.5219, 0.008317, 0.008317, 0.008317
|
21 |
+
19, 3.0506, 0.91062, 1, 5.4323, 0.008218, 0.008218, 0.008218
|
22 |
+
20, 3.0193, 0.90822, 1, 5.3484, 0.008119, 0.008119, 0.008119
|
23 |
+
21, 3.0009, 0.91591, 1, 5.2606, 0.00802, 0.00802, 0.00802
|
24 |
+
22, 2.9543, 0.91999, 1, 5.2103, 0.007921, 0.007921, 0.007921
|
25 |
+
23, 2.9313, 0.91278, 1, 5.3881, 0.007822, 0.007822, 0.007822
|
26 |
+
24, 2.8859, 0.92407, 1, 5.1685, 0.007723, 0.007723, 0.007723
|
27 |
+
25, 2.8762, 0.92095, 1, 5.1795, 0.007624, 0.007624, 0.007624
|
28 |
+
26, 2.8339, 0.92984, 1, 5.108, 0.007525, 0.007525, 0.007525
|
29 |
+
27, 2.8228, 0.92864, 1, 5.1664, 0.007426, 0.007426, 0.007426
|
30 |
+
28, 2.8043, 0.93056, 1, 5.1173, 0.007327, 0.007327, 0.007327
|
31 |
+
29, 2.7594, 0.94017, 1, 5.0049, 0.007228, 0.007228, 0.007228
|
32 |
+
30, 2.7637, 0.94185, 1, 4.9965, 0.007129, 0.007129, 0.007129
|
33 |
+
31, 2.7239, 0.94882, 1, 4.8774, 0.00703, 0.00703, 0.00703
|
34 |
+
32, 2.6918, 0.9445, 1, 4.9122, 0.006931, 0.006931, 0.006931
|
35 |
+
33, 2.6984, 0.94906, 1, 4.8653, 0.006832, 0.006832, 0.006832
|
36 |
+
34, 2.6507, 0.9493, 1, 4.8853, 0.006733, 0.006733, 0.006733
|
37 |
+
35, 2.6436, 0.94978, 1, 4.8471, 0.006634, 0.006634, 0.006634
|
38 |
+
36, 2.6444, 0.95267, 1, 4.8067, 0.006535, 0.006535, 0.006535
|
39 |
+
37, 2.5989, 0.95147, 1, 4.7835, 0.006436, 0.006436, 0.006436
|
40 |
+
38, 2.6012, 0.95315, 1, 4.8159, 0.006337, 0.006337, 0.006337
|
41 |
+
39, 2.5365, 0.95843, 1, 4.7298, 0.006238, 0.006238, 0.006238
|
42 |
+
40, 2.5692, 0.95459, 1, 4.7394, 0.006139, 0.006139, 0.006139
|
43 |
+
41, 2.5167, 0.96156, 1, 4.6781, 0.00604, 0.00604, 0.00604
|
44 |
+
42, 2.5246, 0.96204, 1, 4.6442, 0.005941, 0.005941, 0.005941
|
45 |
+
43, 2.5085, 0.96108, 1, 4.6867, 0.005842, 0.005842, 0.005842
|
46 |
+
44, 2.4857, 0.95963, 1, 4.6825, 0.005743, 0.005743, 0.005743
|
47 |
+
45, 2.4798, 0.96708, 1, 4.6052, 0.005644, 0.005644, 0.005644
|
48 |
+
46, 2.4229, 0.9666, 1, 4.574, 0.005545, 0.005545, 0.005545
|
49 |
+
47, 2.4381, 0.96636, 1, 4.6034, 0.005446, 0.005446, 0.005446
|
50 |
+
48, 2.402, 0.96732, 1, 4.56, 0.005347, 0.005347, 0.005347
|
51 |
+
49, 2.4287, 0.96925, 1, 4.548, 0.005248, 0.005248, 0.005248
|
52 |
+
50, 2.3834, 0.97069, 1, 4.5258, 0.005149, 0.005149, 0.005149
|
53 |
+
51, 2.3778, 0.97213, 1, 4.5395, 0.00505, 0.00505, 0.00505
|
54 |
+
52, 2.3429, 0.97093, 1, 4.513, 0.004951, 0.004951, 0.004951
|
55 |
+
53, 2.2987, 0.97405, 1, 4.508, 0.004852, 0.004852, 0.004852
|
56 |
+
54, 2.3226, 0.97069, 1, 4.5243, 0.004753, 0.004753, 0.004753
|
57 |
+
55, 2.3072, 0.97045, 1, 4.5249, 0.004654, 0.004654, 0.004654
|
58 |
+
56, 2.3048, 0.97453, 1, 4.4927, 0.004555, 0.004555, 0.004555
|
59 |
+
57, 2.2935, 0.97213, 1, 4.4828, 0.004456, 0.004456, 0.004456
|
60 |
+
58, 2.2666, 0.97309, 1, 4.4873, 0.004357, 0.004357, 0.004357
|
61 |
+
59, 2.254, 0.97213, 1, 4.4818, 0.004258, 0.004258, 0.004258
|
62 |
+
60, 2.2279, 0.97189, 1, 4.47, 0.004159, 0.004159, 0.004159
|
63 |
+
61, 2.209, 0.97453, 1, 4.4533, 0.00406, 0.00406, 0.00406
|
64 |
+
62, 2.193, 0.9779, 1, 4.4305, 0.003961, 0.003961, 0.003961
|
65 |
+
63, 2.1824, 0.97621, 1, 4.4309, 0.003862, 0.003862, 0.003862
|
66 |
+
64, 2.175, 0.97814, 1, 4.43, 0.003763, 0.003763, 0.003763
|
67 |
+
65, 2.1548, 0.97862, 1, 4.4198, 0.003664, 0.003664, 0.003664
|
68 |
+
66, 2.1453, 0.9791, 1, 4.4114, 0.003565, 0.003565, 0.003565
|
69 |
+
67, 2.1573, 0.97838, 1, 4.4102, 0.003466, 0.003466, 0.003466
|
70 |
+
68, 2.1214, 0.97958, 1, 4.3913, 0.003367, 0.003367, 0.003367
|
71 |
+
69, 2.0798, 0.97958, 1, 4.3974, 0.003268, 0.003268, 0.003268
|
72 |
+
70, 2.067, 0.97934, 1, 4.391, 0.003169, 0.003169, 0.003169
|
73 |
+
71, 2.0567, 0.97958, 1, 4.3848, 0.00307, 0.00307, 0.00307
|
74 |
+
72, 2.0294, 0.98126, 1, 4.3674, 0.002971, 0.002971, 0.002971
|
75 |
+
73, 2.0508, 0.98102, 1, 4.3634, 0.002872, 0.002872, 0.002872
|
76 |
+
74, 2.0297, 0.98174, 1, 4.3656, 0.002773, 0.002773, 0.002773
|
77 |
+
75, 2.0016, 0.98318, 1, 4.3564, 0.002674, 0.002674, 0.002674
|
78 |
+
76, 1.98, 0.98246, 1, 4.3524, 0.002575, 0.002575, 0.002575
|
79 |
+
77, 1.9697, 0.98126, 1, 4.3546, 0.002476, 0.002476, 0.002476
|
80 |
+
78, 1.9277, 0.98342, 1, 4.3408, 0.002377, 0.002377, 0.002377
|
81 |
+
79, 1.9377, 0.98294, 1, 4.3363, 0.002278, 0.002278, 0.002278
|
82 |
+
80, 1.8853, 0.98294, 1, 4.3355, 0.002179, 0.002179, 0.002179
|
83 |
+
81, 1.8711, 0.98366, 1, 4.3301, 0.00208, 0.00208, 0.00208
|
84 |
+
82, 1.8593, 0.98438, 1, 4.3262, 0.001981, 0.001981, 0.001981
|
85 |
+
83, 1.845, 0.9851, 1, 4.3219, 0.001882, 0.001882, 0.001882
|
86 |
+
84, 1.8015, 0.98534, 1, 4.3163, 0.001783, 0.001783, 0.001783
|
87 |
+
85, 1.7988, 0.9863, 1, 4.3104, 0.001684, 0.001684, 0.001684
|
88 |
+
86, 1.7558, 0.98606, 1, 4.3082, 0.001585, 0.001585, 0.001585
|
89 |
+
87, 1.7396, 0.98558, 1, 4.3032, 0.001486, 0.001486, 0.001486
|
90 |
+
88, 1.7327, 0.98582, 1, 4.3001, 0.001387, 0.001387, 0.001387
|
91 |
+
89, 1.6905, 0.98606, 1, 4.2961, 0.001288, 0.001288, 0.001288
|
92 |
+
90, 1.6703, 0.98679, 1, 4.2916, 0.001189, 0.001189, 0.001189
|
93 |
+
91, 1.6434, 0.98606, 1, 4.2878, 0.00109, 0.00109, 0.00109
|
94 |
+
92, 1.6202, 0.98654, 1, 4.2848, 0.000991, 0.000991, 0.000991
|
95 |
+
93, 1.5861, 0.98703, 1, 4.2811, 0.000892, 0.000892, 0.000892
|
96 |
+
94, 1.5559, 0.98775, 1, 4.2785, 0.000793, 0.000793, 0.000793
|
97 |
+
95, 1.526, 0.98775, 1, 4.2753, 0.000694, 0.000694, 0.000694
|
98 |
+
96, 1.5021, 0.98751, 1, 4.2735, 0.000595, 0.000595, 0.000595
|
99 |
+
97, 1.4809, 0.98775, 1, 4.2725, 0.000496, 0.000496, 0.000496
|
100 |
+
98, 1.4292, 0.98775, 1, 4.2694, 0.000397, 0.000397, 0.000397
|
101 |
+
99, 1.3988, 0.98799, 1, 4.2668, 0.000298, 0.000298, 0.000298
|
models/classification/128_1_90/results.png
ADDED
![]() |
models/classification/128_1_90/train_batch0.jpg
ADDED
![]() |
models/classification/128_1_90/train_batch1.jpg
ADDED
![]() |
models/classification/128_1_90/train_batch2.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch0_labels.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch0_pred.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch1_labels.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch1_pred.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch2_labels.jpg
ADDED
![]() |
models/classification/128_1_90/val_batch2_pred.jpg
ADDED
![]() |
models/classification/128_1_90/weights/best.pt
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:060a88242196a18a3557d0b8e0fe8131f3bea71efbd3531b897fbcdbec19596c
|
3 |
+
size 2959200
|