Test code

2025-06-24 16:57:16 +08:00
parent fdc5b9722c
commit 3d364aa0bd
5 changed files with 142 additions and 5 deletions

gift_demo.py (new file, 97 lines added)

@@ -0,0 +1,97 @@
import argparse
import os
import time

import cv2
import numpy as np
import torch
from ultralytics import YOLOv10
from ultralytics.utils.show_trace_pr import ShowPR
# from trace_detect import run, _init_model

image_ext = [".jpg", ".jpeg", ".webp", ".bmp", ".png"]
video_ext = ["mp4", "mov", "avi", "mkv"]


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--demo", default="image", help="demo type, e.g. image, video or webcam"
    )
    parser.add_argument("--config", default='../config/gift-1.5x.yml', help="model config file path")
    parser.add_argument("--model", default='../ckpts/nanodet_m_1.5x/model_best/nanodet_model_best.pth',
                        help="model file path")
    parser.add_argument("--path", default="../../data_center/gift/objectdet/test/images",
                        help="path to images or video")
    parser.add_argument("--camid", type=int, default=0, help="webcam demo camera id")
    parser.add_argument(
        "--save_result",
        default="../../data_center/gift/objectdet/test/result",
        help="whether to save the inference result of image/video",
    )
    args = parser.parse_args()
    return args


def get_image_list(path):
    """Recursively collect every image file under `path`."""
    image_names = []
    for maindir, subdir, file_name_list in os.walk(path):
        for filename in file_name_list:
            apath = os.path.join(maindir, filename)
            ext = os.path.splitext(apath)[1]
            if ext in image_ext:
                image_names.append(apath)
    return image_names


def _init():
    model = YOLOv10('runs/detect/train/weights/best_gift_v10n.pt')
    return model


def get_trace_event(model, path):
    """Run the detector on every image under `path` and return one confidence score per image."""
    res_single = []
    if os.path.isdir(path):
        files = get_image_list(path)
    else:
        files = [path]
    files.sort()
    for image_name in files:
        # all_box = run(model=model, stride=stride, pt=pt, source=image_name)
        # print(image_name)
        all_box = model.predict(image_name, save=False, imgsz=[224, 224], conf=0.1)
        # print(all_box[0].boxes.conf)
        all_box = np.array(all_box[0].boxes.conf.cpu())
        if len(all_box) == 0:
            res_single.append(0)  # no detection in this image
        else:
            res_single.append(all_box[-1])  # confidence of the last returned box
    # if sum(res_single) == 0 and ("commodity" not in path):
    #     with open('err.txt', 'w') as f:
    #         f.write(path + '\n')
    return res_single


def main(path):
    model = _init()
    tags, result_all = [], []
    classify = ['commodity', 'gift']
    for cla in classify:
        pre_pth = os.sep.join([path, cla])
        for root, dirs, files in os.walk(pre_pth):
            if not dirs:  # leaf directory: one test sequence
                if cla == 'commodity':
                    tags.append(0)
                else:
                    tags.append(1)
                res_single = get_trace_event(model, root)
                result_all.append(res_single)
    spr = ShowPR(tags, result_all, title_name='yolov10n')
    # spr.change_precValue()
    spr.get_pic()


if __name__ == "__main__":
    # path = '../data_center/gift/trace_subimgs/d50'  # sub-images with a spacing of 50
    # path = '../data_center/gift/trace_subimgs/actual_test'  # real-world test at a Yonghui supermarket
    path = '../data_center/gift/gift_test'  # YOLOv10 single-image test
    # path = '../data_center/gift/trace_subimgs/tracluster'  # filtered with the tracluster method
    main(path)
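ShowPR is a project-internal helper whose interface is not part of this commit, so the sketch below is only a hypothetical illustration of the kind of sweep it presumably performs: each leaf folder is one sample (tag 0 = commodity, 1 = gift), the per-image confidences in result_all are reduced to a folder-level score, and precision/recall are computed across thresholds. The function name pr_sweep and the max-score reduction are assumptions, not the project's actual logic.

# Hypothetical sketch (not the project's ShowPR implementation): sweep a
# confidence threshold and compute precision/recall from per-folder scores.
import numpy as np

def pr_sweep(tags, result_all, thresholds=np.arange(0.05, 0.95, 0.05)):
    # A folder counts as "gift detected" if any of its per-image scores clears the threshold.
    tags = np.asarray(tags)
    folder_scores = np.array([max(scores) if len(scores) else 0.0 for scores in result_all])
    curve = []
    for t in thresholds:
        pred = (folder_scores >= t).astype(int)
        tp = int(((pred == 1) & (tags == 1)).sum())
        fp = int(((pred == 1) & (tags == 0)).sum())
        fn = int(((pred == 0) & (tags == 1)).sum())
        precision = tp / (tp + fp) if (tp + fp) else 0.0
        recall = tp / (tp + fn) if (tp + fn) else 0.0
        curve.append((float(t), precision, recall))
    return curve

# Example: curve = pr_sweep(tags, result_all)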


@@ -82,3 +82,43 @@ R 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.060 0.063 0.068 0.071 0.077 0.081 0.087 0.091 0.097 0.104 0.109 0.116 0.124 0.130 0.140 0.150 0.160 0.171 0.184 0.196 0.212 0.228 0.247 0.271 0.291 0.315 0.341 0.367 0.411 0.481
R 0.918 0.917 0.914 0.912 0.908 0.905 0.903 0.902 0.900 0.898 0.895 0.893 0.890 0.888 0.885 0.881 0.879 0.879 0.876 0.873 0.869 0.866 0.862 0.857 0.853 0.849 0.843 0.835 0.824 0.809
0
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.060 0.063 0.068 0.071 0.077 0.081 0.087 0.091 0.097 0.104 0.109 0.116 0.124 0.130 0.140 0.150 0.160 0.171 0.184 0.196 0.212 0.228 0.247 0.271 0.291 0.315 0.341 0.367 0.411 0.481
R 0.918 0.917 0.914 0.912 0.908 0.905 0.903 0.902 0.900 0.898 0.895 0.893 0.890 0.888 0.885 0.881 0.879 0.879 0.876 0.873 0.869 0.866 0.862 0.857 0.853 0.849 0.843 0.835 0.824 0.809
0
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.060 0.063 0.068 0.071 0.077 0.081 0.087 0.091 0.097 0.104 0.109 0.116 0.124 0.130 0.140 0.150 0.160 0.171 0.184 0.196 0.212 0.228 0.247 0.271 0.291 0.315 0.341 0.367 0.411 0.481
R 0.918 0.917 0.914 0.912 0.908 0.905 0.903 0.902 0.900 0.898 0.895 0.893 0.890 0.888 0.885 0.881 0.879 0.879 0.876 0.873 0.869 0.866 0.862 0.857 0.853 0.849 0.843 0.835 0.824 0.809
0
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.960 0.970 0.980 0.990
T 0.939 0.939 0.939 0.939 0.939 0.940 0.941 0.942 0.942 0.942 0.942 0.943 0.943 0.946 0.947 0.947 0.947 0.949 0.949 0.949 0.949 0.951 0.951 0.951 0.951 0.958 0.958 0.958 0.958
R 0.069 0.069 0.069 0.069 0.069 0.057 0.047 0.042 0.042 0.042 0.042 0.036 0.036 0.018 0.016 0.016 0.016 0.010 0.010 0.010 0.010 0.008 0.008 0.008 0.008 0.002 0.002 0.002 0.002
1
P
T
R
2
P
T
R
3
P
T
R
0
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.013 0.014 0.015 0.018 0.020 0.021 0.023 0.025 0.027 0.030 0.033 0.037 0.041 0.046 0.052 0.059 0.065 0.079 0.090 0.104 0.122 0.134 0.168 0.214 0.261 0.310 0.358 0.432 0.553 0.617
R 0.950 0.950 0.950 0.947 0.945 0.945 0.945 0.944 0.944 0.939 0.938 0.938 0.938 0.936 0.935 0.934 0.932 0.927 0.925 0.924 0.923 0.921 0.917 0.912 0.905 0.901 0.899 0.887 0.867 0.856
1
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.036 0.039 0.041 0.045 0.049 0.052 0.056 0.060 0.066 0.070 0.077 0.084 0.092 0.102 0.108 0.116 0.129 0.142 0.155 0.174 0.188 0.202 0.226 0.255 0.289 0.317 0.349 0.410 0.473 0.515
R 0.934 0.934 0.934 0.933 0.931 0.931 0.930 0.928 0.928 0.927 0.927 0.925 0.924 0.923 0.923 0.921 0.919 0.917 0.916 0.914 0.911 0.908 0.905 0.903 0.899 0.897 0.890 0.877 0.871 0.866
2
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.190 0.196 0.211 0.218 0.228 0.233 0.244 0.251 0.264 0.275 0.289 0.294 0.300 0.319 0.337 0.343 0.358 0.367 0.383 0.404 0.429 0.498 0.514 0.555 0.566 0.609 0.647 0.662 0.675 0.694
R 0.643 0.640 0.629 0.623 0.617 0.613 0.606 0.603 0.593 0.585 0.574 0.571 0.568 0.559 0.545 0.537 0.522 0.513 0.504 0.493 0.487 0.436 0.422 0.400 0.397 0.375 0.349 0.342 0.329 0.316
3
P 0.700 0.710 0.720 0.730 0.740 0.750 0.760 0.770 0.780 0.790 0.800 0.810 0.820 0.830 0.840 0.850 0.860 0.870 0.880 0.890 0.900 0.910 0.920 0.930 0.940 0.950 0.960 0.970 0.980 0.990
T 0.005 0.005 0.005 0.005 0.005 0.005 0.005 0.006 0.006 0.006 0.007 0.007 0.008 0.009 0.009 0.010 0.013 0.018 0.021 0.023 0.026 0.028 0.035 0.038 0.039 0.042 0.049 0.052 0.065 0.088
R 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984 0.984
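The rows above appear to follow a simple block layout: an integer block index on its own line, then a P row (precision targets), a T row (the confidence threshold needed to hit each target), and an R row (the recall achieved at that threshold); some blocks are empty. Assuming that layout, a small parser could look like the sketch below (the file name pr_table.txt is made up).

# Minimal parsing sketch, assuming each block is an index line followed by
# "P ...", "T ...", "R ..." rows (empty rows are allowed).
def parse_pr_blocks(path):
    blocks = []  # list of (block_index, {"P": [...], "T": [...], "R": [...]})
    with open(path) as f:
        for line in f:
            parts = line.split()
            if not parts:
                continue
            if parts[0] not in ("P", "T", "R"):
                blocks.append((parts[0], {"P": [], "T": [], "R": []}))
            elif blocks:
                blocks[-1][1][parts[0]] = [float(x) for x in parts[1:]]
    return blocks

# Hypothetical usage:
# blocks = parse_pr_blocks("pr_table.txt")
# idx, rows = blocks[0]
# print(idx, rows["P"][0], rows["R"][0])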


@@ -43,7 +43,7 @@ PIN_MEMORY = str(os.getenv("PIN_MEMORY", True)).lower() == "true"  # global pin_
 def img2label_paths(img_paths):
     """Define label paths as a function of image paths."""
     # sa, sb = f"{os.sep}images{os.sep}", f"{os.sep}labels{os.sep}"  # /images/, /labels/ substrings
-    sa, sb = f"{os.sep}images{os.sep}", f"{os.sep}labels_class2{os.sep}"  # /images/, /labels/ substrings
+    sa, sb = f"{os.sep}images{os.sep}", f"{os.sep}labels{os.sep}"  # /images/, /labels/ substrings
     return [sb.join(x.rsplit(sa, 1)).rsplit(".", 1)[0] + ".txt" for x in img_paths]
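The only change in this hunk reverts the label sub-directory from the temporary labels_class2 back to the default labels. As a quick illustration of what the helper computes (the dataset path below is made up), the mapping works like this:

# Illustration with a made-up path: img2label_paths() swaps the /images/ segment
# for the label directory and replaces the extension with .txt.
import os

img = os.sep.join(["datasets", "gift", "images", "train", "0001.jpg"])
sa, sb = f"{os.sep}images{os.sep}", f"{os.sep}labels{os.sep}"
label = sb.join(img.rsplit(sa, 1)).rsplit(".", 1)[0] + ".txt"
print(label)  # datasets/labels/... -> datasets/gift/labels/train/0001.txt
              # (datasets/gift/labels_class2/train/0001.txt before this commit)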


@@ -148,9 +148,9 @@ class DetectionValidator(BaseValidator):
                 pred[:, 5] = 0
             # ======= Targets: 2-class ======
-            column = pred[:,5]
-            condition2 = (column != 10)
-            pred[:,5][condition2] = 0
+            # column = pred[:,5]
+            # condition2 = (column != 10)
+            # pred[:,5][condition2] = 0
             # ===========================
             predn = self._prepare_pred(pred, pbatch)
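The freshly commented-out block was a temporary two-class evaluation hack: every predicted class id except 10 was collapsed to class 0 before matching. A small standalone illustration of that masking (tensor values are made up) shows the in-place effect:

# Standalone illustration (made-up tensor) of the masking that was disabled:
# every predicted class id except 10 is collapsed to 0.
import torch

pred = torch.tensor([[0., 0., 10., 10., 0.9, 3.],
                     [0., 0., 10., 10., 0.8, 10.],
                     [0., 0., 10., 10., 0.7, 7.]])
column = pred[:, 5]
condition2 = (column != 10)
pred[:, 5][condition2] = 0
print(pred[:, 5])  # tensor([ 0., 10.,  0.])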


@@ -389,7 +389,7 @@ class ConfusionMatrix:
             cls = detect_cur_cpu[5]
             c = int(cls)
             label = f'{names[c]} {conf:.2f} iou:{float(iou):.2f}'
-            print(">>>>>>>>>>>>>>>>>>> label: {} C: {}".format(label, c))
+            # print(">>>>>>>>>>>>>>>>>>> label: {} C: {}".format(label, c))
             if fp_flag:
                 annotator.box_label(xyxy, label, color=(125, 0, 125))  ## FP: IoU matched but wrong class, purple box
             else:
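Only the debug print is silenced in this hunk; the colour convention stays the same, with a purple box marking a false positive whose IoU matched a ground-truth box but whose class is wrong. Assuming annotator is the standard ultralytics plotting helper, a minimal self-contained version of the call above (dummy image and box values) would look like this:

# Minimal sketch of the box_label() call, assuming the ultralytics Annotator
# helper; the image and box coordinates below are made up for illustration.
import numpy as np
from ultralytics.utils.plotting import Annotator

im = np.zeros((224, 224, 3), dtype=np.uint8)  # dummy image
annotator = Annotator(im)
annotator.box_label([20, 30, 120, 160], "gift 0.87 iou:0.74", color=(125, 0, 125))  # purple = FP with wrong class
annotated = annotator.result()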