From 39f94c7bd404661d09b196f9efb768fa0f23fd36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E5=BA=86=E5=88=9A?= Date: Tue, 17 Dec 2024 17:32:09 +0800 Subject: [PATCH] 20241217 --- contrast/event_test.py | 69 +++ contrast/one2one_contrast.py | 186 ++++---- contrast/one2one_onsite.py | 398 ++++++++++++------ .../utils/__pycache__/event.cpython-39.pyc | Bin 6124 -> 10567 bytes contrast/utils/event.py | 234 +++++++++- pipeline.py | 40 +- shopper.py | 73 ++++ .../__pycache__/drawtracks.cpython-39.pyc | Bin 9024 -> 9071 bytes .../__pycache__/read_data.cpython-39.pyc | Bin 12153 -> 12448 bytes tracking/utils/drawtracks.py | 5 +- tracking/utils/read_data.py | 13 +- 11 files changed, 768 insertions(+), 250 deletions(-) create mode 100644 contrast/event_test.py create mode 100644 shopper.py diff --git a/contrast/event_test.py b/contrast/event_test.py new file mode 100644 index 0000000..b148825 --- /dev/null +++ b/contrast/event_test.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon Dec 16 18:56:18 2024 + +@author: ym +""" +import os +import cv2 + +from utils.event import ShoppingEvent + +def main(): + evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\images" + text1 = "one2n_Error.txt" + text2 = "one2SN_Error.txt" + events = [] + text = (text1, text2) + for txt in text: + txtfile = os.path.join(evtpaths, txt) + with open(txtfile, "r") as f: + lines = f.readlines() + for i, line in enumerate(lines): + line = line.strip() + if line: + fpath=os.path.join(evtpaths, line) + events.append(fpath) + + + events = list(set(events)) + + '''定义当前事件存储地址及生成相应文件件''' + resultPath = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\result" + for evtpath in events: + evtname = os.path.basename(evtpath) + event = ShoppingEvent(evtpath) + + img_cat = event.draw_tracks() + trajpath = os.path.join(resultPath, "trajectory") + if not os.path.exists(trajpath): + os.makedirs(trajpath) + traj_imgpath = os.path.join(trajpath, evtname+".png") + cv2.imwrite(traj_imgpath, img_cat) + + + ## 保存序列图像和轨迹子图 + subimgpath = os.path.join(resultPath, f"{evtname}", "subimg") + imgspath = os.path.join(resultPath, f"{evtname}", "imgs") + if not os.path.exists(subimgpath): + os.makedirs(subimgpath) + if not os.path.exists(imgspath): + os.makedirs(imgspath) + + + subimgpairs = event.save_event_subimg(subimgpath) + for subimgName, subimg in subimgpairs: + spath = os.path.join(subimgpath, subimgName) + cv2.imwrite(spath, subimg) + + imgpairs = event.plot_save_image(imgspath) + for imgname, img in imgpairs: + spath = os.path.join(imgspath, imgname) + cv2.imwrite(spath, img) + + print(f"{evtname}") + + + +if __name__ == "__main__": + main() diff --git a/contrast/one2one_contrast.py b/contrast/one2one_contrast.py index 663d16f..f6b8177 100644 --- a/contrast/one2one_contrast.py +++ b/contrast/one2one_contrast.py @@ -84,82 +84,84 @@ def ft16_to_uint8(arr_ft16): return arr_uint8, arr_ft16_ -def plot_save_image(event, savepath): - cameras = ('front', 'back') - for camera in cameras: - if camera == 'front': - boxes = event.front_trackerboxes - imgpaths = event.front_imgpaths - else: - boxes = event.back_trackerboxes - imgpaths = event.back_imgpaths - - def array2list(bboxes): - '''[x1, y1, x2, y2, track_id, score, cls, frame_index, box_index]''' - frame_ids = bboxes[:, 7].astype(int) - fID = np.unique(bboxes[:, 7].astype(int)) - fboxes = [] - for f_id in fID: - idx = np.where(frame_ids==f_id)[0] - box = bboxes[idx, :] - fboxes.append((f_id, box)) - return fboxes - - fboxes = array2list(boxes) - - for fid, fbox in fboxes: - 
imgpath = imgpaths[int(fid-1)] - - image = cv2.imread(imgpath) - - annotator = Annotator(image.copy(), line_width=2) - for i, *xyxy, tid, score, cls, fid, bid in enumerate(fbox): - label = f'{int(id), int(cls)}' - if tid >=0 and cls==0: - color = colors(int(cls), True) - elif tid >=0 and cls!=0: - color = colors(int(id), True) - else: - color = colors(19, True) # 19为调色板的最后一个元素 - annotator.box_label(xyxy, label, color=color) - - im0 = annotator.result() - spath = os.path.join(savepath, Path(imgpath).name) - cv2.imwrite(spath, im0) - - -def save_event_subimg(event, savepath): - ''' - 功能: 保存一次购物事件的轨迹子图 - 9 items: barcode, type, filepath, back_imgpaths, front_imgpaths, - back_boxes, front_boxes, back_feats, front_feats, - feats_compose, feats_select - 子图保存次序:先前摄、后后摄,以 k 为编号,和 "feats_compose" 中次序相同 - ''' - cameras = ('front', 'back') - for camera in cameras: - if camera == 'front': - boxes = event.front_boxes - imgpaths = event.front_imgpaths - else: - boxes = event.back_boxes - imgpaths = event.back_imgpaths - - for i, box in enumerate(boxes): - x1, y1, x2, y2, tid, score, cls, fid, bid = box - - imgpath = imgpaths[int(fid-1)] - image = cv2.imread(imgpath) - - subimg = image[int(y1/2):int(y2/2), int(x1/2):int(x2/2), :] - - camerType, timeTamp, _, frameID = os.path.basename(imgpath).split('.')[0].split('_') - subimgName = f"cam{camerType}_{i}_tid{int(tid)}_fid({int(fid)}, {frameID}).png" - spath = os.path.join(savepath, subimgName) - - cv2.imwrite(spath, subimg) - # basename = os.path.basename(event['filepath']) - print(f"Image saved: {os.path.basename(event.eventpath)}") +# ============================================================================= +# def plot_save_image(event, savepath): +# cameras = ('front', 'back') +# for camera in cameras: +# if camera == 'front': +# boxes = event.front_trackerboxes +# imgpaths = event.front_imgpaths +# else: +# boxes = event.back_trackerboxes +# imgpaths = event.back_imgpaths +# +# def array2list(bboxes): +# '''[x1, y1, x2, y2, track_id, score, cls, frame_index, box_index]''' +# frame_ids = bboxes[:, 7].astype(int) +# fID = np.unique(bboxes[:, 7].astype(int)) +# fboxes = [] +# for f_id in fID: +# idx = np.where(frame_ids==f_id)[0] +# box = bboxes[idx, :] +# fboxes.append((f_id, box)) +# return fboxes +# +# fboxes = array2list(boxes) +# +# for fid, fbox in fboxes: +# imgpath = imgpaths[int(fid-1)] +# +# image = cv2.imread(imgpath) +# +# annotator = Annotator(image.copy(), line_width=2) +# for i, *xyxy, tid, score, cls, fid, bid in enumerate(fbox): +# label = f'{int(id), int(cls)}' +# if tid >=0 and cls==0: +# color = colors(int(cls), True) +# elif tid >=0 and cls!=0: +# color = colors(int(id), True) +# else: +# color = colors(19, True) # 19为调色板的最后一个元素 +# annotator.box_label(xyxy, label, color=color) +# +# im0 = annotator.result() +# spath = os.path.join(savepath, Path(imgpath).name) +# cv2.imwrite(spath, im0) +# +# +# def save_event_subimg(event, savepath): +# ''' +# 功能: 保存一次购物事件的轨迹子图 +# 9 items: barcode, type, filepath, back_imgpaths, front_imgpaths, +# back_boxes, front_boxes, back_feats, front_feats, +# feats_compose, feats_select +# 子图保存次序:先前摄、后后摄,以 k 为编号,和 "feats_compose" 中次序相同 +# ''' +# cameras = ('front', 'back') +# for camera in cameras: +# if camera == 'front': +# boxes = event.front_boxes +# imgpaths = event.front_imgpaths +# else: +# boxes = event.back_boxes +# imgpaths = event.back_imgpaths +# +# for i, box in enumerate(boxes): +# x1, y1, x2, y2, tid, score, cls, fid, bid = box +# +# imgpath = imgpaths[int(fid-1)] +# image = 
cv2.imread(imgpath) +# +# subimg = image[int(y1/2):int(y2/2), int(x1/2):int(x2/2), :] +# +# camerType, timeTamp, _, frameID = os.path.basename(imgpath).split('.')[0].split('_') +# subimgName = f"cam{camerType}_{i}_tid{int(tid)}_fid({int(fid)}, {frameID}).png" +# spath = os.path.join(savepath, subimgName) +# +# cv2.imwrite(spath, subimg) +# # basename = os.path.basename(event['filepath']) +# print(f"Image saved: {os.path.basename(event.eventpath)}") +# ============================================================================= def data_precision_compare(stdfeat, evtfeat, evtMessage, save=True): @@ -296,7 +298,11 @@ def one2one_simi(): if not os.path.exists(pairpath): os.makedirs(pairpath) try: - save_event_subimg(event, pairpath) + subimgpairs = event.save_event_subimg(pairpath) + for subimgName, subimg in subimgpairs: + spath = os.path.join(pairpath, subimgName) + cv2.imwrite(spath, subimg) + except Exception as e: error_event.append(evtname) @@ -304,10 +310,16 @@ def one2one_simi(): if not os.path.exists(img_path): os.makedirs(img_path) try: - plot_save_image(event, img_path) + imgpairs = event.plot_save_image(img_path) + for imgname, img in imgpairs: + spath = os.path.join(img_path, imgname) + cv2.imwrite(spath, img) except Exception as e: error_event.append(evtname) - + + + + errfile = os.path.join(subimgPath, f'error_event.txt') with open(errfile, 'w', encoding='utf-8') as f: @@ -353,17 +365,16 @@ def one2one_simi(): matrix = 1 - cdist(stdfeat, evtfeat, 'cosine') matrix[matrix < 0] = 0 - simi_mean = np.mean(matrix) simi_max = np.max(matrix) stdfeatm = np.mean(stdfeat, axis=0, keepdims=True) evtfeatm = np.mean(evtfeat, axis=0, keepdims=True) simi_mfeat = 1- np.maximum(0.0, cdist(stdfeatm, evtfeatm, 'cosine')) rltdata.append((label, stdbcd, evtname, simi_mean, simi_max, simi_mfeat[0,0])) - + '''================ float32、16、int8 精度比较与存储 =============''' # data_precision_compare(stdfeat, evtfeat, mergePairs[i], save=True) - + print("func: one2one_eval(), have finished!") return rltdata @@ -436,8 +447,11 @@ def gen_eventdict(sourcePath, saveimg=True): errEvents = [] k = 0 for source_path in sourcePath: - bname = os.path.basename(source_path) + evtpath, bname = os.path.split(source_path) + bname = r"20241126-135911-bdf91cf9-3e9a-426d-94e8-ddf92238e175_6923555210479" + source_path = os.path.join(evtpath, bname) + pickpath = os.path.join(eventDataPath, f"{bname}.pickle") if os.path.isfile(pickpath): continue @@ -451,9 +465,9 @@ def gen_eventdict(sourcePath, saveimg=True): errEvents.append(source_path) print(e) - # k += 1 - # if k==10: - # break + k += 1 + if k==1: + break errfile = os.path.join(eventDataPath, f'error_events.txt') with open(errfile, 'w', encoding='utf-8') as f: diff --git a/contrast/one2one_onsite.py b/contrast/one2one_onsite.py index 16ecdbe..1456c9d 100644 --- a/contrast/one2one_onsite.py +++ b/contrast/one2one_onsite.py @@ -105,27 +105,56 @@ def test_compare(): plot_pr_curve(simiList) def one2one_pr(paths): + ''' + 1:1 + + ''' + paths = Path(paths) - # evtpaths = [p for p in paths.iterdir() if p.is_dir() and len(p.name.split('_'))>=2] - evtpaths = [p for p in paths.iterdir() if p.is_dir()] + evtpaths = [] + for p in paths.iterdir(): + condt1 = p.is_dir() + condt2 = len(p.name.split('_'))>=2 + condt3 = len(p.name.split('_')[-1])>8 + condt4 = p.name.split('_')[-1].isdigit() + + if condt1 and condt2 and condt3 and condt4: + evtpaths.append(p) + + + + + # evtpaths = [p for p in paths.iterdir() if p.is_dir() and len(p.name.split('_'))>=2 and len(p.name.split('_')[-1])>8] + # 
evtpaths = [p for p in paths.iterdir() if p.is_dir()] events, similars = [], [] - ##===================================== 扫A放A, 扫A放B场景 + ##===================================== 扫A放A, 扫A放B场景() one2oneAA, one2oneAB = [], [] + one2SNAA, one2SNAB = [], [] - ##===================================== 应用于展厅 1:N + ##===================================== 应用于 1:1 + + _tp_events, _fn_events, _fp_events, _tn_events = [], [], [], [] + _tp_simi, _fn_simi, _tn_simi, _fp_simi = [], [], [], [] + + ##===================================== 应用于 1:SN tp_events, fn_events, fp_events, tn_events = [], [], [], [] tp_simi, fn_simi, tn_simi, fp_simi = [], [], [], [] ##===================================== 应用于1:n tpevents, fnevents, fpevents, tnevents = [], [], [], [] tpsimi, fnsimi, tnsimi, fpsimi = [], [], [], [] - other_event, other_simi = [], [] ##===================================== barcodes总数、比对错误事件 - bcdList, one2onePath = [], [] + bcdList = [] + one2onePath, one2onePath1 = [], [] + one2SNPath, one2SNPath1 = [], [] + one2nPath = [] + + errorFile_one2one, errorFile_one2SN, errorFile_one2n = [], [], [] + for path in evtpaths: barcode = path.stem.split('_')[-1] datapath = path.joinpath('process.data') @@ -140,51 +169,93 @@ def one2one_pr(paths): except Exception as e: print(f"{path.stem}, Error: {e}") + + '''放入为 1:1,相似度取最大值;取出时为 1:SN, 相似度取均值''' one2one = SimiDict['one2one'] + one2SN = SimiDict['one2SN'] one2n = SimiDict['one2n'] + '''================== 0. 1:1 ===================''' barcodes, similars = [], [] for dt in one2one: + one2onePath.append((path.stem)) + if dt['similar']==0: + one2onePath1.append((path.stem)) + continue + barcodes.append(dt['barcode']) + similars.append(dt['similar']) + if len(barcodes)==len(similars) and len(barcodes)!=0: + ## 扫A放A, 扫A放B场景 + simAA = [similars[i] for i in range(len(barcodes)) if barcodes[i]==barcode] + simAB = [similars[i] for i in range(len(barcodes)) if barcodes[i]!=barcode] + + one2oneAA.extend(simAA) + one2oneAB.extend(simAB) + + ## 相似度排序,barcode相等且排名第一为TP,适用于多的barcode相似度比较 + max_idx = similars.index(max(similars)) + max_sim = similars[max_idx] + # max_bcd = barcodes[max_idx] + for i in range(len(one2one)): + bcd, simi = barcodes[i], similars[i] + if bcd==barcode and simi==max_sim: + _tp_simi.append(simi) + _tp_events.append(path.stem) + elif bcd==barcode and simi!=max_sim: + _fn_simi.append(simi) + _fn_events.append(path.stem) + elif bcd!=barcode and simi!=max_sim: + _tn_simi.append(simi) + _tn_events.append(path.stem) + elif bcd!=barcode and simi==max_sim and barcode in barcodes: + _fp_simi.append(simi) + _fp_events.append(path.stem) + else: + errorFile_one2one.append(path.stem) + + + '''================== 2. 
取出场景下的 1 : Small N ===================''' + barcodes, similars = [], [] + for dt in one2SN: barcodes.append(dt['barcode']) similars.append(dt['similar']) - if len(barcodes)!=len(similars) or len(barcodes)==0: - continue + if len(barcodes)==len(similars) and len(barcodes)!=0: + ## 扫A放A, 扫A放B场景 + simAA = [similars[i] for i in range(len(barcodes)) if barcodes[i]==barcode] + simAB = [similars[i] for i in range(len(barcodes)) if barcodes[i]!=barcode] - ##===================================== 扫A放A, 扫A放B场景 - simAA = [similars[i] for i in range(len(barcodes)) if barcodes[i]==barcode] - simAB = [similars[i] for i in range(len(barcodes)) if barcodes[i]!=barcode] - - one2oneAA.extend(simAA) - one2oneAB.extend(simAB) - one2onePath.append(path.stem) - - ##===================================== 以下应用适用于展厅 1:N - max_idx = similars.index(max(similars)) - max_sim = similars[max_idx] - # max_bcd = barcodes[max_idx] - - if path.stem.find('100321')>0: - print("hhh") - - - for i in range(len(one2one)): - bcd, simi = barcodes[i], similars[i] - if bcd==barcode and simi==max_sim: - tp_simi.append(simi) - tp_events.append(path.stem) - elif bcd==barcode and simi!=max_sim: - fn_simi.append(simi) - fn_events.append(path.stem) - elif bcd!=barcode and simi!=max_sim: - tn_simi.append(simi) - tn_events.append(path.stem) - else: - fp_simi.append(simi) - fp_events.append(path.stem) + one2SNAA.extend(simAA) + one2SNAB.extend(simAB) + one2SNPath.append(path.stem) + if len(simAA)==0: + one2SNPath1.append(path.stem) + + + ## 相似度排序,barcode相等且排名第一为TP,适用于多的barcode相似度比较 + max_idx = similars.index(max(similars)) + max_sim = similars[max_idx] + # max_bcd = barcodes[max_idx] + for i in range(len(one2SN)): + bcd, simi = barcodes[i], similars[i] + if bcd==barcode and simi==max_sim: + tp_simi.append(simi) + tp_events.append(path.stem) + elif bcd==barcode and simi!=max_sim: + fn_simi.append(simi) + fn_events.append(path.stem) + elif bcd!=barcode and simi!=max_sim: + tn_simi.append(simi) + tn_events.append(path.stem) + elif bcd!=barcode and simi==max_sim and barcode in barcodes: + fp_simi.append(simi) + fp_events.append(path.stem) + else: + errorFile_one2SN.append(path.stem) + - ##===================================== 以下应用适用1:n + '''===================== 3. 
取出场景下的 1:n ========================''' events, evt_barcodes, evt_similars, evt_types = [], [], [], [] for dt in one2n: events.append(dt["event"]) @@ -192,92 +263,132 @@ def one2one_pr(paths): evt_similars.append(dt["similar"]) evt_types.append(dt["type"]) - if len(events)!=len(evt_barcodes) or len(evt_barcodes)!=len(evt_similars) \ - or len(evt_barcodes)!=len(evt_similars) or len(events)==0: continue - - maxsim = evt_similars[evt_similars.index(max(evt_similars))] - for i in range(len(one2n)): - bcd, simi = evt_barcodes[i], evt_similars[i] + if len(events)==len(evt_barcodes) and len(evt_barcodes)==len(evt_similars) \ + and len(evt_similars)==len(evt_types) and len(events)>0: - if bcd==barcode and simi==maxsim: - tpsimi.append(simi) - tpevents.append(path.stem) - elif bcd==barcode and simi!=maxsim: - fnsimi.append(simi) - fnevents.append(path.stem) - elif bcd!=barcode and simi!=maxsim: - tnsimi.append(simi) - tnevents.append(path.stem) - elif bcd!=barcode and simi==maxsim: - fpsimi.append(simi) - fpevents.append(path.stem) - else: - other_simi.append(simi) - other_event.append(path.stem) + one2nPath.append(path.stem) + maxsim = evt_similars[evt_similars.index(max(evt_similars))] + for i in range(len(one2n)): + bcd, simi = evt_barcodes[i], evt_similars[i] + + if bcd==barcode and simi==maxsim: + tpsimi.append(simi) + tpevents.append(path.stem) + elif bcd==barcode and simi!=maxsim: + fnsimi.append(simi) + fnevents.append(path.stem) + elif bcd!=barcode and simi!=maxsim: + tnsimi.append(simi) + tnevents.append(path.stem) + elif bcd!=barcode and simi==maxsim and barcode in evt_barcodes: + fpsimi.append(simi) + fpevents.append(path.stem) + else: + errorFile_one2n.append(path.stem) '''命名规则: - 1:1 1:n 1:N - TP_ TP TPX - PPrecise_ PPrecise PPreciseX - tpsimi tp_simi + 1:1 (max) 1:1 (max) 1:n 1:N + _TP TP_ TP TPX + _PPrecise PPrecise_ PPrecise PPreciseX + tpsimi tp_simi ''' - ''' 1:1 数据存储''' + ''' 1:1 数据存储, 相似度计算方式:最大值、均值''' + _PPrecise, _PRecall = [], [] + _NPrecise, _NRecall = [], [] PPrecise_, PRecall_ = [], [] NPrecise_, NRecall_ = [], [] - ''' 1:n 数据存储''' - PPrecise, PRecall = [], [] - NPrecise, NRecall = [], [] - - ''' 展厅 1:N 数据存储''' + ''' 1:SN 数据存储,需根据相似度排序''' PPreciseX, PRecallX = [], [] NPreciseX, NRecallX = [], [] + ''' 1:n 数据存储,需根据相似度排序''' + PPrecise, PRecall = [], [] + NPrecise, NRecall = [], [] + + + Thresh = np.linspace(-0.2, 1, 100) for th in Thresh: - '''============================= 1:1''' - TP_ = sum(np.array(one2oneAA) >= th) - FP_ = sum(np.array(one2oneAB) >= th) - FN_ = sum(np.array(one2oneAA) < th) - TN_ = sum(np.array(one2oneAB) < th) + '''(Precise, Recall) 计算方式, 若 1:1 与 1:SN 相似度选择方式相同,则可以合并''' + '''===================================== 1:1 最大值''' + _TP = sum(np.array(one2oneAA) >= th) + _FP = sum(np.array(one2oneAB) >= th) + _FN = sum(np.array(one2oneAA) < th) + _TN = sum(np.array(one2oneAB) < th) + + _PPrecise.append(_TP/(_TP+_FP+1e-6)) + _PRecall.append(_TP/(len(one2oneAA)+1e-6)) + _NPrecise.append(_TN/(_TN+_FN+1e-6)) + _NRecall.append(_TN/(len(one2oneAB)+1e-6)) + + '''===================================== 1:SN 均值''' + TP_ = sum(np.array(one2SNAA) >= th) + FP_ = sum(np.array(one2SNAB) >= th) + FN_ = sum(np.array(one2SNAA) < th) + TN_ = sum(np.array(one2SNAB) < th) + PPrecise_.append(TP_/(TP_+FP_+1e-6)) - # PRecall_.append(TP_/(TP_+FN_+1e-6)) - PRecall_.append(TP_/(len(one2oneAA)+1e-6)) - + PRecall_.append(TP_/(len(one2SNAA)+1e-6)) NPrecise_.append(TN_/(TN_+FN_+1e-6)) - # NRecall_.append(TN_/(TN_+FP_+1e-6)) - NRecall_.append(TN_/(len(one2oneAB)+1e-6)) - - 
'''============================= 1:n''' - TP = sum(np.array(tpsimi) >= th) - FP = sum(np.array(fpsimi) >= th) - FN = sum(np.array(fnsimi) < th) - TN = sum(np.array(tnsimi) < th) - PPrecise.append(TP/(TP+FP+1e-6)) - # PRecall.append(TP/(TP+FN+1e-6)) - PRecall.append(TP/(len(tpsimi)+len(fnsimi)+1e-6)) - - NPrecise.append(TN/(TN+FN+1e-6)) - # NRecall.append(TN/(TN+FP+1e-6)) - NRecall.append(TN/(len(tnsimi)+len(fpsimi)+1e-6)) - - - '''============================= 1:N 展厅''' + NRecall_.append(TN_/(len(one2SNAB)+1e-6)) + + '''适用于 (Precise, Recall) 计算方式:多个相似度计算并排序,barcode相等且排名第一为 TP ''' + '''===================================== 1:SN ''' TPX = sum(np.array(tp_simi) >= th) FPX = sum(np.array(fp_simi) >= th) FNX = sum(np.array(fn_simi) < th) TNX = sum(np.array(tn_simi) < th) PPreciseX.append(TPX/(TPX+FPX+1e-6)) - # PRecallX.append(TPX/(TPX+FNX+1e-6)) PRecallX.append(TPX/(len(tp_simi)+len(fn_simi)+1e-6)) NPreciseX.append(TNX/(TNX+FNX+1e-6)) - # NRecallX.append(TNX/(TNX+FPX+1e-6)) NRecallX.append(TNX/(len(tn_simi)+len(fp_simi)+1e-6)) + + + '''===================================== 1:n''' + TP = sum(np.array(tpsimi) >= th) + FP = sum(np.array(fpsimi) >= th) + FN = sum(np.array(fnsimi) < th) + TN = sum(np.array(tnsimi) < th) + + PPrecise.append(TP/(TP+FP+1e-6)) + PRecall.append(TP/(len(tpsimi)+len(fnsimi)+1e-6)) + NPrecise.append(TN/(TN+FN+1e-6)) + NRecall.append(TN/(len(tnsimi)+len(fpsimi)+1e-6)) + + - '''============================= 1:1 曲线''' + + '''1. ============================= 1:1 最大值方案 曲线''' + fig, ax = plt.subplots() + ax.plot(Thresh, _PPrecise, 'r', label='Precise_Pos: TP/TPFP') + ax.plot(Thresh, _PRecall, 'b', label='Recall_Pos: TP/TPFN') + ax.plot(Thresh, _NPrecise, 'g', label='Precise_Neg: TN/TNFP') + ax.plot(Thresh, _NRecall, 'c', label='Recall_Neg: TN/TNFN') + ax.set_xlim([0, 1]) + ax.set_ylim([0, 1]) + ax.grid(True) + ax.set_title('1:1 Precise & Recall') + ax.set_xlabel(f"Event Num: {len(one2oneAA)+len(one2oneAB)}") + ax.legend() + plt.show() + ## ============================= 1:1 最大值方案 直方图''' + fig, axes = plt.subplots(2, 1) + axes[0].hist(np.array(one2oneAA), bins=60, edgecolor='black') + axes[0].set_xlim([-0.2, 1]) + axes[0].set_title('AA') + axes[1].hist(np.array(one2oneAB), bins=60, edgecolor='black') + axes[1].set_xlim([-0.2, 1]) + axes[1].set_title('BB') + plt.show() + + + + + '''2. ============================= 1:1 均值方案 曲线''' fig, ax = plt.subplots() ax.plot(Thresh, PPrecise_, 'r', label='Precise_Pos: TP/TPFP') ax.plot(Thresh, PRecall_, 'b', label='Recall_Pos: TP/TPFN') @@ -287,21 +398,50 @@ def one2one_pr(paths): ax.set_ylim([0, 1]) ax.grid(True) ax.set_title('1:1 Precise & Recall') - ax.set_xlabel(f"Event Num: {len(one2oneAA)}") + ax.set_xlabel(f"Event Num: {len(one2SNAA)}") ax.legend() plt.show() - - '''============================= 1:1 直方图''' + ## ============================= 1:1 均值方案 直方图''' fig, axes = plt.subplots(2, 1) - axes[0].hist(np.array(one2oneAA), bins=60, edgecolor='black') + axes[0].hist(np.array(one2SNAA), bins=60, edgecolor='black') axes[0].set_xlim([-0.2, 1]) axes[0].set_title('AA') - axes[1].hist(np.array(one2oneAB), bins=60, edgecolor='black') + axes[1].hist(np.array(one2SNAB), bins=60, edgecolor='black') axes[1].set_xlim([-0.2, 1]) axes[1].set_title('BB') plt.show() + + ''''3. 
============================= 1:SN 曲线''' + fig, ax = plt.subplots() + ax.plot(Thresh, PPreciseX, 'r', label='Precise_Pos: TP/TPFP') + ax.plot(Thresh, PRecallX, 'b', label='Recall_Pos: TP/TPFN') + ax.plot(Thresh, NPreciseX, 'g', label='Precise_Neg: TN/TNFP') + ax.plot(Thresh, NRecallX, 'c', label='Recall_Neg: TN/TNFN') + ax.set_xlim([0, 1]) + ax.set_ylim([0, 1]) + ax.grid(True) + ax.set_title('1:SN Precise & Recall') + ax.set_xlabel(f"Event Num: {len(one2SNAA)}") + ax.legend() + plt.show() + ## ============================= 1:N 展厅 直方图''' + fig, axes = plt.subplots(2, 2) + axes[0, 0].hist(tp_simi, bins=60, edgecolor='black') + axes[0, 0].set_xlim([-0.2, 1]) + axes[0, 0].set_title('TP') + axes[0, 1].hist(fp_simi, bins=60, edgecolor='black') + axes[0, 1].set_xlim([-0.2, 1]) + axes[0, 1].set_title('FP') + axes[1, 0].hist(tn_simi, bins=60, edgecolor='black') + axes[1, 0].set_xlim([-0.2, 1]) + axes[1, 0].set_title('TN') + axes[1, 1].hist(fn_simi, bins=60, edgecolor='black') + axes[1, 1].set_xlim([-0.2, 1]) + axes[1, 1].set_title('FN') + plt.show() + - '''============================= 1:n 曲线''' + '''4. ============================= 1:n 曲线,''' fig, ax = plt.subplots() ax.plot(Thresh, PPrecise, 'r', label='Precise_Pos: TP/TPFP') ax.plot(Thresh, PRecall, 'b', label='Recall_Pos: TP/TPFN') @@ -311,11 +451,10 @@ def one2one_pr(paths): ax.set_ylim([0, 1]) ax.grid(True) ax.set_title('1:n Precise & Recall') - ax.set_xlabel(f"Event Num: {len(one2oneAA)}") + ax.set_xlabel(f"Event Num: {len(tpsimi)+len(fnsimi)}") ax.legend() plt.show() - - '''============================= 1:n 直方图''' + ## ============================= 1:n 直方图''' fig, axes = plt.subplots(2, 2) axes[0, 0].hist(tpsimi, bins=60, edgecolor='black') axes[0, 0].set_xlim([-0.2, 1]) @@ -332,35 +471,18 @@ def one2one_pr(paths): plt.show() - '''============================= 1:N 展厅 曲线''' - fig, ax = plt.subplots() - ax.plot(Thresh, PPreciseX, 'r', label='Precise_Pos: TP/TPFP') - ax.plot(Thresh, PRecallX, 'b', label='Recall_Pos: TP/TPFN') - ax.plot(Thresh, NPreciseX, 'g', label='Precise_Neg: TN/TNFP') - ax.plot(Thresh, NRecallX, 'c', label='Recall_Neg: TN/TNFN') - ax.set_xlim([0, 1]) - ax.set_ylim([0, 1]) - ax.grid(True) - ax.set_title('1:N Precise & Recall') - ax.set_xlabel(f"Event Num: {len(one2oneAA)}") - ax.legend() - plt.show() + fpsnErrFile = str(paths.joinpath("one2SN_Error.txt")) + with open(fpsnErrFile, "w") as file: + for item in fp_events: + file.write(item + "\n") + + fpErrFile = str(paths.joinpath("one2n_Error.txt")) + with open(fpErrFile, "w") as file: + for item in fpevents: + file.write(item + "\n") + + - '''============================= 1:N 展厅 直方图''' - fig, axes = plt.subplots(2, 2) - axes[0, 0].hist(tp_simi, bins=60, edgecolor='black') - axes[0, 0].set_xlim([-0.2, 1]) - axes[0, 0].set_title('TP') - axes[0, 1].hist(fp_simi, bins=60, edgecolor='black') - axes[0, 1].set_xlim([-0.2, 1]) - axes[0, 1].set_title('FP') - axes[1, 0].hist(tn_simi, bins=60, edgecolor='black') - axes[1, 0].set_xlim([-0.2, 1]) - axes[1, 0].set_title('TN') - axes[1, 1].hist(fn_simi, bins=60, edgecolor='black') - axes[1, 1].set_xlim([-0.2, 1]) - axes[1, 1].set_title('FN') - plt.show() # bcdSet = set(bcdList) # one2nErrFile = str(paths.joinpath("one_2_Small_n_Error.txt")) @@ -378,7 +500,7 @@ def one2one_pr(paths): if __name__ == "__main__": - evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\各模块测试记录\展厅测试\1129_展厅模型v801测试组测试" + evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\images" one2one_pr(evtpaths) diff --git a/contrast/utils/__pycache__/event.cpython-39.pyc 
b/contrast/utils/__pycache__/event.cpython-39.pyc
index 113c3dd7cbefe2001f63bae9ff967b9692dff1d6..0cbfef108bcd4b2df107441b46fcc9bf09e088e7 100644
GIT binary patch
literal 10567
[base85-encoded binary payload of the recompiled event.cpython-39.pyc omitted]
diff --git a/contrast/utils/event.py b/contrast/utils/event.py
+                if tid >=0 and cls==0:
+                    color = colors(int(cls), True)
+                elif tid >=0 and cls!=0:
+                    color = colors(int(tid), True)
+                else:
+                    color = colors(19, True)  # 19为调色板的最后一个元素
+                xyxy = (x1/2, y1/2, x2/2, y2/2)
+                annotator.box_label(xyxy, label, color=color)
+
+            im0 = annotator.result()
+
+            imgpairs.append((Path(imgpath).name, im0))
+
+            # spath = os.path.join(savepath, Path(imgpath).name)
+            # cv2.imwrite(spath, im0)
+
+        return imgpairs
+
+
+    def save_event_subimg(self, savepath):
+        '''
+        功能: 保存一次购物事件的轨迹子图
+        9 items: barcode, type, filepath, back_imgpaths, front_imgpaths,
+                 back_boxes, front_boxes, back_feats, front_feats,
+                 feats_compose, 
feats_select + 子图保存次序:先前摄、后后摄,以 k 为编号,和 "feats_compose" 中次序相同 + ''' + imgpairs = [] + cameras = ('front', 'back') + for camera in cameras: + boxes = np.empty((0, 9), dtype=np.float64) ##和类doTracks兼容 + if camera == 'front': + for b in self.front_boxes: + boxes = np.concatenate((boxes, b), axis=0) + imgpaths = self.front_imgpaths + else: + for b in self.back_boxes: + boxes = np.concatenate((boxes, b), axis=0) + imgpaths = self.back_imgpaths + + for i, box in enumerate(boxes): + x1, y1, x2, y2, tid, score, cls, fid, bid = box + + imgpath = imgpaths[int(fid-1)] + image = cv2.imread(imgpath) + + subimg = image[int(y1/2):int(y2/2), int(x1/2):int(x2/2), :] + + camerType, timeTamp, _, frameID = os.path.basename(imgpath).split('.')[0].split('_') + subimgName = f"cam{camerType}_{i}_tid{int(tid)}_fid({int(fid)}, {frameID}).png" + + imgpairs.append((subimgName, subimg)) + + # spath = os.path.join(savepath, subimgName) + + # cv2.imwrite(spath, subimg) + return imgpairs + # basename = os.path.basename(event['filepath']) + print(f"Image saved: {os.path.basename(self.eventpath)}") + + def draw_tracks(self): + front_edge = cv2.imread(r"D:\DetectTracking\tracking\shopcart\cart_tempt\board_ftmp_line.png") + back_edge = cv2.imread(r"D:\DetectTracking\tracking\shopcart\cart_tempt\edgeline.png") + + front_trackerboxes = array2list(self.front_trackerboxes) + back_trackerboxes = array2list(self.back_trackerboxes) + + # img1, img2 = edgeline.copy(), edgeline.copy() + img1 = drawTrack(front_trackerboxes, front_edge.copy()) + img2 = drawTrack(self.front_trackingboxes, front_edge.copy()) + + img3 = drawTrack(back_trackerboxes, back_edge.copy()) + img4 = drawTrack(self.back_trackingboxes, back_edge.copy()) + + + + imgcat1 = np.concatenate((img1, img2), axis = 1) + H, W = imgcat1.shape[:2] + cv2.line(imgcat1, (int(W/2), 0), (int(W/2), H), (128, 255, 128), 2) + + imgcat2 = np.concatenate((img3, img4), axis = 1) + H, W = imgcat2.shape[:2] + cv2.line(imgcat2, (int(W/2), 0), (int(W/2), H), (128, 255, 128), 2) + + + illus = [imgcat1, imgcat2] + if len(illus): + img_cat = np.concatenate(illus, axis = 1) + if len(illus)==2: + H, W = img_cat.shape[:2] + cv2.line(img_cat, (int(W/2), 0), (int(W/2), int(H)), (128, 128, 255), 3) + + return img_cat + + + + def main(): - pklpath = r"D:\DetectTracking\evtresult\images2\ShoppingDict.pkl" - evt = ShoppingEvent(pklpath, stype='pickle') + # pklpath = r"D:\DetectTracking\evtresult\images2\ShoppingDict.pkl" + # evt = ShoppingEvent(pklpath, stype='pickle') + + + + evtpath = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\images\20241209-160248-08edd5f6-1806-45ad-babf-7a4dd11cea60_6973226721445" + evt = ShoppingEvent(evtpath, stype='data') + + img_cat = evt.draw_tracks() + + cv2.imwrite("a.png", img_cat) + + + +# ============================================================================= +# def main1(): +# evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\images" +# text1 = "one2n_Error.txt" +# text2 = "one2SN_Error.txt" +# events = [] +# text = (text1, text2) +# for txt in text: +# txtfile = os.path.join(evtpaths, txt) +# with open(txtfile, "r") as f: +# lines = f.readlines() +# for i, line in enumerate(lines): +# line = line.strip() +# if line: +# fpath=os.path.join(evtpaths, line) +# events.append(fpath) +# +# +# events = list(set(events)) +# +# '''定义当前事件存储地址及生成相应文件件''' +# resultPath = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\result" +# # eventDataPath = os.path.join(resultPath, "evtobjs") +# # subimgPath = os.path.join(resultPath, "subimgs") +# # imagePath = 
os.path.join(resultPath, "image") +# +# # if not os.path.exists(eventDataPath): +# # os.makedirs(eventDataPath) +# # if not os.path.exists(subimgPath): +# # os.makedirs(subimgPath) +# # if not os.path.exists(imagePath): +# # os.makedirs(imagePath) +# +# +# for evtpath in events: +# event = ShoppingEvent(evtpath) +# +# +# evtname = os.path.basename(evtpath) +# subimgpath = os.path.join(resultPath, f"{evtname}", "subimg") +# imgspath = os.path.join(resultPath, f"{evtname}", "imgs") +# if not os.path.exists(subimgpath): +# os.makedirs(subimgpath) +# if not os.path.exists(imgspath): +# os.makedirs(imgspath) +# +# subimgpairs = event.save_event_subimg(subimgpath) +# +# for subimgName, subimg in subimgpairs: +# spath = os.path.join(subimgpath, subimgName) +# cv2.imwrite(spath, subimg) +# +# imgpairs = event.plot_save_image(imgspath) +# for imgname, img in imgpairs: +# spath = os.path.join(imgspath, imgname) +# cv2.imwrite(spath, img) +# +# ============================================================================= + if __name__ == "__main__": main() + # main1() diff --git a/pipeline.py b/pipeline.py index 71fa31b..f3ef177 100644 --- a/pipeline.py +++ b/pipeline.py @@ -77,8 +77,13 @@ def pipeline( '''事件结果存储文件夹''' if not savepath: savepath = Path(__file__).resolve().parents[0] / "evtresult" + save_dir_event = Path(savepath) / evtname + pickpath = Path(savepath)/"pickfile" + if not pickpath.exists(): + pickpath.mkdir(parents=True, exist_ok=True) + ShoppingDict = {"eventPath": eventpath, "eventName": evtname, @@ -117,6 +122,8 @@ def pipeline( save_dir_video.mkdir(parents=True, exist_ok=True) + + '''Yolo + Resnet + Tracker''' optdict["source"] = vpath optdict["save_dir"] = save_dir_video @@ -162,14 +169,16 @@ def pipeline( # pklpath = save_dir_event / "ShoppingDict.pkl" # with open(str(pklpath), 'wb') as f: # pickle.dump(ShoppingDict, f) - pklpath = Path(savepath) / evtname+".pkl" - with open(str(pklpath), 'wb') as f: + pf_path = Path(pickpath) / Path(str(evtname)+".pkl") + with open(str(pf_path), 'wb') as f: pickle.dump(ShoppingDict, f) '''轨迹显示模块''' illus = [None, None] for CamerType, vts in event_tracks: + if len(vts.tracks)==0: continue + if CamerType == 'front': edgeline = cv2.imread("./tracking/shopcart/cart_tempt/board_ftmp_line.png") @@ -255,7 +264,7 @@ def main(): ''' 函数:pipeline(),遍历事件文件夹,选择类型 image 或 video, ''' - evtdir = r"\\192.168.1.28\share\测试视频数据以及日志\各模块测试记录\比对测试\1209永辉超市测试" + evtdir = r"\\192.168.1.28\share\测试视频数据以及日志\算法全流程测试\202412\images" evtdir = Path(evtdir) parmDict = {} @@ -263,25 +272,22 @@ def main(): parmDict["SourceType"] = "image" # video, image parmDict["stdfeat_path"] = None - k = 1 + k = 0 errEvents = [] for item in evtdir.iterdir(): if item.is_dir(): - item = r"D:\exhibition\images\images2\images2" - - + # item = r"D:\exhibition\images\images2\images2" parmDict["eventpath"] = item + pipeline(**parmDict) - - try: - pipeline(**parmDict) - except Exception as e: - errEvents.append(item) - - - # k+=1 - # if k==1: - # break + # try: + # pipeline(**parmDict) + # except Exception as e: + # errEvents.append(str(item)) + + k+=1 + if k==1: + break errfile = os.path.join(parmDict["savepath"], f'error_events.txt') with open(errfile, 'w', encoding='utf-8') as f: diff --git a/shopper.py b/shopper.py new file mode 100644 index 0000000..7e8bdd4 --- /dev/null +++ b/shopper.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Dec 17 10:45:10 2024 + +@author: ym +""" +import os +import numpy as np +import pandas as pd +from pathlib import Path + + + + + +xpath = 
r"\\192.168.1.28\share\模型\部署相关\永辉金源广场店商品信息.xlsx" +xroot, xfilename = os.path.split(xpath) +xfile, ext = os.path.splitext(xfilename) +spath = os.path.join(xroot, xfile+'_diff.xlsx') + + +df = pd.read_excel(xpath) +barcodes = df["商品条码"].tolist() +names_caojq = df["商品名称"].tolist() + +stdpath = r"\\192.168.1.28\share\数据\已完成数据\比对数据\barcode\all_totalBarocde\totalBarcode" + +stdpath = Path(stdpath) +stdBarcodes = [int(f.stem) for f in stdpath.iterdir() if f.is_dir() and f.stem.isdigit() and len(f.stem)>=8] + + +barcodes_s = set(barcodes) +stdBarcodes_s = set(stdBarcodes) + +A = barcodes_s - stdBarcodes_s + +record_bcd, record_name = [], [] +for bcd in A: + if np.isnan(bcd): continue + try: + index = barcodes.index(bcd) + name = names_caojq[index] + + record_bcd.append(bcd) + record_name.append(name) + except ValueError: + print(f"元素 {bcd} 不在列表中") + +df_save = pd.DataFrame({ + '商品条码': record_bcd, + '商品名称': record_name +}) + +df.to_excel(spath, index=False) + +print("Done") + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/tracking/utils/__pycache__/drawtracks.cpython-39.pyc b/tracking/utils/__pycache__/drawtracks.cpython-39.pyc index d1e5e1e46fce79aa4c628d66ebbb8e88231bc5a4..bccf31be23888fa3c4d47df21246c99b74edfc25 100644 GIT binary patch delta 2134 zcmaJ?O>h)N6yBNHncZYJ5eWeykYBM}3X#nQ4FN^@35fhep-O46mdWlUnPhfmc{52S z7-|bq5GYT5KrO2Lsj`rR2TghNV0rSu!IOvauI6A_UO3xdPiBpjaSmU7@AdcI>+X5o z{r%{lqwQu}TWd;u`*P;w@bK5|M|6epv-)@I_6jGB0CIqRyw^CLW%@DGt@@158@JnD z5PoLV%y7c4GX7X*WIxlVFifLjBvR~156G;{&`~5f26zRqUjR#sAfI-@mN3ZQ%hZ)~ zOAj+uML!P(FSm>t$V7+vw=J)4L2Q(724@FsqAarEM~p0b=Q_RA=2#H zWTe%Dsek3a)_P2t;}2SY-z%q=Jxk{FsAUEPjdr67QW4@;N5L!!xC=7G|HwV1?BN^o zJ9butBhpPGbAb+kXZ_jny5j{t9f$TLd})8mnFQ1R zD*udfb?KMWxPPFfpGWK!*6c>u0*Jts(g_mb7*3UsLi^Q*vmnEI&&LKoPLq z0u}=L0XqQ00t$-gkY78T#*6S^L+EY55rF)OvI(YO$|gWNMA^FMp9##sV)_KRvo*Q_ zuTG{rX_4Gv!*&CU>QT1hxpST~XVLpY!qP93WwM;@R!kB-7Fa$VLh7*w5PMIj0H*;b z1QfQ({~KxORrFp_;$ZwMG&A%gW5y!aa!78u>;pOF=4|Pf$P=kyF;W+bkrozgd%#p^OkaFY2C)IbmIc1)`a_-+}{}B z66T0sk`ozGMY$c><1a{&CPpZ~#^marj+004q7@<5a4V0yaG|1|TF-j#^0! 
delta 2110
[base85-encoded binary payload omitted]
diff --git a/tracking/utils/__pycache__/read_data.cpython-39.pyc b/tracking/utils/__pycache__/read_data.cpython-39.pyc
index 3bc17c616271afbe93469b93c820499e8cd35560..00f91e8cebcb3006527a12f583ed351d16d8c0c8 100644
GIT binary patch
delta 3311
[base85-encoded binary payload omitted]
delta 2924
[base85-encoded binary payload omitted]
diff --git a/tracking/utils/drawtracks.py b/tracking/utils/drawtracks.py
index 024c22b..d9ba04e 100644
--- a/tracking/utils/drawtracks.py
+++ b/tracking/utils/drawtracks.py
@@ -364,7 +364,10 @@ def drawTrack(tracks, img):
 
     annotator = TrackAnnotator(img, line_width=2)
     for track in tracks:
-        annotator.plotting_track(track.boxes)
+        if isinstance(track, np.ndarray):
+            annotator.plotting_track(track)
+        else:
+            annotator.plotting_track(track.boxes)
     img = annotator.result()
 
     # pth = save_dir.joinpath(f"{filename}")
diff --git a/tracking/utils/read_data.py b/tracking/utils/read_data.py
index 36b05a6..8996f72 100644
--- a/tracking/utils/read_data.py
+++ b/tracking/utils/read_data.py
@@ -202,9 +202,9 @@ def read_tracking_output(filepath):
 
 
     if len(feats) != len(boxes):
-        return np.array([]), np.array([])
+        return [np.array([])], [np.array([])]
 
-    return np.array(boxes), np.array(feats)
+    return [np.array(boxes)], [np.array(feats)]
 
 
 def read_deletedBarcode_file(filePath):
@@ -408,7 +408,8 @@ def read_similar(filePath):
 
     if len(one2one_list): SimiDict['one2one'] = one2one_list
     if len(one2n_list): SimiDict['one2n'] = one2n_list
-    
+    if len(one2SN_list): SimiDict['one2SN'] = one2SN_list
+
     return SimiDict
 
 
@@ -532,13 +533,12 @@ def main():
             break
 
 def main1():
-    fpath = r'\\192.168.1.28\share\测试_202406\1101\images\20241101-140456-44dc75b5-c406-4cb2-8317-c4660bb727a3_6922130101355_6922130101355\process.data'
-    simidct = read_one2one_simi(fpath)
+    fpath = r'\\192.168.1.28\share\测试视频数据以及日志\各模块测试记录\比对测试\1209永辉超市测试\20241209-155924-117e1941-70f8-4287-8de1-4866868548a6_6926475209967\process.data'
+    simidct = read_similar(fpath)
     print(simidct)
 
 if __name__ == "__main__":
-    
     # main()
     main1()
 

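Note (illustrative, not part of the patch): the curves drawn in one2one_pr() all come from the same threshold sweep, applied to a list of same-barcode (AA) similarities and a list of cross-barcode (AB) similarities, with the 1e-6 smoothing terms shown in the diff. The helper name pr_sweep below is hypothetical; it only restates that computation as a minimal, self-contained sketch.

import numpy as np

def pr_sweep(sim_pos, sim_neg, thresholds=np.linspace(-0.2, 1, 100)):
    """Positive pairs count as TP when similarity >= th; negative pairs
    count as TN when similarity < th (same convention as one2one_pr)."""
    sim_pos, sim_neg = np.asarray(sim_pos), np.asarray(sim_neg)
    p_precise, p_recall, n_precise, n_recall = [], [], [], []
    for th in thresholds:
        tp = np.sum(sim_pos >= th)   # same-barcode pairs accepted
        fp = np.sum(sim_neg >= th)   # cross-barcode pairs wrongly accepted
        fn = np.sum(sim_pos < th)    # same-barcode pairs rejected
        tn = np.sum(sim_neg < th)    # cross-barcode pairs correctly rejected
        p_precise.append(tp / (tp + fp + 1e-6))
        p_recall.append(tp / (len(sim_pos) + 1e-6))
        n_precise.append(tn / (tn + fn + 1e-6))
        n_recall.append(tn / (len(sim_neg) + 1e-6))
    return thresholds, p_precise, p_recall, n_precise, n_recall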