1:n modified
Binary file not shown.
Binary file not shown.
@@ -61,9 +61,9 @@ class Config:
     test_val = "D:/比对/cl"
     # test_val = "./data/test_data_100"

-    # test_model = "checkpoints/best_resnet18_v12.pth"
+    test_model = "checkpoints/best_20250228.pth"
     # test_model = "checkpoints/zhanting_res_801.pth"
-    test_model = "checkpoints/zhanting_res_abroad_8021.pth"
+    # test_model = "checkpoints/zhanting_res_abroad_8021.pth"

contrast/feat_infer.py (new file, 58 lines)
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Feb 28 16:27:17 2025
+
+@author: ym
+"""
+
+import os
+import time
+import pickle
+import numpy as np
+from PIL import Image
+from scipy.spatial.distance import cdist
+from feat_extract.config import config as conf
+from feat_extract.inference import FeatsInterface   #, inference_image
+
+Encoder = FeatsInterface(conf)
+
+
+def main():
+    imgpaths = r"D:\全实时\202502\result\Yolos_Tracking\20250228-160049-188_6921168558018_6921168558018\a"
+    featDict = {}
+    imgs, imgfiles = [], []
+    for filename in os.listdir(imgpaths):
+        file, ext = os.path.splitext(filename)
+
+        imgpath = os.path.join(imgpaths, filename)
+        img = Image.open(imgpath)
+
+        imgs.append(img)
+        imgfiles.append(filename)
+
+        feature = Encoder.inference([img])
+        feature /= np.linalg.norm(feature, axis=1)[:, None]
+        feature_ft32 = feature.astype(np.float32)
+
+        featDict[file] = feature_ft32
+
+    feature = Encoder.inference(imgs)
+    feature /= np.linalg.norm(feature, axis=1)[:, None]
+    feature_ft32 = feature.astype(np.float32)
+
+    matrix = 1 - cdist(feature, feature, 'cosine')
+
+    print("do")
+
+
+if __name__ == '__main__':
+    main()
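The new script L2-normalizes each embedding and then builds a pairwise cosine-similarity matrix with scipy's cdist. A minimal, self-contained sketch of that comparison step, using random vectors as a stand-in for the output of Encoder.inference (which is specific to this repository):

import numpy as np
from scipy.spatial.distance import cdist

# Stand-in for Encoder.inference(imgs): N feature vectors of dimension D.
feats = np.random.rand(5, 256).astype(np.float32)

# L2-normalize each row so cosine similarity reduces to a dot product.
feats /= np.linalg.norm(feats, axis=1)[:, None]

# Pairwise cosine similarity: 1 - cosine distance. The diagonal is ~1.0.
matrix = 1 - cdist(feats, feats, 'cosine')
print(matrix.shape)                      # (5, 5)
print(np.allclose(np.diag(matrix), 1.0))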
@ -6,6 +6,7 @@ Created on Wed Dec 18 11:49:01 2024
|
|||||||
"""
|
"""
|
||||||
import os
|
import os
|
||||||
import pickle
|
import pickle
|
||||||
|
import copy
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import matplotlib.pyplot as plt
|
import matplotlib.pyplot as plt
|
||||||
@@ -17,20 +18,23 @@ def init_eventdict(sourcePath, stype="data"):
     '''stype: str,
         'source': 由 videos 或 images 生成的 pickle 文件
         'data': 从 data 文件中读取的现场运行数据
+        "realtime": 全实时数据,从 data 文件中读取的现场运行数据
     '''

     k, errEvents = 0, []
     for bname in os.listdir(sourcePath):
         # bname = r"20241126-135911-bdf91cf9-3e9a-426d-94e8-ddf92238e175_6923555210479"

         source_path = os.path.join(sourcePath, bname)
-        if stype=="data" or stype=="realtime":
-            pickpath = os.path.join(eventDataPath, f"{bname}.pickle")
-            if not os.path.isdir(source_path) or os.path.isfile(pickpath):
-                continue
-        if stype=="source":
-            pickpath = os.path.join(eventDataPath, bname)
-            if not os.path.isfile(source_path) or os.path.isfile(pickpath):
-                continue
+        if stype=="source" and not os.path.isfile(source_path): continue
+        if stype=="data" and os.path.isfile(source_path): continue
+        if stype=="realtime" and os.path.isfile(source_path): continue
+
+        if os.path.isdir(source_path):
+            pickpath = os.path.join(eventDataPath, f"{bname}.pickle")
+        else:
+            pickpath = os.path.join(eventDataPath, bname)
+
+        if os.path.isfile(pickpath):
+            continue

         evt = os.path.splitext(os.path.split(pickpath)[-1])[0].split('_')
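The reworked filter keeps only files for stype="source" (pickle files produced from videos or images) and only directories for stype="data"/"realtime" (on-site run data), then derives the output pickle path from whichever kind the entry is. A hedged, simplified sketch of that selection rule; the directory layout is assumed:

import os

def select_entries(sourcePath, stype="data"):
    """Yield (entry_path, is_dir) pairs that match the requested source type."""
    for bname in os.listdir(sourcePath):
        p = os.path.join(sourcePath, bname)
        if stype == "source" and not os.path.isfile(p):
            continue                      # 'source': only pickle files qualify
        if stype in ("data", "realtime") and os.path.isfile(p):
            continue                      # 'data'/'realtime': only event directories qualify
        yield p, os.path.isdir(p)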
@@ -38,23 +42,23 @@ def init_eventdict(sourcePath, stype="data"):
         if not cont:
             continue

+        # event = ShoppingEvent(source_path, stype)
         try:
             event = ShoppingEvent(source_path, stype)

             with open(pickpath, 'wb') as f:
                 pickle.dump(event, f)
             print(bname)
         except Exception as e:
             errEvents.append(source_path)
-            print(e)
+            print(f"Error: {bname}, {e}")
         # k += 1
         # if k==1:
         #     break

-    # errfile = os.path.join(resultPath, 'error_events.txt')
-    # with open(errfile, 'a', encoding='utf-8') as f:
-    #     for line in errEvents:
-    #         f.write(line + '\n')
+    errfile = os.path.join(resultPath, 'error_events.txt')
+    with open(errfile, 'a', encoding='utf-8') as f:
+        for line in errEvents:
+            f.write(line + '\n')

 def read_eventdict(eventDataPath):
     evtDict = {}
@@ -70,7 +74,8 @@ def read_eventdict(eventDataPath):

     return evtDict

-def simi_calc(event, o2nevt, typee=None):
+def simi_calc(event, o2nevt, pattern, typee=None):
+    if pattern==1 or pattern==2:
         if typee == "11":
             boxes1 = event.front_boxes
             boxes2 = o2nevt.front_boxes
@@ -97,11 +102,10 @@ def simi_calc(event, o2nevt, typee=None):
             feat2 = o2nevt.front_feats

     '''自定义事件特征选择'''
-    if typee==3 and len(event.feats_compose) and len(o2nevt.feats_compose):
+    if pattern==3 and len(event.feats_compose) and len(o2nevt.feats_compose):
         feat1 = [event.feats_compose]
         feat2 = [o2nevt.feats_compose]

-
     if len(feat1) and len(feat2):
         matrix = 1 - cdist(feat1[0], feat2[0], 'cosine')
         simi = np.mean(matrix)
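simi_calc now receives the comparison pattern explicitly instead of overloading typee: patterns 1 and 2 pick camera-specific boxes/features by type, while pattern 3 compares the composed trajectory features. A hedged sketch of the core computation it ends with (mean of the pairwise cosine-similarity matrix between two feature sets; the feature arrays here are stand-ins):

import numpy as np
from scipy.spatial.distance import cdist

def mean_cosine_similarity(feat1, feat2):
    """Mean pairwise cosine similarity between (N, D) and (M, D) feature sets."""
    if len(feat1) == 0 or len(feat2) == 0:
        return None                      # mirrors the diff: callers skip None results
    matrix = 1 - cdist(feat1, feat2, 'cosine')
    return float(np.mean(matrix))

a = np.random.rand(4, 128)
b = np.random.rand(6, 128)
print(mean_cosine_similarity(a, b))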
@@ -114,48 +118,51 @@ def one2n_pr(evtDicts, pattern=1):
     '''
     pattern:
         1: process.data 中记录的相似度
-        2: 根据 process.data 中标记的 type 选择特征计算相似度
-        3: 以其它方式选择特征计算相似度
+        2: 根据 process.data 中标记的 type 选择特征组合方式计算相似度
+        3: 利用 process.data 中的轨迹特征,以其它方式计算相似度
     '''

     tpevents, fnevents, fpevents, tnevents = [], [], [], []
     tpsimi, fnsimi, tnsimi, fpsimi = [], [], [], []
     one2nFile, errorFile_one2n = [], []
+    errorFile_one2n_ = []
+    evts_output = []
     for evtname, event in evtDicts.items():
         evt_names, evt_barcodes, evt_similars, evt_types = [], [], [], []

-        if len(event.barcode)==0:
+        if len(event.one2n)==0 or len(event.barcode)==0:
             continue

+        evts_output.append(evtname)
+
         for ndict in event.one2n:
             nname = ndict["event"]
             barcode = ndict["barcode"]
             similar = ndict["similar"]
             typee = ndict["type"].strip()

+            if len(barcode)==0:
+                continue
+            if typee.find(",") >=0:
+                typee = typee.split(",")[-1]
+
+            if pattern==1:
+                evt_similars.append(similar)
+            if pattern==2 or pattern==3:
+                o2n_evt = [evt for name, evt in evtDicts.items() if name.find(nname[:15])==0]
+                if len(o2n_evt)!=1:
+                    continue
+
+                simival = simi_calc(event, o2n_evt[0], pattern, typee)
+                if simival==None:
+                    continue
+                evt_similars.append(simival)
+
             evt_names.append(nname)
             evt_barcodes.append(barcode)
             evt_types.append(typee)

-            if pattern==1:
-                evt_similars.append(similar)
-
-            if pattern==2 or pattern==3:
-                o2n_evt = [evt for name, evt in evtDicts.items() if name.find(nname[:15])==0]
-                if len(o2n_evt)==1:
-                    o2nevt = o2n_evt[0]
-                else:
-                    continue
-
-                if pattern==2:
-                    simival = simi_calc(event, o2nevt, typee)
-
-                if pattern==3:
-                    simival = simi_calc(event, o2nevt, typee=pattern)
-
-                if simival==None:
-                    continue
-                evt_similars.append(simival)
-
+        # if evtname == "20250226-170321-327_6903244678377":
+        #     print("evtname")
+
         ## process.data的oneTon的各项中,均不包括当前事件的barcode
         if event.barcode not in evt_barcodes:
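For each event, the reworked loop gathers one similarity per candidate in the 1:n list and then (in the following hunk) credits the event only when the highest similarity falls on the candidate whose barcode matches the event's own barcode. A hedged, simplified sketch of that decision rule, detached from the ShoppingEvent objects used in the repository:

def is_one2n_hit(own_barcode, candidates):
    """True when the highest-similarity candidate carries the event's own barcode.
    candidates: list of (barcode, similarity) pairs."""
    if not candidates:
        return False
    barcodes = [b for b, _ in candidates]
    if own_barcode not in barcodes:
        return False                    # the diff handles this case separately
    maxsim = max(s for _, s in candidates)
    return any(b == own_barcode and s == maxsim for b, s in candidates)

print(is_one2n_hit("6921168558018", [("6921168558018", 0.82), ("6903244678377", 0.40)]))  # True
print(is_one2n_hit("6921168558018", [("6921168558018", 0.30), ("6903244678377", 0.55)]))  # False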
@@ -164,14 +171,11 @@ def one2n_pr(evtDicts, pattern=1):
         else:
             one2nFile.append(evtname)

-        if len(evt_names)==len(evt_barcodes) and len(evt_barcodes)==len(evt_similars) \
-            and len(evt_similars)==len(evt_types) and len(evt_names)>0:
-
+        if len(evt_names)==len(evt_barcodes)==len(evt_similars)==len(evt_types) and len(evt_names)>0:
             # maxsim = evt_similars[evt_similars.index(max(evt_similars))]
             maxsim = max(evt_similars)
             for i in range(len(evt_names)):
                 bcd, simi = evt_barcodes[i], evt_similars[i]

                 if bcd==event.barcode and simi==maxsim:
                     tpsimi.append(simi)
                     tpevents.append(evtname)
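The length check now uses Python's chained comparison, which is equivalent to the old two-line and-expression: a==b==c evaluates as (a==b) and (b==c). A small illustration with hypothetical lists:

names, barcodes, similars, types = ["e1"], ["123"], [0.9], ["11"]
chained  = len(names) == len(barcodes) == len(similars) == len(types)
expanded = (len(names) == len(barcodes)) and (len(barcodes) == len(similars)) and (len(similars) == len(types))
assert chained == expanded == True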
@@ -185,14 +189,11 @@ def one2n_pr(evtDicts, pattern=1):
                     fpsimi.append(simi)
                     fpevents.append(evtname)
         else:
-            errorFile_one2n.append(evtname)
-
-
+            errorFile_one2n_.append(evtname)

     ''' 1:n 数据存储,需根据相似度排序'''
     PPrecise, PRecall = [], []
     NPrecise, NRecall = [], []

     Thresh = np.linspace(-0.2, 1, 100)
     for th in Thresh:
         '''============================= 1:n 计算'''
@@ -202,9 +203,9 @@ def one2n_pr(evtDicts, pattern=1):
         TN = sum(np.array(tnsimi) < th)

         PPrecise.append(TP/(TP+FP+1e-6))
-        PRecall.append(TP/(len(one2nFile)+1e-6))
+        PRecall.append(TP/(TP+FN+1e-6))
         NPrecise.append(TN/(TN+FN+1e-6))
-        NRecall.append(TN/(len(tnsimi)+len(fpsimi)+1e-6))
+        NRecall.append(TN/(TN+FP+1e-6))


     '''4. ============================= 1:n 曲线,'''
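The recall terms switch from fixed denominators (number of 1:n files, number of negative samples) to the standard confusion-matrix form: recall = TP/(TP+FN) for the positive class and TN/(TN+FP) for the negative class, with a small epsilon against division by zero. A hedged sketch of the threshold sweep on stand-in similarity lists; only the TN counting line is visible in this hunk's context, so the other counting conventions are assumptions:

import numpy as np

def pr_at_threshold(tpsimi, fnsimi, fpsimi, tnsimi, th, eps=1e-6):
    # Counting convention is an assumption; only the TN line appears in the diff context.
    TP = np.sum(np.array(tpsimi) >= th)
    FN = np.sum(np.array(fnsimi) >= th)
    FP = np.sum(np.array(fpsimi) >= th)
    TN = np.sum(np.array(tnsimi) < th)
    return (TP/(TP+FP+eps), TP/(TP+FN+eps),    # positive-class precision / recall
            TN/(TN+FN+eps), TN/(TN+FP+eps))    # negative-class precision / recall

print(pr_at_threshold([0.9, 0.8], [0.4], [0.6], [0.2, 0.1], th=0.5))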
@@ -239,10 +240,8 @@ def one2n_pr(evtDicts, pattern=1):
     return fpevents

 def main():
-
     '''1. 生成事件字典并保存至 eventDataPath, 只需运行一次 '''
-    init_eventdict(eventSourcePath, stype="source")     # 'source', 'data', 'realtime'
-
+    init_eventdict(eventSourcePath, stype="realtime")   # 'source', 'data', 'realtime'

     # for pfile in os.listdir(eventDataPath):
     #     evt = os.path.splitext(pfile)[0].split('_')
@@ -250,13 +249,12 @@ def main():
     #     if not cont:
     #         continue

     '''2. 读取事件字典 '''
     evtDicts = read_eventdict(eventDataPath)

     '''3. 1:n 比对事件评估 '''
-    fpevents = one2n_pr(evtDicts, pattern=2)
+    fpevents = one2n_pr(evtDicts, pattern=1)

     fpErrFile = str(Path(resultPath).joinpath("one2n_fp_Error.txt"))
     with open(fpErrFile, "w") as file:
@@ -266,10 +264,10 @@ def main():


 if __name__ == '__main__':
-    eventSourcePath = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\result_V12\ShoppingDict_pkfile"
+    eventSourcePath = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\V12\2025-2-27"
     resultPath = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\testing"

-    eventDataPath = os.path.join(resultPath, "evtobjs_data")
+    eventDataPath = os.path.join(resultPath, "evtobjs_wang")
     if not os.path.exists(eventDataPath):
         os.makedirs(eventDataPath)

@@ -507,7 +507,7 @@ def contrast_pr(paths):


 if __name__ == "__main__":
-    evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\V12\2025-2-21\比对\video"
+    evtpaths = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\V12\2025-2-26_2"
     contrast_pr(evtpaths)

@@ -266,16 +266,16 @@ def main():
    函数:pipeline(),遍历事件文件夹,选择类型 image 或 video,
     '''
     parmDict = {}
-    evtdir = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\V12\2025-2-21\比对\video"
+    evtdir = r"D:\全实时\202502"
     parmDict["SourceType"] = "video"   # video, image
-    parmDict["savepath"] = r"\\192.168.1.28\share\测试视频数据以及日志\全实时测试\result_V12"
+    parmDict["savepath"] = r"D:\全实时\202502\result"
     parmDict["weights"] = r'D:\DetectTracking\ckpts\best_cls10_0906.pt'

     evtdir = Path(evtdir)
     k, errEvents = 0, []
     for item in evtdir.iterdir():
         if item.is_dir():
-            item = evtdir/Path("20250221-160936-893_6942506204855_6942506204855")
+            item = evtdir/Path("20250228-160049-188_6921168558018_6921168558018")
             parmDict["eventpath"] = item
             # pipeline(**parmDict)

@@ -279,7 +279,7 @@ def yolo_resnet_tracker(
                 color = colors(int(id), True)
             else:
                 color = colors(19, True)   # 19为调色板的最后一个元素
-            annotator.box_label(xyxy, label, color=color)
+            # annotator.box_label(xyxy, label, color=color)

             '''====== Save results (image and video) ======'''
             # save_path = str(save_dir / Path(path).name)   # 带有后缀名
@@ -24,6 +24,8 @@ from dotrack.dotracks_back import doBackTracks
 from dotrack.dotracks_front import doFrontTracks
 from utils.drawtracks import draw5points, drawTrack, drawtracefeat, plot_frameID_y2, drawFeatures, draw_all_trajectories
+
+from utils.read_data import extract_data_realtime, read_tracking_output_realtime

 # from datetime import datetime
 # from utils.proBoxes import boxes_add_fid
 # from utils.plotting import boxing_img   #, Annotator, colors,
@@ -80,20 +82,38 @@ def save_subimgs(vts, file, TracksDict):
         cv2.imwrite(str(imgdir) + f"/{tid}_{fid}_{bid}.png", img)

 def have_tracked():
-    trackdict = r'./data/trackdicts'
-    alltracks = []
+    # trackdict = r'./data/trackdicts'
+    trackdict = r'D:\全实时\202502\20250228-152846-438_6901668934727_6901668934727'
+
+    bboxes, alltracks = [], []
     k = 0
     gt = Profile()
     for filename in os.listdir(trackdict):
-        filename = '153112511_0_seek_105.pkl'
+        # filename = '153112511_0_seek_105.pkl'
+
         file, ext = os.path.splitext(filename)
         filepath = os.path.join(trackdict, filename)
-        TracksDict = np.load(filepath, allow_pickle=True)
-        bboxes = TracksDict['TrackBoxes']
+
+        if file.split('_')[0]=='0' or file.find("back") >= 0:
+            CamerType = "back"
+        if file.split('_')[0]=='1' or file.find("front") >= 0:
+            CamerType = "front"
+
+        ## 1. 加载 tracker 输出的 pickle 文件
+        if ext in ['.pkl', '.pickle']:
+            filepath = os.path.join(trackdict, filename)
+            TracksDict = np.load(filepath, allow_pickle=True)
+            bboxes = TracksDict['TrackBoxes']
+
+        ## 2. 加载 data 文件
+        if filename.find('tracker.data')>0:
+            bboxes, TracksDict = extract_data_realtime(filepath)
+
+        if len(bboxes)==0:
+            continue

         with gt:
-            if filename.find("front") >= 0:
+            if CamerType == "front":
                 vts = doFrontTracks(bboxes, TracksDict)

                 Intrude = vts.isintrude()
@@ -112,7 +132,7 @@ def have_tracked():
                 edgeline = cv2.imread("./shopcart/cart_tempt/board_ftmp_line.png")
                 img_tracking = draw_all_trajectories(vts, edgeline, save_dir, file, draw5p=True)

-            else:
+            if CamerType == "back":
                 vts = doBackTracks(bboxes, TracksDict)

                 Intrude = vts.isintrude()
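have_tracked() now derives the camera side from the file name (a leading '0' or 'back' marks the back camera, '1' or 'front' the front camera) and accepts two input formats: pickled tracker output (.pkl/.pickle) or a realtime tracker.data file parsed by extract_data_realtime. A hedged sketch of that dispatch, with the repository-specific loaders stubbed out:

import os

def camera_type(filename):
    """'back' for names starting with '0' or containing 'back'; 'front' for '1'/'front'."""
    stem = os.path.splitext(filename)[0]
    if stem.split('_')[0] == '0' or 'back' in stem:
        return "back"
    if stem.split('_')[0] == '1' or 'front' in stem:
        return "front"
    return None

def load_tracks(filepath):
    """Dispatch by input format; the return values stand in for np.load(..., allow_pickle=True)
    and extract_data_realtime() from the repository."""
    ext = os.path.splitext(filepath)[1]
    if ext in ('.pkl', '.pickle'):
        return "pickled tracker output"     # placeholder
    if filepath.find('tracker.data') > 0:
        return "realtime tracker.data"      # placeholder
    return None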
Binary file not shown.
@@ -205,10 +205,13 @@ def extract_data_realtime(datapath):
             if line.endswith(','):
                 line = line[:-1]
             ftlist = [float(x) for x in line.split()]
-            if len(ftlist) != 265: continue
+
+            if len(ftlist) != 265:
+                continue
+
             boxes.append(ftlist[:9])
             feats.append(ftlist[9:])

     trackerboxes = np.array(boxes)
     trackerfeats = np.array(feats)

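Each accepted line carries exactly 265 numbers: the first 9 go to the tracker box record and the remaining 256 form the appearance feature. A hedged sketch of the per-line split (the meaning of the 9 box values is not spelled out in this hunk):

import numpy as np

def parse_tracker_line(line):
    """Split one realtime tracker line into (box, feat), or return None if malformed."""
    line = line.strip()
    if line.endswith(','):
        line = line[:-1]
    ftlist = [float(x) for x in line.split()]
    if len(ftlist) != 265:               # 9 box values + 256-dim feature expected
        return None
    return np.array(ftlist[:9]), np.array(ftlist[9:])

box, feat = parse_tracker_line(" ".join(["0.5"] * 265))
print(box.shape, feat.shape)             # (9,) (256,)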
@@ -381,14 +384,21 @@ def read_similar(filePath):
             if Flag_1ton:
                 label = line.split(':')[0].strip()
                 value = line.split(':')[1].strip()
-                bcd = label.split('_')[-1]
-                if len(bcd)<8: continue
+
+                Dict['barcode'] = ''
+                if label.find("_") > 0:
+                    bcd = label.split('_')[-1]
+                    if len(bcd)>=10 and bcd.isdigit():
+                        Dict['barcode'] = bcd
+
                 Dict['event'] = label
-                Dict['barcode'] = bcd
                 Dict['similar'] = float(value.split(',')[0])
-                Dict['type'] = value.split(',')[1]
+                if value.find("=")>0:
+                    Dict['type'] = value.split('=')[-1]
+                else:
+                    Dict['type'] = value.split(',')[-1]
                 one2n_list.append(Dict)

             if len(one2one_list): SimiDict['one2one'] = one2one_list
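The 1:n parser now defaults the barcode to an empty string and only fills it when the suffix after the last '_' looks like a real barcode (at least 10 characters, digits only), and it reads the type either after '=' or after the last ',' in the value. A hedged sketch of that handling; the exact log line layout beyond "label: value" is an assumption:

def parse_one2n_line(line):
    """Parse one 'label: value' line from process.data into an event record."""
    label, value = (part.strip() for part in line.split(':', 1))
    rec = {'event': label, 'barcode': ''}
    if label.find('_') > 0:
        bcd = label.split('_')[-1]
        if len(bcd) >= 10 and bcd.isdigit():   # accept only plausible barcodes
            rec['barcode'] = bcd
    rec['similar'] = float(value.split(',')[0])
    rec['type'] = value.split('=')[-1] if value.find('=') > 0 else value.split(',')[-1]
    return rec

print(parse_one2n_line("20250226-170321-327_6903244678377: 0.82,type=11"))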