轨迹数为空.txt时,遍历文件中事件名,重新跑pipeline写入轨迹数

This commit is contained in:
jiajie555
2025-04-18 15:17:46 +08:00
parent 010f5c445a
commit b0ce11f987
2 changed files with 43 additions and 17 deletions

View File

@ -142,13 +142,15 @@ def show_result(eventpath, event_tracks, yrtDict, savepath_pipe):
def pipeline(dict_data,
pickle_exist,
eventpath,
SourceType,
weights,
DataType = "raw", #raw, pkl: images or videos, pkl, pickle file
YoloVersion="V5",
savepath = None,
saveimages = True
saveimages = True,
):
## 构造购物事件字典
@ -172,10 +174,11 @@ def pipeline(dict_data,
yrt_out = []
if DataType == "raw":
### 不重复执行已经过yolo-resnet-tracker
if pklpath.exists():
print(f"Pickle file have saved: {evtname}.pickle")
return
if not pickle_exist:
### 不重复执行已经过yolo-resnet-tracker
if pklpath.exists():
print(f"Pickle file have saved: {evtname}.pickle")
return
if SourceType == "video":
vpaths = get_video_pairs(eventpath)
@ -441,7 +444,8 @@ def execute_pipeline(evtdir = r"D:\datasets\ym\后台数据\unzip",
weight_yolo_v5 = r'./ckpts/best_cls10_0906.pt' ,
weight_yolo_v10 = r'./ckpts/best_v10s_width0375_1205.pt',
saveimages = True,
max_col = 12
max_col = 12,
track_txt = ''
):
'''
运行函数 pipeline(),遍历事件文件夹,每个文件夹是一个事件
@ -481,18 +485,36 @@ def execute_pipeline(evtdir = r"D:\datasets\ym\后台数据\unzip",
if csv_data == '':
with open('no_datacsv.txt', 'a') as f:
f.write(str(date_file) + '\n')
for item in date_file.iterdir():
# dict_data = {}
if item.is_dir():
# item = evtdir/Path("20241212-171505-f0afe929-fdfe-4efa-94d0-2fa748d65fbb_6907992518930")
if len(track_txt) == 0: ## 无track_txt时遍历文件夹下的所有文件
pickle_exist = False
for item in date_file.iterdir():
# dict_data = {}
if item.is_dir():
# item = evtdir/Path("20241212-171505-f0afe929-fdfe-4efa-94d0-2fa748d65fbb_6907992518930")
parmDict["eventpath"] = item
event_name = str(item.name)
dict_data = get_process_csv_data(csv_data, item)
dict_data_all = pipeline(dict_data, pickle_exist, **parmDict)
if dict_data_all is not None: #已保存pickle文件的事件返回为None
# print('dict_data_all', dict_data_all)
excelWriter.write_simi_add(wb, ws, sheet, max_col, event_name, dict_data_all, headers, excel_name)
else: ## 有track_txt时遍历track_txt文件中的事件
pickle_exist = True ##不判断pickle文件是否存在的标志
txt_path = os.path.join(date_file, track_txt)
with open(txt_path, 'r') as f:
events = f.readlines()
events = [i.strip() for i in events]
for event in events:
item = date_file / event
parmDict["eventpath"] = item
event_name = str(item.name)
dict_data = get_process_csv_data(csv_data, item)
print('dict_data', dict_data)
dict_data_all = pipeline(dict_data, **parmDict)
if dict_data_all is not None: #已保存pickle文件的事件返回为None
dict_data_all = pipeline(dict_data, pickle_exist, **parmDict)
if dict_data_all is not None: # 已保存pickle文件的事件返回为None
# print('dict_data_all', dict_data_all)
excelWriter.write_simi_add(wb, ws, sheet, max_col, event_name, dict_data_all, headers, excel_name)
excelWriter.write_simi_add(wb, ws, sheet, max_col, event_name, dict_data_all, headers,
excel_name)
# try:
# pipeline(**parmDict)
@ -516,6 +538,7 @@ if __name__ == "__main__":
datapath = '/home/yujia/yj/gpu_code/callback_data_test_0417/'
savepath = '/home/yujia/yj/gpu_code/result_0417_v10/'
max_col = 12 ##excel表格列索引从0开始从这列开始写入代码解析内容
track_txt = '轨迹数为空.txt'
execute_pipeline(evtdir=datapath,
DataType = "raw", # raw, pkl
@ -526,7 +549,8 @@ if __name__ == "__main__":
weight_yolo_v5 = '/home/yujia/yj/gpu_code/ckpts/best_cls10_0906.pt' ,
weight_yolo_v10 = '/home/yujia/yj/gpu_code/ckpts/best_v10s_width0375_1205.pt',
saveimages = False,
max_col = max_col
max_col = max_col,
track_txt = track_txt
)