import onnxruntime as ort
import numpy as np

# Load the ONNX model, preferring the Azure EP and falling back to CPU.
model_path = './ckpts/best_gift_v10n_rk.onnx'
session = ort.InferenceSession(
    model_path,
    providers=['AzureExecutionProvider', 'CPUExecutionProvider'],
)

# Prepare example input data.
# NOTE: ONNX Runtime reports dynamic dimensions as strings (e.g. 'batch') or
# None rather than ints, which would crash np.random.randn(*shape); substitute
# 1 for any non-int dimension so a concrete random tensor can be built.
input_meta = session.get_inputs()[0]
input_name = input_meta.name
input_shape = [dim if isinstance(dim, int) else 1 for dim in input_meta.shape]
input_data = np.random.randn(*input_shape).astype(np.float32)  # example input

# Run inference; output_names=None requests every model output.
outputs = session.run(None, {input_name: input_data})

# Handle the result: take the first output tensor.
output_name = session.get_outputs()[0].name
output_data = outputs[0]
print(output_data)