yolor-onnxruntime
YOLOR (You Only Learn One Representation) is an object detection model. This project uses ONNX Runtime to deploy the anchor-free YOLOR series and provides both C++ and Python programs.
C++ version (a minimal sketch of loading the exported ONNX model and running one inference with the ONNX Runtime C++ API; the 640x640 input size and the zero-filled dummy input are assumptions that depend on how the model was exported):
```cpp
#include <cstdio>
#include <cstdlib>
#include <string>
#include <vector>
#include <onnxruntime_cxx_api.h>

int main(int argc, char* argv[]) {
    if (argc != 2) {
        printf("Usage: %s <yolor-model.onnx>\n", argv[0]);
        return 1;
    }
    const char* model_path = argv[1];

    // Create the runtime environment and configure the session.
    Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "yolor-onnxruntime");
    Ort::SessionOptions session_options;
    session_options.SetIntraOpNumThreads(1);
    session_options.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_ALL);

    // Load the exported YOLOR model.
    // Note: on Windows the Session constructor expects a wide-character path.
    Ort::Session session(env, model_path, session_options);

    // Query the input and output names from the model instead of hard-coding them
    // (GetInputNameAllocated/GetOutputNameAllocated require ONNX Runtime 1.13+).
    Ort::AllocatorWithDefaultOptions allocator;
    Ort::AllocatedStringPtr input_name = session.GetInputNameAllocated(0, allocator);
    Ort::AllocatedStringPtr output_name = session.GetOutputNameAllocated(0, allocator);

    // Prepare an NCHW float32 input tensor. A real pipeline fills this with a
    // resized, normalized image; 640x640 is assumed here and depends on the export.
    const int64_t input_shape[] = {1, 3, 640, 640};
    std::vector<float> input_data(1 * 3 * 640 * 640, 0.0f);

    Ort::MemoryInfo memory_info =
        Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
    Ort::Value input_tensor = Ort::Value::CreateTensor<float>(
        memory_info, input_data.data(), input_data.size(), input_shape, 4);

    // Run inference.
    const char* input_names[] = {input_name.get()};
    const char* output_names[] = {output_name.get()};
    std::vector<Ort::Value> outputs = session.Run(
        Ort::RunOptions{nullptr}, input_names, &input_tensor, 1, output_names, 1);

    // The raw output holds the candidate detections; box decoding, confidence
    // filtering and non-maximum suppression would follow here.
    std::vector<int64_t> out_shape =
        outputs[0].GetTensorTypeAndShapeInfo().GetShape();
    printf("output rank: %zu\n", out_shape.size());
    for (int64_t d : out_shape) {
        printf("  dim: %lld\n", (long long)d);
    }
    return 0;
}
```
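To build the C++ example, link against the ONNX Runtime shared library (for example with g++ by adding the ONNX Runtime include and library directories and -lonnxruntime); the exact paths depend on where the prebuilt ONNX Runtime package is installed.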
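Python version (a minimal sketch using the onnxruntime Python package; the model filename, the 640x640 resolution and the zero-filled dummy image are assumptions, and real pre/post-processing depends on the exported model):
```python
import numpy as np
import onnxruntime as ort

# Load the exported YOLOR model on CPU; use CUDAExecutionProvider with a GPU build.
session = ort.InferenceSession("yolor.onnx", providers=["CPUExecutionProvider"])

# Query the input/output names from the model instead of hard-coding them.
input_name = session.get_inputs()[0].name
output_name = session.get_outputs()[0].name

# Dummy NCHW float32 input; a real pipeline feeds a resized, normalized image
# at the resolution the model was exported with (640x640 assumed here).
image = np.zeros((1, 3, 640, 640), dtype=np.float32)

# Run inference; the raw output holds candidate detections.
outputs = session.run([output_name], {input_name: image})
print(outputs[0].shape)
```
In both versions, unless the export already includes the decode step, the raw network output still has to be decoded into boxes, filtered by confidence, and passed through non-maximum suppression before the detections can be drawn.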