Open zach-welch-aquabyte opened 2 years ago
What's your use case for using an older ONNX version?
Since the current build relies on submodules, if you wish to use a lower version you may need to manually set the ONNX version:
# Clone submodules as usual:
git submodule update --init --recursive
# Manually update ONNX submodule commit
cd third_party/onnx
git pull
git checkout <commit/release that you want to use>
Then building normally with the same command would build with your specified ONNX version.
hi
I have the same problem, but with onnx built from git and installed on the system.
onnx-tensorrt can't find the installation and always searches the submodules, which at this moment point to an ancient version of the onnx sources (no support for Python 3.10 and, it seems, no support for protobuf 3.19 either).
If I run git checkout main
in third_party/onnx
after git submodule update --init third_party/onnx
, I get:
[ 98%] Building CXX object CMakeFiles/onnx2trt.dir/main.cpp.o
/tmp/makepkg/sl1-onnx-tensorrt-git/src/onnx-tensorrt/main.cpp:8:10: error fatal: onnx/optimizer/optimize.h: No existe el fichero o el directorio
8 | #include <onnx/optimizer/optimize.h>
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~
compilación terminada.
when build the libnvonnxparser.so
library and python module (through cmake and by running the setup.py script),
because the optimizer
has not been split out into its own repository.
greetings
I would recommend adding the -DBUILD_LIBRARY_ONLY=1
flag to your CMake build command to skip building the binaries that rely on the older ONNX headers.
For clarity, "previously built" does not mean "older". I am building the very latest upstream onnx
release; I want onnx-tensorrt
to link against that version.
My use case is a fairly standard packaging workflow. pytorch
had to solve this same sort of problem for Debian (https://github.com/pytorch/pytorch/issues/14699), adding USE_SYSTEM_*
build options. The same solution could be used here.
My build recipe already includes the -DBUILD_LIBRARY_ONLY
option, and it does not solve this particular problem.
I'm not a coder and only do easy things, but I got my build working with the system onnx (from git) and onnx-optimizer (from git) — it needs a patch to the cmake file — with this:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 80c5118..18ef786 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -65,6 +65,14 @@ if (NOT DEFINED BUILD_LIBRARY_ONLY)
)
endif()
+if (NOT TARGET ONNX)
+ FIND_PACKAGE(ONNX REQUIRED)
+endif()
+
+if (NOT TARGET ONNXOptimizer)
+ FIND_PACKAGE(ONNXOptimizer REQUIRED)
+endif()
+
if (NOT TARGET protobuf::libprotobuf)
FIND_PACKAGE(Protobuf REQUIRED)
else()
@@ -139,8 +147,8 @@ endif()
# --------------------------------
if (NOT DEFINED BUILD_LIBRARY_ONLY)
add_executable(onnx2trt ${EXECUTABLE_SOURCES})
- target_include_directories(onnx2trt PUBLIC ${ONNX_INCLUDE_DIRS})
- target_link_libraries(onnx2trt PUBLIC ${PROTOBUF_LIB} onnx nvonnxparser_static ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS}) #${CUDA_LIBRARIES}
+ target_include_directories(onnx2trt PUBLIC onnx_optimizer ${ONNX_INCLUDE_DIRS})
+ target_link_libraries(onnx2trt PUBLIC onnx onnx_optimizer nvonnxparser_static ${CMAKE_THREAD_LIBS_INIT} ${CMAKE_DL_LIBS}) #${CUDA_LIBRARIES}
endif()
# --------------------------------
diff --git a/NvOnnxParser.h b/NvOnnxParser.h
index e7f97cf..19f2e72 100644
--- a/NvOnnxParser.h
+++ b/NvOnnxParser.h
@@ -20,6 +20,7 @@
#define NV_ONNX_PARSER_PATCH 0
static const int NV_ONNX_PARSER_VERSION = ((NV_ONNX_PARSER_MAJOR * 10000) + (NV_ONNX_PARSER_MINOR * 100) + NV_ONNX_PARSER_PATCH);
+#define TENSORRTAPI
//! \typedef SubGraph_t
//!
diff --git a/main.cpp b/main.cpp
index 07a9932..3b02f7f 100644
--- a/main.cpp
+++ b/main.cpp
@@ -5,7 +5,7 @@
#include "NvOnnxParser.h"
#include "onnx_utils.hpp"
#include "common.hpp"
-#include <onnx/optimizer/optimize.h>
+#include <onnxoptimizer/optimize.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
It is a very ugly patch, but it builds OK. Be careful: it has not been tested yet.
Greetings.
EDIT: my cmake rules
mkdir -p onnx-tensorrt/build
cd onnx-tensorrt/build
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DBUILD_SHARED_LIBS=ON \
-DCMAKE_SKIP_RPATH=ON \
-DCUDA_TOOLKIT_ROOT_DIR=/opt/cuda/include
make
cd ..
export LD_LIBRARY_PATH="$(pwd)/build"
pip wheel --no-deps . -w dist
onnx-optimize:
patch:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c2e48b35..6be9c545 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -9,8 +9,8 @@ endif(NOT MSVC)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
-set(ONNX_ROOT ${PROJECT_SOURCE_DIR}/third_party/onnx)
-add_subdirectory(${ONNX_ROOT})
+#set(ONNX_ROOT ${PROJECT_SOURCE_DIR}/third_party/onnx)
+#add_subdirectory(${ONNX_ROOT})
file(READ "${PROJECT_SOURCE_DIR}/VERSION_NUMBER" ONNX_OPTIMIZER_VERSION)
string(STRIP "${ONNX_OPTIMIZER_VERSION}" ONNX_OPTIMIZER_VERSION)
@@ -20,6 +20,8 @@ file(GLOB_RECURSE onnx_opt_srcs "onnxoptimizer/*.cc"
)
list(REMOVE_ITEM onnx_opt_srcs "${PROJECT_SOURCE_DIR}/onnxoptimizer/cpp2py_export.cc")
+find_package(ONNX REQUIRED)
+find_package(Protobuf)
add_library(onnx_optimizer ${onnx_opt_srcs})
target_link_libraries(onnx_optimizer PUBLIC onnx)
target_include_directories(onnx_optimizer PUBLIC
@@ -47,7 +48,7 @@ if(BUILD_ONNX_PYTHON)
set_target_properties(onnx_opt_cpp2py_export
PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
target_include_directories(onnx_opt_cpp2py_export PRIVATE
- $<BUILD_INTERFACE:${ONNX_ROOT}>
+# $<BUILD_INTERFACE:${ONNX_ROOT}>
$<INSTALL_INTERFACE:include>
${PYTHON_INCLUDE_DIR})
# pybind11 is a header only lib
and cmake rules (parsed to python script)
cd optimizer
CXXFLAGS+=" -DONNX_ML=1 -DONNX_NAMESPACE=onnx" \
CMAKE_ARGS="-DCMAKE_INSTALL_PREFIX=/usr -DBUILD_SHARED_LIBS=ON -DCMAKE_SKIP_RPATH=ON -DONNX_ML=1 -DONNX_NAMESPACE=onnx" python setup.py build
Description
I am trying to package
onnx-tensorrt
for Yocto. In this case, I want to use a previously built version of onnx,
rather than the version included in the third_party
directory. This is not currently possible.

Environment
TensorRT Version:
22.02
ONNX-TensorRT Version / Branch: main
GPU Type: Jetson
Nvidia Driver Version: N/A
CUDA Version: 10.2
CUDNN Version: 8.0.0.180
Operating System + Version: Yocto dunfell
Python Version (if applicable): 3.8
TensorFlow + TF2ONNX Version (if applicable):
PyTorch Version (if applicable):
Baremetal or Container (if container, which image + tag):

Steps To Reproduce