 38 |  38 |   HINTS ${TENSORRT_ROOT}
 39 |  39 |   PATH_SUFFIXES include)
 40 |  40 |
    |  41 | +
    |  42 | + file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h NVINFER_VER_CONTENT)
    |  43 | + string(REGEX MATCH "define NV_TENSORRT_MAJOR * +([0-9]+)" NV_TENSORRT_MAJOR "${NVINFER_VER_CONTENT}")
    |  44 | + string(REGEX REPLACE "define NV_TENSORRT_MAJOR * +([0-9]+)" "\\1" NV_TENSORRT_MAJOR "${NV_TENSORRT_MAJOR}")
    |  45 | + string(REGEX MATCH "define NV_TENSORRT_MINOR * +([0-9]+)" NV_TENSORRT_MINOR "${NVINFER_VER_CONTENT}")
    |  46 | + string(REGEX REPLACE "define NV_TENSORRT_MINOR * +([0-9]+)" "\\1" NV_TENSORRT_MINOR "${NV_TENSORRT_MINOR}")
    |  47 | + string(REGEX MATCH "define NV_TENSORRT_PATCH * +([0-9]+)" NV_TENSORRT_PATCH "${NVINFER_VER_CONTENT}")
    |  48 | + string(REGEX REPLACE "define NV_TENSORRT_PATCH * +([0-9]+)" "\\1" NV_TENSORRT_PATCH "${NV_TENSORRT_PATCH}")
    |  49 | + math(EXPR NV_TENSORRT_MAJOR_INT "${NV_TENSORRT_MAJOR}")
    |  50 | + math(EXPR NV_TENSORRT_MINOR_INT "${NV_TENSORRT_MINOR}")
    |  51 | + math(EXPR NV_TENSORRT_PATCH_INT "${NV_TENSORRT_PATCH}")
    |  52 | +
    |  53 | + if (NV_TENSORRT_MAJOR)
    |  54 | +   MESSAGE(STATUS "NV_TENSORRT_MAJOR is ${NV_TENSORRT_MAJOR}")
    |  55 | + else()
    |  56 | +   MESSAGE(STATUS "Can't find NV_TENSORRT_MAJOR macro")
    |  57 | + endif()
    |  58 | +
    |  59 | + # Check TRT version >= 10.0.1.6
    |  60 | + if ((NV_TENSORRT_MAJOR_INT GREATER 10) OR
    |  61 | +     (NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_MINOR_INT GREATER 0) OR
    |  62 | +     (NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_PATCH_INT GREATER 0))
    |  63 | +   set(TRT_GREATER_OR_EQUAL_TRT_10_GA ON)
    |  64 | + endif()
    |  65 | +
 41 |  66 | # TensorRT 10 GA onwards, the TensorRT libraries will have major version appended to the end on Windows,
 42 |  67 | # for example, nvinfer_10.dll, nvinfer_plugin_10.dll, nvonnxparser_10.dll ...
 43 |     | - if (WIN32)
 44 |     | -   file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h NVINFER_VER_CONTENT)
 45 |     | -   string(REGEX MATCH "define NV_TENSORRT_MAJOR * +([0-9]+)" NV_TENSORRT_MAJOR "${NVINFER_VER_CONTENT}")
 46 |     | -   string(REGEX REPLACE "define NV_TENSORRT_MAJOR * +([0-9]+)" "\\1" NV_TENSORRT_MAJOR "${NV_TENSORRT_MAJOR}")
 47 |     | -   string(REGEX MATCH "define NV_TENSORRT_MINOR * +([0-9]+)" NV_TENSORRT_MINOR "${NVINFER_VER_CONTENT}")
 48 |     | -   string(REGEX REPLACE "define NV_TENSORRT_MINOR * +([0-9]+)" "\\1" NV_TENSORRT_MINOR "${NV_TENSORRT_MINOR}")
 49 |     | -   string(REGEX MATCH "define NV_TENSORRT_PATCH * +([0-9]+)" NV_TENSORRT_PATCH "${NVINFER_VER_CONTENT}")
 50 |     | -   string(REGEX REPLACE "define NV_TENSORRT_PATCH * +([0-9]+)" "\\1" NV_TENSORRT_PATCH "${NV_TENSORRT_PATCH}")
 51 |     | -   math(EXPR NV_TENSORRT_MAJOR_INT "${NV_TENSORRT_MAJOR}")
 52 |     | -   math(EXPR NV_TENSORRT_MINOR_INT "${NV_TENSORRT_MINOR}")
 53 |     | -   math(EXPR NV_TENSORRT_PATCH_INT "${NV_TENSORRT_PATCH}")
 54 |     | -
 55 |     | -   if (NV_TENSORRT_MAJOR)
 56 |     | -     MESSAGE(STATUS "NV_TENSORRT_MAJOR is ${NV_TENSORRT_MAJOR}")
 57 |     | -   else()
 58 |     | -     MESSAGE(STATUS "Can't find NV_TENSORRT_MAJOR macro")
 59 |     | -   endif()
 60 |     | -
 61 |     | -   # Check TRT version >= 10.0.1.6 (Note: TRT 10 EA is 10.0.0.6 but with no major version appended to the end)
 62 |     | -   if ((NV_TENSORRT_MAJOR_INT GREATER 10) OR
 63 |     | -       (NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_MINOR_INT GREATER 0) OR
 64 |     | -       (NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_PATCH_INT GREATER 0))
 65 |     | -     set(NVINFER_LIB "nvinfer_${NV_TENSORRT_MAJOR}")
 66 |     | -     set(NVINFER_PLUGIN_LIB "nvinfer_plugin_${NV_TENSORRT_MAJOR}")
 67 |     | -     set(PARSER_LIB "nvonnxparser_${NV_TENSORRT_MAJOR}")
 68 |     | -   endif()
    |  68 | + if (WIN32 AND TRT_GREATER_OR_EQUAL_TRT_10_GA)
    |  69 | +   set(NVINFER_LIB "nvinfer_${NV_TENSORRT_MAJOR}")
    |  70 | +   set(NVINFER_PLUGIN_LIB "nvinfer_plugin_${NV_TENSORRT_MAJOR}")
    |  71 | +   set(PARSER_LIB "nvonnxparser_${NV_TENSORRT_MAJOR}")
 69 |  72 | endif()
 70 |  73 |
 71 |  74 | if (NOT NVINFER_LIB)
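
For context on the hunk above: NvInferVersion.h ships the TensorRT version as plain `#define` macros, and the two-step `string(REGEX MATCH)` / `string(REGEX REPLACE)` pattern reduces each macro line to its number. Below is a minimal, self-contained sketch of that extraction, runnable with `cmake -P`; the header content and the script name are illustrative stand-ins, not copied from any particular TensorRT release.

```cmake
# version_probe.cmake -- run with: cmake -P version_probe.cmake
# Illustrative stand-in for the real NvInferVersion.h content.
set(NVINFER_VER_CONTENT
"#define NV_TENSORRT_MAJOR 10
#define NV_TENSORRT_MINOR 0
#define NV_TENSORRT_PATCH 1")

# Same two-step pattern as the change above: MATCH narrows the text to the
# macro line, REPLACE keeps only the captured number.
string(REGEX MATCH "define NV_TENSORRT_MAJOR * +([0-9]+)" NV_TENSORRT_MAJOR "${NVINFER_VER_CONTENT}")
string(REGEX REPLACE "define NV_TENSORRT_MAJOR * +([0-9]+)" "\\1" NV_TENSORRT_MAJOR "${NV_TENSORRT_MAJOR}")
string(REGEX MATCH "define NV_TENSORRT_MINOR * +([0-9]+)" NV_TENSORRT_MINOR "${NVINFER_VER_CONTENT}")
string(REGEX REPLACE "define NV_TENSORRT_MINOR * +([0-9]+)" "\\1" NV_TENSORRT_MINOR "${NV_TENSORRT_MINOR}")
string(REGEX MATCH "define NV_TENSORRT_PATCH * +([0-9]+)" NV_TENSORRT_PATCH "${NVINFER_VER_CONTENT}")
string(REGEX REPLACE "define NV_TENSORRT_PATCH * +([0-9]+)" "\\1" NV_TENSORRT_PATCH "${NV_TENSORRT_PATCH}")

# Anything newer than 10.0.0 (the EA build) counts as TRT 10 GA or later,
# mirroring the version gate introduced in the hunk above.
if ((NV_TENSORRT_MAJOR GREATER 10) OR
    (NV_TENSORRT_MAJOR EQUAL 10 AND NV_TENSORRT_MINOR GREATER 0) OR
    (NV_TENSORRT_MAJOR EQUAL 10 AND NV_TENSORRT_PATCH GREATER 0))
  set(TRT_GREATER_OR_EQUAL_TRT_10_GA ON)
endif()

message(STATUS "Parsed TensorRT ${NV_TENSORRT_MAJOR}.${NV_TENSORRT_MINOR}.${NV_TENSORRT_PATCH}, GA or later: ${TRT_GREATER_OR_EQUAL_TRT_10_GA}")
```

The gate matches the removed comment's note that TRT 10 EA reports 10.0.0.x while GA starts at 10.0.1.x.
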
 80 |  83 |   set(PARSER_LIB "nvonnxparser")
 81 |  84 | endif()
 82 |  85 |
 83 |     | - if (onnxruntime_USE_TENSORRT_BUILTIN_PARSER)
 84 |     | -   # Add TensorRT library
 85 |     | -   MESSAGE(STATUS "Search for ${NVINFER_LIB}, ${NVINFER_PLUGIN_LIB} and ${PARSER_LIB}")
    |  86 | + MESSAGE(STATUS "Looking for ${NVINFER_LIB} and ${NVINFER_PLUGIN_LIB}")
 86 |  87 |
 87 |     | -   find_library(TENSORRT_LIBRARY_INFER ${NVINFER_LIB}
 88 |     | -     HINTS ${TENSORRT_ROOT}
 89 |     | -     PATH_SUFFIXES lib lib64 lib/x64)
    |  88 | + find_library(TENSORRT_LIBRARY_INFER ${NVINFER_LIB}
    |  89 | +   HINTS ${TENSORRT_ROOT}
    |  90 | +   PATH_SUFFIXES lib lib64 lib/x64)
 90 |  91 |
 91 |     | -   if (NOT TENSORRT_LIBRARY_INFER)
 92 |     | -     MESSAGE(STATUS "Can't find ${NVINFER_LIB}")
 93 |     | -   endif()
    |  92 | + if (NOT TENSORRT_LIBRARY_INFER)
    |  93 | +   MESSAGE(STATUS "Can't find ${NVINFER_LIB}")
    |  94 | + endif()
 94 |  95 |
 95 |     | -   find_library(TENSORRT_LIBRARY_INFER_PLUGIN ${NVINFER_PLUGIN_LIB}
 96 |     | -     HINTS ${TENSORRT_ROOT}
 97 |     | -     PATH_SUFFIXES lib lib64 lib/x64)
    |  96 | + find_library(TENSORRT_LIBRARY_INFER_PLUGIN ${NVINFER_PLUGIN_LIB}
    |  97 | +   HINTS ${TENSORRT_ROOT}
    |  98 | +   PATH_SUFFIXES lib lib64 lib/x64)
 98 |  99 |
 99 |     | -   if (NOT TENSORRT_LIBRARY_INFER_PLUGIN)
100 |     | -     MESSAGE(STATUS "Can't find ${NVINFER_PLUGIN_LIB}")
101 |     | -   endif()
    | 100 | + if (NOT TENSORRT_LIBRARY_INFER_PLUGIN)
    | 101 | +   MESSAGE(STATUS "Can't find ${NVINFER_PLUGIN_LIB}")
    | 102 | + endif()
    | 103 | +
    | 104 | + if (onnxruntime_USE_TENSORRT_BUILTIN_PARSER)
    | 105 | +   MESSAGE(STATUS "Looking for ${PARSER_LIB}")
102 | 106 |
103 | 107 |   find_library(TENSORRT_LIBRARY_NVONNXPARSER ${PARSER_LIB}
104 | 108 |     HINTS ${TENSORRT_ROOT}
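
The hunk above makes the `nvinfer` and `nvinfer_plugin` lookups unconditional and only searches for the ONNX parser when `onnxruntime_USE_TENSORRT_BUILTIN_PARSER` is enabled. As a usage sketch of the lookup itself (the `TENSORRT_ROOT` path and the plain `nvinfer` name below are assumptions for illustration, not taken from this build):

```cmake
# trt_lookup_demo.cmake -- run with: cmake -P trt_lookup_demo.cmake
# Assumed install prefix; point it at the real TensorRT location.
set(TENSORRT_ROOT "/opt/tensorrt")
# On Windows with TRT >= 10 GA this would be "nvinfer_10" (see the hunks above).
set(NVINFER_LIB "nvinfer")

# Searches <hint>/lib, <hint>/lib64 and <hint>/lib/x64 under TENSORRT_ROOT;
# the result is either an absolute path or TENSORRT_LIBRARY_INFER-NOTFOUND.
find_library(TENSORRT_LIBRARY_INFER ${NVINFER_LIB}
  HINTS ${TENSORRT_ROOT}
  PATH_SUFFIXES lib lib64 lib/x64)

if (TENSORRT_LIBRARY_INFER)
  message(STATUS "Found ${NVINFER_LIB} at ${TENSORRT_LIBRARY_INFER}")
else()
  message(STATUS "Can't find ${NVINFER_LIB}")
endif()
```
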
111 | 115 |   set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN} ${TENSORRT_LIBRARY_NVONNXPARSER})
112 | 116 |   MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}")
113 | 117 | else()
    | 118 | +   if (TRT_GREATER_OR_EQUAL_TRT_10_GA)
    | 119 | +     set(ONNX_USE_LITE_PROTO ON)
    | 120 | +   endif()
114 | 121 |   FetchContent_Declare(
115 | 122 |     onnx_tensorrt
116 | 123 |     URL ${DEP_URL_onnx_tensorrt}
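
In the `else()` branch above (open-source parser pulled in via FetchContent), `ONNX_USE_LITE_PROTO` is switched on for TRT 10 GA before the `onnx_tensorrt` dependency is declared. A variable set this way is visible to the fetched project when it configures, so it can take effect ahead of that project's own `option()` defaults (see CMake policy CMP0077). A minimal sketch of that ordering, with a placeholder archive URL rather than the real `${DEP_URL_onnx_tensorrt}`:

```cmake
cmake_minimum_required(VERSION 3.14)
project(fetch_demo LANGUAGES CXX)

include(FetchContent)

# Set options for the fetched project before it is brought into the build,
# which is why the hunk above flips ONNX_USE_LITE_PROTO before the declaration.
set(ONNX_USE_LITE_PROTO ON)

FetchContent_Declare(
  onnx_tensorrt
  URL https://example.com/onnx-tensorrt.zip  # placeholder, not the real DEP_URL_onnx_tensorrt
)
# Downloads, unpacks and add_subdirectory()s the dependency.
FetchContent_MakeAvailable(onnx_tensorrt)
```
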
132 | 139 |   unset(PROTOBUF_LIBRARY)
133 | 140 |   unset(OLD_CMAKE_CXX_FLAGS)
134 | 141 |   unset(OLD_CMAKE_CUDA_FLAGS)
135 |     | -   set_target_properties(nvonnxparser PROPERTIES LINK_FLAGS "/ignore:4199")
    | 142 | +   set_target_properties(${PARSER_LIB} PROPERTIES LINK_FLAGS "/ignore:4199")
136 | 143 |   target_compile_options(nvonnxparser_static PRIVATE /FIio.h /wd4100)
137 |     | -   target_compile_options(nvonnxparser PRIVATE /FIio.h /wd4100)
    | 144 | +   target_compile_options(${PARSER_LIB} PRIVATE /FIio.h /wd4100)
138 | 145 |   endif()
139 | 146 |   # Static libraries are just nvonnxparser_static on all platforms
140 | 147 |   set(onnxparser_link_libs nvonnxparser_static)
    | 148 | +   set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN})
    | 149 | +   MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}")
141 | 150 | endif()
142 | 151 |
143 | 152 | include_directories(${TENSORRT_INCLUDE_DIR})
144 | 153 | # ${TENSORRT_LIBRARY} is empty if we link nvonnxparser_static.
145 | 154 | # nvonnxparser_static is linked against tensorrt libraries in onnx-tensorrt
146 | 155 | # See https://github.com/onnx/onnx-tensorrt/blob/8af13d1b106f58df1e98945a5e7c851ddb5f0791/CMakeLists.txt#L121
    | 156 | + # However, starting from TRT 10 GA, nvonnxparser_static doesn't link against tensorrt libraries.
    | 157 | + # Therefore, the above code finds ${TENSORRT_LIBRARY_INFER} and ${TENSORRT_LIBRARY_INFER_PLUGIN}.
147 | 158 | set(trt_link_libs cudnn cublas ${CMAKE_DL_LIBS} ${TENSORRT_LIBRARY})
148 | 159 |
149 | 160 | file(GLOB_RECURSE onnxruntime_providers_tensorrt_cc_srcs CONFIGURE_DEPENDS
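
Past the end of the visible diff, `${trt_link_libs}` is what a consumer target links against; the provider target built from `onnxruntime_providers_tensorrt_cc_srcs` is presumably defined further down in the file and is not shown here. A hypothetical consumer, for illustration only:

```cmake
# Hypothetical target name; not part of the change above.
add_library(example_trt_consumer SHARED example.cc)
target_include_directories(example_trt_consumer PRIVATE ${TENSORRT_INCLUDE_DIR})
# cudnn, cublas, dl and whichever TensorRT libraries were located above.
target_link_libraries(example_trt_consumer PRIVATE ${trt_link_libs})
```
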