|
72 | 72 | endif() |
73 | 73 |
|
74 | 74 | # Starting with TensorRT 10 GA, the TensorRT libraries will have the major version appended to the end on Windows, |
75 | | - # for example, nvinfer_10.dll, nvonnxparser_10.dll ... |
| 75 | + # for example, nvinfer_10.dll, nvinfer_plugin_10.dll, nvonnxparser_10.dll ... |
76 | 76 | if (WIN32 AND TRT_GREATER_OR_EQUAL_TRT_10_GA) |
77 | 77 | set(NVINFER_LIB "nvinfer_${NV_TENSORRT_MAJOR}") |
| 78 | + set(NVINFER_PLUGIN_LIB "nvinfer_plugin_${NV_TENSORRT_MAJOR}") |
78 | 79 | set(PARSER_LIB "nvonnxparser_${NV_TENSORRT_MAJOR}") |
79 | 80 | endif() |
80 | 81 |
|
81 | 82 | if (NOT NVINFER_LIB) |
82 | 83 | set(NVINFER_LIB "nvinfer") |
83 | 84 | endif() |
84 | 85 |
|
| 86 | + if (NOT NVINFER_PLUGIN_LIB) |
| 87 | + set(NVINFER_PLUGIN_LIB "nvinfer_plugin") |
| 88 | + endif() |
| 89 | + |
85 | 90 | if (NOT PARSER_LIB) |
86 | 91 | set(PARSER_LIB "nvonnxparser") |
87 | 92 | endif() |
88 | 93 |
|
89 | | - MESSAGE(STATUS "Looking for ${NVINFER_LIB}") |
| 94 | + MESSAGE(STATUS "Looking for ${NVINFER_LIB} and ${NVINFER_PLUGIN_LIB}") |
90 | 95 |
|
91 | 96 | find_library(TENSORRT_LIBRARY_INFER ${NVINFER_LIB} |
92 | 97 | HINTS ${TENSORRT_ROOT} |
|
96 | 101 | MESSAGE(STATUS "Can't find ${NVINFER_LIB}") |
97 | 102 | endif() |
98 | 103 |
|
| 104 | + find_library(TENSORRT_LIBRARY_INFER_PLUGIN ${NVINFER_PLUGIN_LIB} |
| 105 | + HINTS ${TENSORRT_ROOT} |
| 106 | + PATH_SUFFIXES lib lib64 lib/x64) |
| 107 | + |
| 108 | + if (NOT TENSORRT_LIBRARY_INFER_PLUGIN) |
| 109 | + MESSAGE(STATUS "Can't find ${NVINFER_PLUGIN_LIB}") |
| 110 | + endif() |
| 111 | + |
99 | 112 | if (onnxruntime_USE_TENSORRT_BUILTIN_PARSER) |
100 | 113 | MESSAGE(STATUS "Looking for ${PARSER_LIB}") |
101 | 114 |
|
|
107 | 120 | MESSAGE(STATUS "Can't find ${PARSER_LIB}") |
108 | 121 | endif() |
109 | 122 |
|
110 | | - set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_NVONNXPARSER}) |
| 123 | + set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN} ${TENSORRT_LIBRARY_NVONNXPARSER}) |
111 | 124 | MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}") |
112 | 125 | else() |
113 | 126 | if (TRT_GREATER_OR_EQUAL_TRT_10_GA) |
|
140 | 153 | endif() |
141 | 154 | # Static libraries are just nvonnxparser_static on all platforms |
142 | 155 | set(onnxparser_link_libs nvonnxparser_static) |
143 | | - set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER}) |
| 156 | + set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN}) |
144 | 157 | MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}") |
145 | 158 | endif() |
146 | 159 |
|
147 | 160 | # ${TENSORRT_LIBRARY} is empty if we link nvonnxparser_static. |
148 | 161 | # nvonnxparser_static is linked against tensorrt libraries in onnx-tensorrt |
149 | 162 | # See https://github.com/onnx/onnx-tensorrt/blob/8af13d1b106f58df1e98945a5e7c851ddb5f0791/CMakeLists.txt#L121 |
150 | 163 | # However, starting from TRT 10 GA, nvonnxparser_static doesn't link against tensorrt libraries. |
151 | | - # Therefore, the above code finds ${TENSORRT_LIBRARY_INFER}. |
| 164 | + # Therefore, the above code finds ${TENSORRT_LIBRARY_INFER} and ${TENSORRT_LIBRARY_INFER_PLUGIN}. |
152 | 165 | if(onnxruntime_CUDA_MINIMAL) |
153 | 166 | set(trt_link_libs ${CMAKE_DL_LIBS} ${TENSORRT_LIBRARY}) |
154 | 167 | else() |
|
0 commit comments