add ORT_DIRECTORY compile option

Author: zhoushunjie
Date: 2022-09-21 15:01:47 +08:00
Parent: a8e447f225
Commit: 662b1e6113
7 changed files with 59 additions and 30 deletions

View File

@@ -51,6 +51,7 @@ option(ENABLE_VISION_VISUALIZE "Whether to enable visualize vision model result
option(ENABLE_TEXT "Whether to enable text models usage." OFF)
option(WITH_TESTING "Whether to compile with unittest." OFF)
option(OPENCV_DIRECTORY "User can specify the installed opencv directory.")
+ option(ORT_DIRECTORY "User can specify the installed onnxruntime directory.")
# Please don't enable this flag for now; some bugs exist.
# option(ENABLE_OPENCV_CUDA "Whether to enable opencv with cuda, this will allow process image with GPU." OFF)
@@ -146,6 +147,12 @@ endif(ENABLE_PADDLE_FRONTEND)
if(ENABLE_ORT_BACKEND)
add_definitions(-DENABLE_ORT_BACKEND)
list(APPEND ALL_DEPLOY_SRCS ${DEPLOY_ORT_SRCS})
+ if (ORT_DIRECTORY)
+   message(STATUS "Use the onnxruntime lib specified by the user. ONNXRuntime path: ${ORT_DIRECTORY}")
+   STRING(REGEX REPLACE "\\\\" "/" ORT_DIRECTORY ${ORT_DIRECTORY})
+ else()
+   message(STATUS "Use the default onnxruntime lib.")
+ endif()
include(${PROJECT_SOURCE_DIR}/cmake/onnxruntime.cmake)
list(APPEND DEPEND_LIBS external_onnxruntime)
endif()
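Aside: because ORT_DIRECTORY is a plain user-supplied path, a configure-time sanity check could catch typos early. The snippet below is a hedged sketch, not part of this commit; the header name assumes the layout of the official onnxruntime release packages.

# Hypothetical sketch (not in the commit): fail fast if ORT_DIRECTORY does not
# look like an onnxruntime install tree. The header path assumes the official
# onnxruntime release tarball layout (include/onnxruntime_cxx_api.h).
if(ORT_DIRECTORY AND NOT EXISTS "${ORT_DIRECTORY}/include/onnxruntime_cxx_api.h")
  message(FATAL_ERROR "ORT_DIRECTORY is set to '${ORT_DIRECTORY}', but no onnxruntime headers were found under it.")
endif()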

View File

@@ -13,6 +13,7 @@ set(ENABLE_TEXT @ENABLE_TEXT@)
# set(ENABLE_OPENCV_CUDA @ENABLE_OPENCV_CUDA@)
set(LIBRARY_NAME @LIBRARY_NAME@)
set(OPENCV_DIRECTORY @OPENCV_DIRECTORY@)
+ set(ORT_DIRECTORY @ORT_DIRECTORY@)
set(FASTDEPLOY_LIBS "")
set(FASTDEPLOY_INCS "")
@@ -26,7 +27,12 @@ find_library(FDLIB ${LIBRARY_NAME} ${CMAKE_CURRENT_LIST_DIR}/lib NO_DEFAULT_PATH
list(APPEND FASTDEPLOY_LIBS ${FDLIB})
if(ENABLE_ORT_BACKEND)
- find_library(ORT_LIB onnxruntime ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/onnxruntime/lib NO_DEFAULT_PATH)
+ if (ORT_DIRECTORY)
+   set(ORT_LIB_PATH ${ORT_DIRECTORY}/lib)
+ else()
+   set(ORT_LIB_PATH ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/onnxruntime/lib)
+ endif()
+ find_library(ORT_LIB onnxruntime ${ORT_LIB_PATH} NO_DEFAULT_PATH)
list(APPEND FASTDEPLOY_LIBS ${ORT_LIB})
endif()
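For context, a downstream project would typically consume this generated config roughly as follows. This is a minimal sketch; FASTDEPLOY_INSTALL_DIR, the infer_demo target, and the infer.cc source name are assumptions for illustration, not taken from this commit.

# Minimal downstream CMakeLists.txt sketch. Assumes the config above is
# installed as FastDeploy.cmake under FASTDEPLOY_INSTALL_DIR; target and
# source names are hypothetical.
cmake_minimum_required(VERSION 3.12)
project(infer_demo CXX)
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
add_executable(infer_demo infer.cc)
target_include_directories(infer_demo PRIVATE ${FASTDEPLOY_INCS})
target_link_libraries(infer_demo PRIVATE ${FASTDEPLOY_LIBS})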

View File

@@ -19,12 +19,23 @@ set(ONNXRUNTIME_PREFIX_DIR ${THIRD_PARTY_PATH}/onnxruntime)
set(ONNXRUNTIME_SOURCE_DIR
${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME_PROJECT})
set(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
+ if (ORT_DIRECTORY)
+   set(ONNXRUNTIME_INC_DIR
+       "${ORT_DIRECTORY}/include"
+       CACHE PATH "onnxruntime include directory." FORCE)
+   set(ONNXRUNTIME_LIB_DIR
+       "${ORT_DIRECTORY}/lib"
+       CACHE PATH "onnxruntime lib directory." FORCE)
+ else()
    set(ONNXRUNTIME_INC_DIR
        "${ONNXRUNTIME_INSTALL_DIR}/include"
        CACHE PATH "onnxruntime include directory." FORCE)
    set(ONNXRUNTIME_LIB_DIR
        "${ONNXRUNTIME_INSTALL_DIR}/lib"
        CACHE PATH "onnxruntime lib directory." FORCE)
+ endif()
set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
set(ONNXRUNTIME_VERSION "1.12.0")
@@ -68,18 +79,19 @@ include_directories(${ONNXRUNTIME_INC_DIR}
if(WIN32)
  set(ONNXRUNTIME_LIB
-     "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib"
-     CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+     "${ONNXRUNTIME_LIB_DIR}/onnxruntime.lib"
+     CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
elseif(APPLE)
  set(ONNXRUNTIME_LIB
-     "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.dylib"
-     CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+     "${ONNXRUNTIME_LIB_DIR}/libonnxruntime.dylib"
+     CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
else()
  set(ONNXRUNTIME_LIB
-     "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.so"
-     CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+     "${ONNXRUNTIME_LIB_DIR}/libonnxruntime.so"
+     CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
endif()
+ if (NOT ORT_DIRECTORY)
ExternalProject_Add(
${ONNXRUNTIME_PROJECT}
${EXTERNAL_PROJECT_LOG_ARGS}
@@ -96,6 +108,7 @@ ExternalProject_Add(
${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include
${ONNXRUNTIME_INC_DIR}
BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB})
+ endif()
add_library(external_onnxruntime STATIC IMPORTED GLOBAL)
set_property(TARGET external_onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
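To make the effect of the imported target concrete, here is a minimal sketch of how a target inside the build tree might link against it. The fastdeploy_demo target and demo.cc source are hypothetical names, not part of this commit.

# Hypothetical consumer inside the build tree. ONNXRUNTIME_INC_DIR and the
# external_onnxruntime imported target come from onnxruntime.cmake above;
# the target and source names below are placeholders.
add_library(fastdeploy_demo SHARED demo.cc)
target_include_directories(fastdeploy_demo PRIVATE ${ONNXRUNTIME_INC_DIR})
target_link_libraries(fastdeploy_demo PRIVATE external_onnxruntime)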

View File

@@ -18,6 +18,8 @@
| TRT_DIRECTORY | When the TensorRT inference backend is enabled, this parameter must be used to specify the TensorRT path | TensorRT 8.4 or later |
| ENABLE_VISION | Enable the vision model module, ON by default | |
| ENABLE_TEXT | Enable the text model module, ON by default | |
| OPENCV_DIRECTORY | Path to a pre-installed OpenCV library, empty by default | If no OpenCV path is specified, OpenCV is downloaded and installed automatically |
+ | ORT_DIRECTORY | Path to a pre-installed ONNXRuntime library, empty by default | If no ONNXRuntime path is specified, ONNXRuntime is downloaded and installed automatically |

FastDeploy lets users choose which inference backends to compile in; Paddle Inference, ONNXRuntime, and TensorRT (loading the ONNX format) are currently supported. Models already supported by FastDeploy have been validated on the different backends, and a backend is selected automatically from those enabled at compile time; if no usable backend is available, a corresponding message is reported (for example, YOLOv7 currently supports only the ONNXRuntime/TensorRT backends, so if neither was enabled at compile time, inference will report that no backend is available).
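One way to pin the two options documented above (equivalent to passing -DENABLE_ORT_BACKEND=ON -DORT_DIRECTORY=... on the cmake command line) is an initial-cache file loaded with cmake -C. This is a hedged sketch; the file name and the onnxruntime path are placeholders, not values from this commit.

# ort_options.cmake (hypothetical): pass with `cmake -C ort_options.cmake ..`
# to preload these cache entries before the project is configured.
set(ENABLE_ORT_BACKEND ON CACHE BOOL "Build the ONNXRuntime backend" FORCE)
set(ORT_DIRECTORY "/opt/onnxruntime-linux-x64-1.12.0" CACHE PATH "Pre-installed onnxruntime (placeholder path)" FORCE)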

View File

@@ -16,7 +16,7 @@ import logging
import os
import sys
- user_specified_dirs = ['@OPENCV_DIRECTORY@', ]
+ user_specified_dirs = ['@OPENCV_DIRECTORY@', '@ORT_DIRECTORY@', ]
def is_built_with_gpu() -> bool:

View File

@@ -18,7 +18,7 @@ import shutil
import subprocess
import platform
- user_specified_dirs = ['@OPENCV_DIRECTORY@', ]
+ user_specified_dirs = ['@OPENCV_DIRECTORY@', '@ORT_DIRECTORY@', ]
def process_on_linux(current_dir):
rpaths = ["$ORIGIN:$ORIGIN/libs"]

View File

@@ -63,6 +63,7 @@ setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY",
setup_configs["LIBRARY_NAME"] = PACKAGE_NAME
setup_configs["PY_LIBRARY_NAME"] = PACKAGE_NAME + "_main"
setup_configs["OPENCV_DIRECTORY"] = os.getenv("OPENCV_DIRECTORY", "")
setup_configs["ORT_DIRECTORY"] = os.getenv("ORT_DIRECTORY", "")
if setup_configs["WITH_GPU"] == "ON":
wheel_name = "fastdeploy-gpu-python"