[Compile] Support custom paddle inference (#839)

* Support custom paddle inference

* update setup.py
Jason authored on 2022-12-09 11:41:19 +08:00, committed by GitHub
parent 788ca79e5f
commit b0988bf423
2 changed files with 56 additions and 39 deletions
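This change lets the build consume a locally built Paddle Inference package instead of always downloading a prebuilt archive: the CMake configuration of the Paddle Inference backend gains a PADDLEINFERENCE_DIRECTORY option, and setup.py forwards an environment variable of the same name so the Python build can use it as well.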


@@ -13,6 +13,8 @@
 # limitations under the License.
 include(ExternalProject)
+option(PADDLEINFERENCE_DIRECTORY "Directory of Paddle Inference library" OFF)
 set(PADDLEINFERENCE_PROJECT "extern_paddle_inference")
 set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference)
 set(PADDLEINFERENCE_SOURCE_DIR
@@ -27,6 +29,10 @@ set(PADDLEINFERENCE_LIB_DIR
 set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}"
                       "${PADDLEINFERENCE_LIB_DIR}")
+if(PADDLEINFERENCE_DIRECTORY)
+  set(PADDLEINFERENCE_INC_DIR ${PADDLEINFERENCE_DIRECTORY}/paddle/include)
+endif()
 include_directories(${PADDLEINFERENCE_INC_DIR})
 if(WIN32)
   set(PADDLEINFERENCE_COMPILE_LIB
@@ -47,50 +53,59 @@ else()
 endif(WIN32)
-set(PADDLEINFERENCE_URL_BASE "https://bj.bcebos.com/fastdeploy/third_libs/")
-set(PADDLEINFERENCE_VERSION "2.4-dev3")
-if(WIN32)
-  if (WITH_GPU)
-    set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.zip")
-  else()
-    set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-${PADDLEINFERENCE_VERSION}.zip")
-  endif()
-elseif(APPLE)
-  if(CURRENT_OSX_ARCH MATCHES "arm64")
-    message(FATAL_ERROR "Paddle Backend doesn't support Mac OSX with Arm64 now.")
-    set(PADDLEINFERENCE_FILE "paddle_inference-osx-arm64-${PADDLEINFERENCE_VERSION}.tgz")
-  else()
-    set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-${PADDLEINFERENCE_VERSION}.tgz")
-  endif()
-else()
-  if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64")
-    message(FATAL_ERROR "Paddle Backend doesn't support linux aarch64 now.")
-    set(PADDLEINFERENCE_FILE "paddle_inference-linux-aarch64-${PADDLEINFERENCE_VERSION}.tgz")
-  else()
-    set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-${PADDLEINFERENCE_VERSION}.tgz")
-    if(WITH_GPU)
-      set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.tgz")
-    endif()
-    if (WITH_IPU)
-      set(PADDLEINFERENCE_VERSION "2.4-dev1")
-      set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-ipu-${PADDLEINFERENCE_VERSION}.tgz")
-    endif()
-  endif()
-endif()
-set(PADDLEINFERENCE_URL "${PADDLEINFERENCE_URL_BASE}${PADDLEINFERENCE_FILE}")
-
-ExternalProject_Add(
-  ${PADDLEINFERENCE_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  URL ${PADDLEINFERENCE_URL}
-  PREFIX ${PADDLEINFERENCE_PREFIX_DIR}
-  DOWNLOAD_NO_PROGRESS 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND
-    ${CMAKE_COMMAND} -E copy_directory ${PADDLEINFERENCE_SOURCE_DIR} ${PADDLEINFERENCE_INSTALL_DIR}
-  BUILD_BYPRODUCTS ${PADDLEINFERENCE_COMPILE_LIB})
+if(PADDLEINFERENCE_DIRECTORY)
+  if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference")
+    file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference")
+  endif()
+  find_package(Python COMPONENTS Interpreter Development REQUIRED)
+  message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...")
+  execute_process(COMMAND ${Python_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/copy_directory.py ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference)
+else()
+  set(PADDLEINFERENCE_URL_BASE "https://bj.bcebos.com/fastdeploy/third_libs/")
+  set(PADDLEINFERENCE_VERSION "2.4-dev3")
+  if(WIN32)
+    if (WITH_GPU)
+      set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.zip")
+    else()
+      set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-${PADDLEINFERENCE_VERSION}.zip")
+    endif()
+  elseif(APPLE)
+    if(CURRENT_OSX_ARCH MATCHES "arm64")
+      message(FATAL_ERROR "Paddle Backend doesn't support Mac OSX with Arm64 now.")
+      set(PADDLEINFERENCE_FILE "paddle_inference-osx-arm64-${PADDLEINFERENCE_VERSION}.tgz")
+    else()
+      set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-${PADDLEINFERENCE_VERSION}.tgz")
+    endif()
+  else()
+    if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64")
+      message(FATAL_ERROR "Paddle Backend doesn't support linux aarch64 now.")
+      set(PADDLEINFERENCE_FILE "paddle_inference-linux-aarch64-${PADDLEINFERENCE_VERSION}.tgz")
+    else()
+      set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-${PADDLEINFERENCE_VERSION}.tgz")
+      if(WITH_GPU)
+        set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.tgz")
+      endif()
+      if (WITH_IPU)
+        set(PADDLEINFERENCE_VERSION "2.4-dev1")
+        set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-ipu-${PADDLEINFERENCE_VERSION}.tgz")
+      endif()
+    endif()
+  endif()
+  set(PADDLEINFERENCE_URL "${PADDLEINFERENCE_URL_BASE}${PADDLEINFERENCE_FILE}")
+
+  ExternalProject_Add(
+    ${PADDLEINFERENCE_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${PADDLEINFERENCE_URL}
+    PREFIX ${PADDLEINFERENCE_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy_directory ${PADDLEINFERENCE_SOURCE_DIR} ${PADDLEINFERENCE_INSTALL_DIR}
+    BUILD_BYPRODUCTS ${PADDLEINFERENCE_COMPILE_LIB})
+endif(PADDLEINFERENCE_DIRECTORY)

 if(UNIX AND (NOT APPLE) AND (NOT ANDROID))
   add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME})
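When PADDLEINFERENCE_DIRECTORY is set, the user-supplied library is copied into the install tree by scripts/copy_directory.py, which is not part of this diff. A minimal sketch of such a helper, assuming it only needs to mirror a source tree into the destination, could look like this:

# Hypothetical sketch only; the real scripts/copy_directory.py is not shown in this commit.
import shutil
import sys

def copy_directory(src, dst):
    # Remove any stale destination, then mirror the source tree into it.
    shutil.rmtree(dst, ignore_errors=True)
    shutil.copytree(src, dst)

if __name__ == "__main__":
    if len(sys.argv) != 3:
        raise SystemExit("usage: copy_directory.py <src> <dst>")
    copy_directory(sys.argv[1], sys.argv[2])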

The setup.py change exposes the same switch to the Python build through an environment variable:

@@ -78,6 +78,8 @@ setup_configs["LIBRARY_NAME"] = PACKAGE_NAME
 setup_configs["PY_LIBRARY_NAME"] = PACKAGE_NAME + "_main"
 setup_configs["OPENCV_DIRECTORY"] = os.getenv("OPENCV_DIRECTORY", "")
 setup_configs["ORT_DIRECTORY"] = os.getenv("ORT_DIRECTORY", "")
+setup_configs["PADDLEINFERENCE_DIRECTORY"] = os.getenv("PADDLEINFERENCE_DIRECTORY", "")
 setup_configs["RKNN2_TARGET_SOC"] = os.getenv("RKNN2_TARGET_SOC", "")
 if setup_configs["RKNN2_TARGET_SOC"] != "" or setup_configs["BUILD_ON_JETSON"] != "OFF":
     REQUIRED_PACKAGES = REQUIRED_PACKAGES.replace("opencv-python", "")
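The new setup_configs entry only takes effect because setup.py forwards its entries to CMake as -D definitions; the helper name below is illustrative, not the actual code, but the pattern is roughly:

# Illustrative only: how entries of setup_configs typically reach CMake as -D flags.
import os

setup_configs = {
    "PADDLEINFERENCE_DIRECTORY": os.getenv("PADDLEINFERENCE_DIRECTORY", ""),
}

cmake_args = ["-D{}={}".format(key, value) for key, value in setup_configs.items()]
# With PADDLEINFERENCE_DIRECTORY=/opt/paddle_inference in the environment, this yields
# ["-DPADDLEINFERENCE_DIRECTORY=/opt/paddle_inference"], which enables the CMake branch above.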