[Bug Fix] fixed paddle inference headers include policy (#1827)

* Update __init__.py

* [Python] Update python whl setup.py

* [cmake] support fd & paddle inference custom libs on win/linux

* [cmake] support fd & paddle inference custom libs on win/linux

* [Bug Fix] fixed paddle inference headers include policy

* [Bug Fix] fixed paddle inference headers include policy

* [Bug Fix] fixed paddle inference headers include policy
This commit is contained in:
DefTruth
2023-04-19 14:32:51 +08:00
committed by GitHub
parent a509dd8ec1
commit 326715569d
5 changed files with 48 additions and 23 deletions

3
.gitignore vendored
View File

@@ -42,4 +42,5 @@ examples/vision/collect_quantize_cc.sh
examples/vision/tests_quantize examples/vision/tests_quantize
fastdeploy/LICENSE fastdeploy/LICENSE
fastdeploy/ThirdPartyNotices.txt fastdeploy/ThirdPartyNotices.txt
FastDeployCSharp.cmake FastDeployCSharp.cmake
python/fastdeploy/code_version.py

View File

@@ -24,9 +24,13 @@ set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference)
set(PADDLEINFERENCE_SOURCE_DIR set(PADDLEINFERENCE_SOURCE_DIR
${THIRD_PARTY_PATH}/paddle_inference/src/${PADDLEINFERENCE_PROJECT}) ${THIRD_PARTY_PATH}/paddle_inference/src/${PADDLEINFERENCE_PROJECT})
set(PADDLEINFERENCE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle_inference) set(PADDLEINFERENCE_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle_inference)
set(PADDLEINFERENCE_INC_DIR # set(PADDLEINFERENCE_INC_DIR
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/include" # "${PADDLEINFERENCE_INSTALL_DIR}/paddle/include"
CACHE PATH "paddle_inference include directory." FORCE) # CACHE PATH "paddle_inference include directory." FORCE)
# NOTE: The header path needed by paddle inference is xxx/paddle_inference,
# not xxx/paddle_inference/paddle/include
set(PADDLEINFERENCE_INC_DIR "${PADDLEINFERENCE_INSTALL_DIR}"
CACHE PATH "paddle_inference include directory." FORCE)
set(PADDLEINFERENCE_LIB_DIR set(PADDLEINFERENCE_LIB_DIR
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/" "${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/"
CACHE PATH "paddle_inference lib directory." FORCE) CACHE PATH "paddle_inference lib directory." FORCE)
@@ -34,7 +38,8 @@ set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}"
"${PADDLEINFERENCE_LIB_DIR}") "${PADDLEINFERENCE_LIB_DIR}")
if(PADDLEINFERENCE_DIRECTORY) if(PADDLEINFERENCE_DIRECTORY)
set(PADDLEINFERENCE_INC_DIR ${PADDLEINFERENCE_DIRECTORY}/paddle/include) # set(PADDLEINFERENCE_INC_DIR ${PADDLEINFERENCE_DIRECTORY}/paddle/include)
set(PADDLEINFERENCE_INC_DIR ${PADDLEINFERENCE_DIRECTORY})
endif() endif()
include_directories(${PADDLEINFERENCE_INC_DIR}) include_directories(${PADDLEINFERENCE_INC_DIR})
@@ -66,49 +71,56 @@ endif(WIN32)
if(PADDLEINFERENCE_DIRECTORY) if(PADDLEINFERENCE_DIRECTORY)
# Use custom Paddle Inference libs.
if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference") if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference")
file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference") file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference")
endif() endif()
find_package(Python COMPONENTS Interpreter Development REQUIRED) find_package(Python COMPONENTS Interpreter Development REQUIRED)
message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...") message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...")
if(WIN32) if(WIN32)
message(FATAL_ERROR "Define PADDLEINFERENCE_DIRECTORY is not supported on Windows platform.") execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
execute_process(COMMAND cp -r ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference)
else() else()
execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install) execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
execute_process(COMMAND cp -r ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference) execute_process(COMMAND cp -r ${PADDLEINFERENCE_DIRECTORY} ${THIRD_PARTY_PATH}/install/paddle_inference)
execute_process(COMMAND rm -rf ${THIRD_PARTY_PATH}/install/paddle_inference/paddle/lib/*.a) execute_process(COMMAND rm -rf ${THIRD_PARTY_PATH}/install/paddle_inference/paddle/lib/*.a)
endif() endif()
else() else()
# Use default Paddle Inference libs.
set(PADDLEINFERENCE_URL_BASE "https://bj.bcebos.com/fastdeploy/third_libs/") set(PADDLEINFERENCE_URL_BASE "https://bj.bcebos.com/fastdeploy/third_libs/")
set(PADDLEINFERENCE_VERSION "2.4-dev7")
if(WIN32) if(WIN32)
set(PADDLEINFERENCE_VERSION "2.4-dev6") # dev7 for win is not ready now!
if (WITH_GPU) if (WITH_GPU)
# set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.zip") set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt8.5.2.2-mkl-avx-0.0.0.575cafb44b.zip")
set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-gpu-trt-2.4-dev-20230410.zip") set(PADDLEINFERENCE_VERSION "0.0.0.575cafb44b")
else() else()
set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-${PADDLEINFERENCE_VERSION}.zip") set(PADDLEINFERENCE_FILE "paddle_inference-win-x64-mkl-avx-0.0.0.cbdba50933.zip")
set(PADDLEINFERENCE_VERSION "0.0.0.cbdba50933")
endif() endif()
elseif(APPLE) elseif(APPLE)
if(CURRENT_OSX_ARCH MATCHES "arm64") if(CURRENT_OSX_ARCH MATCHES "arm64")
message(FATAL_ERROR "Paddle Backend doesn't support Mac OSX with Arm64 now.") message(FATAL_ERROR "Paddle Backend doesn't support Mac OSX with Arm64 now.")
set(PADDLEINFERENCE_FILE "paddle_inference-osx-arm64-${PADDLEINFERENCE_VERSION}.tgz")
else() else()
set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-${PADDLEINFERENCE_VERSION}.tgz") # TODO(qiuyanjun): Should remove this old paddle inference lib
set(PADDLEINFERENCE_FILE "paddle_inference-osx-x86_64-2.4-dev3.tgz")
# TODO(qiuyanjun): Should use the commit id to tag the version
set(PADDLEINFERENCE_VERSION "2.4-dev3")
endif() endif()
else() else()
# Linux with x86 CPU/Arm CPU/GPU/IPU ...
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64") if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64")
message(FATAL_ERROR "Paddle Backend doesn't support linux aarch64 now.") message(FATAL_ERROR "Paddle Backend doesn't support linux aarch64 now.")
set(PADDLEINFERENCE_FILE "paddle_inference-linux-aarch64-${PADDLEINFERENCE_VERSION}.tgz")
else() else()
set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-${PADDLEINFERENCE_VERSION}.tgz")
if(WITH_GPU) if(WITH_GPU)
#set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt-${PADDLEINFERENCE_VERSION}.tgz") set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt8.5.2.2-mkl-avx-0.0.0.660f781b77.tgz")
set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-gpu-trt-2.4-dev-20230408.tgz") set(PADDLEINFERENCE_VERSION "0.0.0.660f781b77")
else()
set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-mkl-avx-0.0.0.660f781b77.tgz")
set(PADDLEINFERENCE_VERSION "0.0.0.660f781b77")
endif() endif()
if (WITH_IPU) if (WITH_IPU)
set(PADDLEINFERENCE_VERSION "2.4-dev1") set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-ipu-2.4-dev1.tgz")
set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-ipu-${PADDLEINFERENCE_VERSION}.tgz") # TODO(qiuyanjun): Should use the commit id to tag the version
set(PADDLEINFERENCE_VERSION "2.4-dev1")
endif() endif()
if(NEED_ABI0) if(NEED_ABI0)
@@ -116,12 +128,12 @@ else()
message(WARNING "While NEED_ABI0=ON, only support CPU now, will fallback to CPU.") message(WARNING "While NEED_ABI0=ON, only support CPU now, will fallback to CPU.")
endif() endif()
set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-2.4.0-abi0.tgz") set(PADDLEINFERENCE_FILE "paddle_inference-linux-x64-2.4.0-abi0.tgz")
set(PADDLEINFERENCE_VERSION "2.4.0-abi0")
endif() endif()
endif() endif()
endif() endif()
set(PADDLEINFERENCE_URL "${PADDLEINFERENCE_URL_BASE}${PADDLEINFERENCE_FILE}") set(PADDLEINFERENCE_URL "${PADDLEINFERENCE_URL_BASE}${PADDLEINFERENCE_FILE}")
ExternalProject_Add( ExternalProject_Add(
${PADDLEINFERENCE_PROJECT} ${PADDLEINFERENCE_PROJECT}
${EXTERNAL_PROJECT_LOG_ARGS} ${EXTERNAL_PROJECT_LOG_ARGS}
@@ -134,8 +146,10 @@ else()
INSTALL_COMMAND INSTALL_COMMAND
${CMAKE_COMMAND} -E copy_directory ${PADDLEINFERENCE_SOURCE_DIR} ${PADDLEINFERENCE_INSTALL_DIR} ${CMAKE_COMMAND} -E copy_directory ${PADDLEINFERENCE_SOURCE_DIR} ${PADDLEINFERENCE_INSTALL_DIR}
BUILD_BYPRODUCTS ${PADDLEINFERENCE_COMPILE_LIB}) BUILD_BYPRODUCTS ${PADDLEINFERENCE_COMPILE_LIB})
endif(PADDLEINFERENCE_DIRECTORY) endif(PADDLEINFERENCE_DIRECTORY)
# Patch Paddle Inference ELF lib file
if(UNIX AND (NOT APPLE) AND (NOT ANDROID)) if(UNIX AND (NOT APPLE) AND (NOT ANDROID))
add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME}) add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME})
endif() endif()

View File

@@ -25,7 +25,7 @@
#include "paddle2onnx/converter.h" #include "paddle2onnx/converter.h"
#endif #endif
#include "fastdeploy/utils/unique_ptr.h" #include "fastdeploy/utils/unique_ptr.h"
#include "paddle_inference_api.h" // NOLINT #include "paddle/include/paddle_inference_api.h" // NOLINT
namespace fastdeploy { namespace fastdeploy {

View File

@@ -45,6 +45,10 @@ if os.name != "nt" and os.path.exists(trt_directory):
# Note(zhoushunjie): Fix the import order of paddle and fastdeploy library. # Note(zhoushunjie): Fix the import order of paddle and fastdeploy library.
# This solution will be removed when the conflict of paddle and # This solution will be removed when the conflict of paddle and
# fastdeploy is fixed. # fastdeploy is fixed.
try:
import paddle
except:
pass
from .c_lib_wrap import ( from .c_lib_wrap import (
ModelFormat, ModelFormat,

View File

@@ -86,6 +86,8 @@ setup_configs["OPENCV_DIRECTORY"] = os.getenv("OPENCV_DIRECTORY", "")
setup_configs["ORT_DIRECTORY"] = os.getenv("ORT_DIRECTORY", "") setup_configs["ORT_DIRECTORY"] = os.getenv("ORT_DIRECTORY", "")
setup_configs["PADDLEINFERENCE_DIRECTORY"] = os.getenv( setup_configs["PADDLEINFERENCE_DIRECTORY"] = os.getenv(
"PADDLEINFERENCE_DIRECTORY", "") "PADDLEINFERENCE_DIRECTORY", "")
setup_configs["PADDLEINFERENCE_VERSION"] = os.getenv(
"PADDLEINFERENCE_VERSION", "")
setup_configs["RKNN2_TARGET_SOC"] = os.getenv("RKNN2_TARGET_SOC", "") setup_configs["RKNN2_TARGET_SOC"] = os.getenv("RKNN2_TARGET_SOC", "")
if setup_configs["RKNN2_TARGET_SOC"] != "" or setup_configs[ if setup_configs["RKNN2_TARGET_SOC"] != "" or setup_configs[
@@ -376,6 +378,10 @@ if sys.version_info[0] == 3:
package_data = {PACKAGE_NAME: ["LICENSE", "ThirdPartyNotices.txt"]} package_data = {PACKAGE_NAME: ["LICENSE", "ThirdPartyNotices.txt"]}
extra_version_info = ""
if setup_configs["PADDLEINFERENCE_VERSION"] != "":
extra_version_info += ("." + setup_configs["PADDLEINFERENCE_VERSION"])
if sys.argv[1] == "install" or sys.argv[1] == "bdist_wheel": if sys.argv[1] == "install" or sys.argv[1] == "bdist_wheel":
shutil.copy( shutil.copy(
os.path.join(TOP_DIR, "ThirdPartyNotices.txt"), os.path.join(TOP_DIR, "ThirdPartyNotices.txt"),
@@ -395,7 +401,7 @@ if sys.argv[1] == "install" or sys.argv[1] == "bdist_wheel":
package_data[PACKAGE_NAME].extend(all_lib_data) package_data[PACKAGE_NAME].extend(all_lib_data)
setuptools.setup( setuptools.setup(
name=wheel_name, name=wheel_name,
version=VersionInfo.version, version=VersionInfo.version + extra_version_info,
ext_modules=ext_modules, ext_modules=ext_modules,
description="Deploy Kit Tool For Deeplearning models.", description="Deploy Kit Tool For Deeplearning models.",
packages=packages, packages=packages,
@@ -416,7 +422,7 @@ if sys.argv[1] == "install" or sys.argv[1] == "bdist_wheel":
else: else:
setuptools.setup( setuptools.setup(
name=wheel_name, name=wheel_name,
version=VersionInfo.version, version=VersionInfo.version + extra_version_info,
description="Deploy Kit Tool For Deeplearning models.", description="Deploy Kit Tool For Deeplearning models.",
ext_modules=ext_modules, ext_modules=ext_modules,
cmdclass=cmdclass, cmdclass=cmdclass,