[Server] Support encrypt & auth for FD Server (#2018)

* [Server] Support GPU encrypt & auth for FD Triton Server

* fix dockerfile proxy env error

* update build scripts

* remove some logs

---------

Co-authored-by: root <root@yq02-sys-rpm1206692e6.yq02.baidu.com>
Co-authored-by: qiuyanjun <qiuyanjun@baidu.com>
This commit is contained in:
DefTruth
2023-06-13 20:44:25 +08:00
committed by GitHub
parent 73f42e36be
commit ff631b7838
11 changed files with 228 additions and 69 deletions

5
.gitignore vendored
View File

@@ -47,4 +47,7 @@ python/fastdeploy/code_version.py
*.pdmodel *.pdmodel
*.pdiparams *.pdiparams
*.pdiparams.info *.pdiparams.info
log.txt log.txt
serving/build
serving/build.encrypt
serving/build.encrypt.auth

View File

@@ -392,7 +392,11 @@ if(ENABLE_TRT_BACKEND)
if(EXISTS "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib") if(EXISTS "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib")
file(REMOVE_RECURSE "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib") file(REMOVE_RECURSE "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib")
endif() endif()
find_package(Python COMPONENTS Interpreter Development REQUIRED)
if (NOT Python_EXECUTABLE)
find_package(Python COMPONENTS Interpreter Development REQUIRED)
endif()
message(STATUS "Copying ${TRT_DIRECTORY}/lib to ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib ...") message(STATUS "Copying ${TRT_DIRECTORY}/lib to ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib ...")
execute_process(COMMAND ${Python_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/copy_directory.py ${TRT_DIRECTORY}/lib ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib) execute_process(COMMAND ${Python_EXECUTABLE} ${PROJECT_SOURCE_DIR}/scripts/copy_directory.py ${TRT_DIRECTORY}/lib ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib)
file(GLOB_RECURSE TRT_STATIC_LIBS ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib/*.a) file(GLOB_RECURSE TRT_STATIC_LIBS ${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt/lib/*.a)
@@ -515,12 +519,9 @@ if(MSVC)
endif() endif()
target_link_libraries(${LIBRARY_NAME} ${DEPEND_LIBS}) target_link_libraries(${LIBRARY_NAME} ${DEPEND_LIBS})
# Note(qiuyanjun): Currently, we need to manually link the whole
# leveldb static lib into fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT_AUTH
# is 'ON'. Will remove this policy while the bug of paddle inference lib with
# auth & encrypt fixed.
if(ENABLE_PADDLE_BACKEND) if(ENABLE_PADDLE_BACKEND)
enable_paddle_encrypt_auth_link_policy(${LIBRARY_NAME}) set_paddle_encrypt_auth_link_policy(${LIBRARY_NAME})
endif() endif()
if(ANDROID) if(ANDROID)

View File

@@ -63,7 +63,8 @@ set(WITH_ANDROID_JAVA @WITH_ANDROID_JAVA@)
set(WITH_ANDROID_TENSOR_FUNCS @WITH_ANDROID_TENSOR_FUNCS@) set(WITH_ANDROID_TENSOR_FUNCS @WITH_ANDROID_TENSOR_FUNCS@)
# encryption and auth # encryption and auth
set(PADDLEINFERENCE_WITH_ENCRYPT_AUTH @PADDLEINFERENCE_WITH_ENCRYPT_AUTH@) set(PADDLEINFERENCE_WITH_ENCRYPT @PADDLEINFERENCE_WITH_ENCRYPT@)
set(PADDLEINFERENCE_WITH_AUTH @PADDLEINFERENCE_WITH_AUTH@)
set(FASTDEPLOY_LIBS "") set(FASTDEPLOY_LIBS "")
set(FASTDEPLOY_INCS "") set(FASTDEPLOY_INCS "")
@@ -152,15 +153,21 @@ if(ENABLE_PADDLE_BACKEND)
else() else()
set(DNNL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mkldnn/lib/libmkldnn.so.0") set(DNNL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mkldnn/lib/libmkldnn.so.0")
set(IOMP_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mklml/lib/libiomp5.so") set(IOMP_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/mklml/lib/libiomp5.so")
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH) set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so") set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(FDMODEL_AUTH_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_auth.so") set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_MODEL_LIB "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0") if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB}))
set(LEVELDB_LIB_DIR "${CMAKE_CURRENT_LIST_DIR}/third_libs/install/paddle_inference/third_party/install/leveldb/lib/") set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE)
list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_AUTH_LIB} ${FDMODEL_MODEL_LIB}) list(APPEND FASTDEPLOY_LIBS ${FDMODEL_LIB} ${FDMODEL_MODEL_LIB})
# link_directories(LEVELDB_LIB_DIR) endif()
# list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto -lleveldb) if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB}))
list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto) set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE)
list(APPEND FASTDEPLOY_LIBS ${FDMODEL_AUTH_LIB})
endif()
if(PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH)
if(WITH_KUNLUNXIN)
list(APPEND FASTDEPLOY_LIBS -lssl -lcrypto)
endif()
endif() endif()
endif() endif()
list(APPEND FASTDEPLOY_LIBS ${PADDLE_LIB}) list(APPEND FASTDEPLOY_LIBS ${PADDLE_LIB})

View File

@@ -22,7 +22,6 @@ endif()
# Custom options for Paddle Inference backend # Custom options for Paddle Inference backend
option(PADDLEINFERENCE_DIRECTORY "Directory of custom Paddle Inference library" OFF) option(PADDLEINFERENCE_DIRECTORY "Directory of custom Paddle Inference library" OFF)
option(PADDLEINFERENCE_WITH_ENCRYPT_AUTH "Whether the Paddle Inference is built with FD encryption and auth" OFF)
set(PADDLEINFERENCE_PROJECT "extern_paddle_inference") set(PADDLEINFERENCE_PROJECT "extern_paddle_inference")
set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference) set(PADDLEINFERENCE_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle_inference)
@@ -43,44 +42,15 @@ if(PADDLEINFERENCE_DIRECTORY)
endif() endif()
include_directories(${PADDLEINFERENCE_INC_DIR}) include_directories(${PADDLEINFERENCE_INC_DIR})
if(WIN32)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib")
elseif(APPLE)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib")
else()
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so")
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH)
set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib")
endif()
endif(WIN32)
if(PADDLEINFERENCE_DIRECTORY) if(PADDLEINFERENCE_DIRECTORY)
# Use custom Paddle Inference libs. # Use custom Paddle Inference libs.
if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference") if(EXISTS "${THIRD_PARTY_PATH}/install/paddle_inference")
file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference") file(REMOVE_RECURSE "${THIRD_PARTY_PATH}/install/paddle_inference")
endif() endif()
find_package(Python COMPONENTS Interpreter Development REQUIRED) if(NOT Python_EXECUTABLE)
find_package(Python COMPONENTS Interpreter Development REQUIRED)
endif()
message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...") message(STATUS "Copying ${PADDLEINFERENCE_DIRECTORY} to ${THIRD_PARTY_PATH}/install/paddle_inference ...")
if(WIN32) if(WIN32)
execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install) execute_process(COMMAND mkdir -p ${THIRD_PARTY_PATH}/install)
@@ -166,6 +136,51 @@ else()
endif(PADDLEINFERENCE_DIRECTORY) endif(PADDLEINFERENCE_DIRECTORY)
# check libs
set(PADDLEINFERENCE_WITH_AUTH OFF)
set(PADDLEINFERENCE_WITH_ENCRYPT OFF)
if(WIN32)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/paddle_inference.lib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/mkldnn.lib")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5md.lib")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/paddle2onnx.lib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/onnxruntime.lib")
elseif(APPLE)
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.dylib"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.dylib")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.dylib")
else()
set(PADDLEINFERENCE_COMPILE_LIB
"${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so"
CACHE FILEPATH "paddle_inference compile library." FORCE)
set(DNNL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mkldnn/lib/libdnnl.so.2")
set(OMP_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/mklml/lib/libiomp5.so")
set(P2O_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/paddle2onnx/lib/libpaddle2onnx.so")
set(ORT_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/onnxruntime/lib/libonnxruntime.so")
# Check whether the encrypt and auth tools exists. only support PADDLEINFERENCE_DIRECTORY now.
if(PADDLEINFERENCE_DIRECTORY)
set(FDMODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_wenxin.so")
set(FDMODEL_MODEL_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_model.so.2.0.0")
set(FDMODEL_AUTH_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/fdmodel/lib/libfastdeploy_auth.so")
set(FDMODEL_LEVELDB_LIB_DIR "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb")
set(FDMODEL_LEVELDB_LIB_LIB "${PADDLEINFERENCE_INSTALL_DIR}/third_party/install/leveldb/lib/libleveldb.a")
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_MODEL_LIB}))
set(PADDLEINFERENCE_WITH_ENCRYPT ON CACHE BOOL "" FORCE)
message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_MODEL_LIB} exists, fource PADDLEINFERENCE_WITH_ENCRYPT=${PADDLEINFERENCE_WITH_ENCRYPT}")
endif()
if((EXISTS ${FDMODEL_LIB}) AND (EXISTS ${FDMODEL_AUTH_LIB}))
set(PADDLEINFERENCE_WITH_AUTH ON CACHE BOOL "" FORCE)
message(STATUS "Detected ${FDMODEL_LIB} and ${FDMODEL_AUTH_LIB} exists, fource PADDLEINFERENCE_WITH_AUTH=${PADDLEINFERENCE_WITH_AUTH}")
endif()
endif()
endif(WIN32)
# Path Paddle Inference ELF lib file # Path Paddle Inference ELF lib file
if(UNIX AND (NOT APPLE) AND (NOT ANDROID)) if(UNIX AND (NOT APPLE) AND (NOT ANDROID))
add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME}) add_custom_target(patchelf_paddle_inference ALL COMMAND bash -c "PATCHELF_EXE=${PATCHELF_EXE} python ${PROJECT_SOURCE_DIR}/scripts/patch_paddle_inference.py ${PADDLEINFERENCE_INSTALL_DIR}/paddle/lib/libpaddle_inference.so" DEPENDS ${LIBRARY_NAME})
@@ -198,28 +213,37 @@ set_property(TARGET external_omp PROPERTY IMPORTED_LOCATION
add_dependencies(external_omp ${PADDLEINFERENCE_PROJECT}) add_dependencies(external_omp ${PADDLEINFERENCE_PROJECT})
set(ENCRYPT_AUTH_LIBS ) set(ENCRYPT_AUTH_LIBS )
if(PADDLEINFERENCE_WITH_ENCRYPT_AUTH) if(PADDLEINFERENCE_WITH_ENCRYPT)
add_library(external_fdmodel STATIC IMPORTED GLOBAL) add_library(external_fdmodel STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel PROPERTY IMPORTED_LOCATION set_property(TARGET external_fdmodel PROPERTY IMPORTED_LOCATION
${FDMODEL_LIB}) ${FDMODEL_LIB})
add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION
${FDMODEL_AUTH_LIB})
add_library(external_fdmodel_model STATIC IMPORTED GLOBAL) add_library(external_fdmodel_model STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_model PROPERTY IMPORTED_LOCATION set_property(TARGET external_fdmodel_model PROPERTY IMPORTED_LOCATION
${FDMODEL_MODEL_LIB}) ${FDMODEL_MODEL_LIB})
add_dependencies(external_fdmodel ${PADDLEINFERENCE_PROJECT}) list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_model)
add_dependencies(external_fdmodel_auth ${PADDLEINFERENCE_PROJECT}) endif()
add_dependencies(external_fdmodel_model ${PADDLEINFERENCE_PROJECT})
list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel external_fdmodel_auth external_fdmodel_model) if(PADDLEINFERENCE_WITH_AUTH)
add_library(external_fdmodel_auth STATIC IMPORTED GLOBAL)
set_property(TARGET external_fdmodel_auth PROPERTY IMPORTED_LOCATION
${FDMODEL_AUTH_LIB})
list(APPEND ENCRYPT_AUTH_LIBS external_fdmodel_auth)
endif() endif()
function(enable_paddle_encrypt_auth_link_policy LIBRARY_NAME) function(set_paddle_encrypt_auth_link_policy LIBRARY_NAME)
if(ENABLE_PADDLE_BACKEND AND PADDLEINFERENCE_WITH_ENCRYPT_AUTH) if(ENABLE_PADDLE_BACKEND AND (PADDLEINFERENCE_WITH_ENCRYPT OR PADDLEINFERENCE_WITH_AUTH))
link_directories(${LEVELDB_LIB_DIR}) target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS})
target_link_libraries(${LIBRARY_NAME} ${ENCRYPT_AUTH_LIBS} -lssl -lcrypto) # Note(qiuyanjun): Currently, for XPU, we need to manually link the whole
target_link_libraries(${LIBRARY_NAME} ${LEVELDB_LIB_DIR}/libleveldb.a) # leveldb static lib into fastdeploy lib if PADDLEINFERENCE_WITH_ENCRYPT
set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS # or PADDLEINFERENCE_WITH_AUTH is 'ON'. Will remove this policy while
"-Wl,--whole-archive ${LEVELDB_LIB_DIR}/libleveldb.a -Wl,-no-whole-archive") # the bug of paddle inference lib with auth & encrypt fixed.
if((EXISTS ${FDMODEL_LEVELDB_LIB_LIB}) AND WITH_KUNLUNXIN)
target_link_libraries(${LIBRARY_NAME} -lssl -lcrypto)
link_directories(${FDMODEL_LEVELDB_LIB_DIR})
target_link_libraries(${LIBRARY_NAME} ${FDMODEL_LEVELDB_LIB_LIB})
set_target_properties(${LIBRARY_NAME} PROPERTIES LINK_FLAGS
"-Wl,--whole-archive ${FDMODEL_LEVELDB_LIB_LIB} -Wl,-no-whole-archive")
endif()
endif() endif()
endfunction() endfunction()

View File

@@ -62,6 +62,8 @@ function(fastdeploy_summary)
endif() endif()
if(ENABLE_PADDLE_BACKEND) if(ENABLE_PADDLE_BACKEND)
message(STATUS " Paddle Inference version : ${PADDLEINFERENCE_VERSION}") message(STATUS " Paddle Inference version : ${PADDLEINFERENCE_VERSION}")
message(STATUS " PADDLE_WITH_ENCRYPT : ${PADDLEINFERENCE_WITH_ENCRYPT}")
message(STATUS " PADDLE_WITH_AUTH : ${PADDLEINFERENCE_WITH_AUTH}")
endif() endif()
if(ENABLE_POROS_BACKEND) if(ENABLE_POROS_BACKEND)
message(STATUS " Poros version : ${POROS_VERSION}") message(STATUS " Poros version : ${POROS_VERSION}")

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env bash
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
set +x
# -------------------------------------------------------------------------------
# readonly global variables
# -------------------------------------------------------------------------------
readonly ROOT_PATH=$(pwd)
readonly BUILD_ROOT=build/Linux
readonly BUILD_DIR="${BUILD_ROOT}/x86_64_gpu"
# -------------------------------------------------------------------------------
# tasks
# -------------------------------------------------------------------------------
# Ensure the build directory exists; create BUILD_ROOT first when needed.
__make_build_dir() {
  if [ -d "${BUILD_DIR}" ]; then
    echo "-- [INFO] Found BUILD_DIR: ${BUILD_DIR}"
  else
    echo "-- [INFO] BUILD_DIR: ${BUILD_DIR} not exists, setup manually ..."
    [ -d "${BUILD_ROOT}" ] || { mkdir -p "${BUILD_ROOT}" && echo "-- [INFO] Created ${BUILD_ROOT} !"; }
    mkdir -p "${BUILD_DIR}" && echo "-- [INFO] Created ${BUILD_DIR} !"
  fi
}
# Warn about and unset compiler-related environment variables that could
# leak host flags/paths into the build (LDFLAGS, CPPFLAGS, include paths).
__check_cxx_envs() {
  local v
  for v in LDFLAGS CPPFLAGS CPLUS_INCLUDE_PATH C_INCLUDE_PATH; do
    # ${!v} is bash indirect expansion: the value of the variable named by $v.
    # Kept unquoted to mirror the original word-splitting test semantics.
    if [ ${!v} ]; then
      echo "-- [INFO] Found ${v}: ${!v}, \c"
      echo "unset it before crossing compiling ${BUILD_DIR}"
      unset ${v}
    fi
  done
}
# Configure, build, and install the FastDeploy runtime library for Linux
# x86_64 GPU against a user-supplied (custom) Paddle Inference package.
# Reads from the environment: TRT_DIRECTORY, CUDA_DIRECTORY,
# PADDLEINFERENCE_DIRECTORY, PADDLEINFERENCE_VERSION.
# Fixes vs. original: corrected 'FASDEPLOY' -> 'FASTDEPLOY' in the local
# install-dir variable, balanced the '[x86_64_gpu}]' bracket in the final
# log line, and quoted env-derived cmake arguments so paths with spaces
# don't split into multiple arguments.
__build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle() {
  local FASTDEPLOY_INSTALL_DIR="${ROOT_PATH}/${BUILD_DIR}/fastdeploy_install"
  cd "${BUILD_DIR}" && echo "-- [INFO] Working Dir: ${PWD}"
  cmake -DCMAKE_BUILD_TYPE=Release \
        -DWITH_GPU=ON \
        -DTRT_DIRECTORY="${TRT_DIRECTORY}" \
        -DCUDA_DIRECTORY="${CUDA_DIRECTORY}" \
        -DENABLE_ORT_BACKEND=ON \
        -DENABLE_TRT_BACKEND=ON \
        -DENABLE_PADDLE_BACKEND=ON \
        -DPADDLEINFERENCE_DIRECTORY="${PADDLEINFERENCE_DIRECTORY}" \
        -DPADDLEINFERENCE_VERSION="${PADDLEINFERENCE_VERSION}" \
        -DENABLE_OPENVINO_BACKEND=ON \
        -DENABLE_PADDLE2ONNX=ON \
        -DENABLE_VISION=OFF \
        -DENABLE_BENCHMARK=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DPython_EXECUTABLE=/usr/bin/python3 \
        -DCMAKE_INSTALL_PREFIX="${FASTDEPLOY_INSTALL_DIR}" \
        -DLIBRARY_NAME=fastdeploy_runtime \
        -Wno-dev ../../.. && make -j8 && make install
  echo "-- [INFO][built][x86_64_gpu][${FASTDEPLOY_INSTALL_DIR}]"
  echo "-- [INFO][${PADDLEINFERENCE_DIRECTORY}][${PADDLEINFERENCE_VERSION}]"
}
# Entry point: prepare the build tree, sanitize compiler env vars, then
# configure/build/install the GPU runtime. Exits 0 explicitly on success
# (set -e above aborts earlier on any failing step).
main() {
__make_build_dir
__check_cxx_envs
__build_fastdeploy_linux_x86_64_gpu_shared_custom_paddle
exit 0
}
main
# Usage:
# export PADDLEINFERENCE_DIRECTORY=xxx
# export PADDLEINFERENCE_VERSION=xxx
# export CUDA_DIRECTORY=/usr/local/cuda
# export TRT_DIRECTORY=/home/qiuyanjun/TensorRT-8.5.2.2
# ./scripts/linux/build_linux_x86_64_cpp_gpu_encrypt_runtime.sh

View File

@@ -80,4 +80,9 @@ COPY build/fastdeploy_install/* /opt/fastdeploy/
# Set environment variable # Set environment variable
ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib/:/opt/fastdeploy/third_libs/install/tensorrt/lib/:/opt/fastdeploy/third_libs/install/opencv/lib64/:$LD_LIBRARY_PATH" ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib/:/opt/fastdeploy/third_libs/install/tensorrt/lib/:/opt/fastdeploy/third_libs/install/opencv/lib64/:$LD_LIBRARY_PATH"
ENV PATH="/opt/tritonserver/bin:$PATH" ENV PATH="/opt/tritonserver/bin:$PATH"
ENV http_proxy=
ENV https_proxy=
ENV no_proxy=
ENV TZ=Asia/Shanghai

View File

@@ -58,5 +58,5 @@ RUN python3 -m pip install https://paddle-wheel.bj.bcebos.com/2.4.2/linux/linux-
COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/ COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/
COPY build/fastdeploy_install /opt/fastdeploy/ COPY build/fastdeploy_install /opt/fastdeploy/
ENV LD_LIBRARY_PATH="/opt/TensorRT-8.5.2.2/lib/:/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH"
ENV PATH="/opt/tritonserver/bin:$PATH" ENV PATH="/opt/tritonserver/bin:$PATH"

View File

@@ -55,5 +55,5 @@ RUN python3 -m pip install paddlepaddle-gpu==2.4.1.post112 -f https://www.paddle
COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/ COPY serving/build/libtriton_fastdeploy.so /opt/tritonserver/backends/fastdeploy/
COPY build/fastdeploy_install /opt/fastdeploy/ COPY build/fastdeploy_install /opt/fastdeploy/
ENV LD_LIBRARY_PATH="/opt/TensorRT-8.4.1.5/lib/:/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH" ENV LD_LIBRARY_PATH="/opt/fastdeploy/lib:/opt/fastdeploy/third_libs/install/onnxruntime/lib:/opt/fastdeploy/third_libs/install/paddle2onnx/lib:/opt/fastdeploy/third_libs/install/tensorrt/lib:/opt/fastdeploy/third_libs/install/paddle_inference/paddle/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mkldnn/lib:/opt/fastdeploy/third_libs/install/paddle_inference/third_party/install/mklml/lib:/opt/fastdeploy/third_libs/install/openvino/runtime/lib:$LD_LIBRARY_PATH"
ENV PATH="/opt/tritonserver/bin:$PATH" ENV PATH="/opt/tritonserver/bin:$PATH"

View File

@@ -41,8 +41,11 @@ docker run -i --rm --name build_fd_xpu_auth_dev \
python setup.py build; python setup.py build;
python setup.py bdist_wheel; python setup.py bdist_wheel;
cd /workspace/fastdeploy; cd /workspace/fastdeploy;
wget ${PADDLEINFERENCE_URL} && tar -zxvf ${PADDLEINFERENCE_URL##*/}
mv ${PADDLEINFERENCE_URL##*/} paddle_inference
PADDLEINFERENCE_DIRECTORY=${PWD}/paddle_inference
rm -rf build; mkdir build; cd build; rm -rf build; mkdir build; cd build;
cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${PWD}/fastdeploy_install -DWITH_KUNLUNXIN=ON -DENABLE_PADDLE_BACKEND=ON -DPADDLEINFERENCE_URL=${PADDLEINFERENCE_URL} -DPADDLEINFERENCE_WITH_ENCRYPT_AUTH=ON -DENABLE_VISION=ON -DENABLE_BENCHMARK=ON -DLIBRARY_NAME=fastdeploy_runtime; cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=${PWD}/fastdeploy_install -DWITH_KUNLUNXIN=ON -DENABLE_PADDLE_BACKEND=ON -DPADDLEINFERENCE_DIRECTORY=${PADDLEINFERENCE_DIRECTORY} -DENABLE_BENCHMARK=ON -DLIBRARY_NAME=fastdeploy_runtime;
make -j`nproc`; make -j`nproc`;
make install; make install;
# fix the link error of libbkcl.so # fix the link error of libbkcl.so

View File

@@ -0,0 +1,8 @@
# This script offers a demo to build the triton fastdeploy backend only.
# Requires FD_GPU_SDK to point at a prebuilt FastDeploy SDK directory
# (consumed via -DFASTDEPLOY_DIR below); run from the repository root.
cd serving
# Start from a clean build tree every run.
rm -rf build && mkdir build
cd build
# Triton common/core/backend repos are all pinned to tag r21.10.
cmake .. -DFASTDEPLOY_DIR=${FD_GPU_SDK} -DTRITON_COMMON_REPO_TAG=r21.10 -DTRITON_CORE_REPO_TAG=r21.10 -DTRITON_BACKEND_REPO_TAG=r21.10;
make -j`nproc`