Mirror of https://github.com/PaddlePaddle/FastDeploy.git, synced 2025-10-07 01:22:59 +08:00
Add OpenVINO backend for Windows (#196)

* Add InitTensorInfo for OpenVINO
* Update OpenVINO source dir
* Add OpenVINO Windows compile
* Add Windows ExternalProject_Add
* Copy the 3rdparty dir of OpenVINO to the install dir on Windows

Co-authored-by: Jason <jiangjiajun@baidu.com>
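For context, here is a minimal sketch of how a caller reaches this backend once the commit lands. It uses only the RuntimeOption calls visible in the demo diff further down; the include path is an assumption, not taken from this commit.

    // Hedged sketch, not part of the commit: pick CPU plus the OpenVINO backend.
    #include "fastdeploy/runtime.h"  // assumed include path

    int main() {
      auto option = fastdeploy::RuntimeOption();
      option.UseCpu();               // device 0 in the demo's usage text
      option.UseOpenVINOBackend();   // backend 2, newly reachable on Windows
      return 0;
    }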
@@ -62,6 +62,19 @@ ov::element::Type FDDataTypeToOV(const FDDataType& type) {
   return ov::element::f32;
 }
 
+void OpenVINOBackend::InitTensorInfo(
+    const std::vector<ov::Output<ov::Node>>& ov_outputs,
+    std::vector<TensorInfo>* tensor_infos) {
+  for (size_t i = 0; i < ov_outputs.size(); ++i) {
+    TensorInfo info;
+    auto partial_shape = PartialShapeToVec(ov_outputs[i].get_partial_shape());
+    info.shape.assign(partial_shape.begin(), partial_shape.end());
+    info.name = ov_outputs[i].get_any_name();
+    info.dtype = OpenVINODataTypeToFD(ov_outputs[i].get_element_type());
+    tensor_infos->emplace_back(info);
+  }
+}
+
 bool OpenVINOBackend::InitFromPaddle(const std::string& model_file,
                                      const std::string& params_file,
                                      const OpenVINOBackendOption& option) {
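The new helper centralizes the per-tensor metadata extraction that was previously duplicated in both init paths (see the next two hunks). As a standalone illustration, not part of the commit, the same OpenVINO 2022.x calls can describe a model's outputs directly:

    // Sketch reusing the ov:: calls from the hunk above; assumes OpenVINO 2022.x.
    #include <openvino/openvino.hpp>
    #include <iostream>

    void DescribeOutputs(const std::shared_ptr<ov::Model>& model) {
      for (const auto& output : model->outputs()) {
        std::cout << output.get_any_name() << ": "            // tensor name
                  << output.get_partial_shape() << " "        // may have dynamic dims
                  << output.get_element_type() << std::endl;  // e.g. f32
      }
    }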
@@ -80,23 +93,10 @@ bool OpenVINOBackend::InitFromPaddle(const std::string& model_file,
 
   // Get inputs/outputs information from loaded model
   const std::vector<ov::Output<ov::Node>> inputs = model->inputs();
-  for (size_t i = 0; i < inputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(inputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = inputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(inputs[i].get_element_type());
-    input_infos_.emplace_back(info);
-  }
+  InitTensorInfo(inputs, &input_infos_);
   const std::vector<ov::Output<ov::Node>> outputs = model->outputs();
-  for (size_t i = 0; i < outputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(outputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = outputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(outputs[i].get_element_type());
-    output_infos_.emplace_back(info);
-  }
+  InitTensorInfo(outputs, &output_infos_);
 
   compiled_model_ = core_.compile_model(model, "CPU", properties);
   request_ = compiled_model_.create_infer_request();
@@ -135,23 +135,10 @@ bool OpenVINOBackend::InitFromOnnx(const std::string& model_file,
 
   // Get inputs/outputs information from loaded model
   const std::vector<ov::Output<ov::Node>> inputs = model->inputs();
-  for (size_t i = 0; i < inputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(inputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = inputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(inputs[i].get_element_type());
-    input_infos_.emplace_back(info);
-  }
+  InitTensorInfo(inputs, &input_infos_);
   const std::vector<ov::Output<ov::Node>> outputs = model->outputs();
-  for (size_t i = 0; i < outputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(outputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = outputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(outputs[i].get_element_type());
-    output_infos_.emplace_back(info);
-  }
+  InitTensorInfo(outputs, &output_infos_);
 
   compiled_model_ = core_.compile_model(model, "CPU", properties);
   request_ = compiled_model_.create_infer_request();
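With both hunks applied, InitFromPaddle and InitFromOnnx delegate to the single InitTensorInfo helper instead of each carrying two copies of the same extraction loop, so any future change to how shapes, names, or dtypes are read happens in one place.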
@@ -196,4 +183,4 @@ bool OpenVINOBackend::Infer(std::vector<FDTensor>& inputs,
   return true;
 }
 
 }  // namespace fastdeploy
@@ -34,24 +34,27 @@ class OpenVINOBackend : public BaseBackend {
   OpenVINOBackend() {}
   virtual ~OpenVINOBackend() = default;
 
-  bool
-  InitFromPaddle(const std::string& model_file, const std::string& params_file,
+  bool InitFromPaddle(
+      const std::string& model_file, const std::string& params_file,
       const OpenVINOBackendOption& option = OpenVINOBackendOption());
 
-  bool
-  InitFromOnnx(const std::string& model_file,
+  bool InitFromOnnx(
+      const std::string& model_file,
       const OpenVINOBackendOption& option = OpenVINOBackendOption());
 
-  bool Infer(std::vector<FDTensor>& inputs, std::vector<FDTensor>* outputs);
+  bool Infer(std::vector<FDTensor>& inputs,
+             std::vector<FDTensor>* outputs) override;
 
-  int NumInputs() const;
+  int NumInputs() const override;
 
-  int NumOutputs() const;
+  int NumOutputs() const override;
 
-  TensorInfo GetInputInfo(int index);
-  TensorInfo GetOutputInfo(int index);
+  TensorInfo GetInputInfo(int index) override;
+  TensorInfo GetOutputInfo(int index) override;
 
  private:
+  void InitTensorInfo(const std::vector<ov::Output<ov::Node>>& ov_outputs,
+                      std::vector<TensorInfo>* tensor_infos);
   ov::Core core_;
   ov::CompiledModel compiled_model_;
   ov::InferRequest request_;
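Beyond reflowing the signatures, the header change marks every method inherited from BaseBackend with override. A toy example, not from the repository, of what that buys:

    // With `override`, a signature that drifts from the base class becomes a
    // compile error instead of silently declaring a new virtual function.
    struct Base {
      virtual int NumInputs() const { return 0; }
      virtual ~Base() = default;
    };

    struct Derived : Base {
      int NumInputs() const override { return 1; }  // checked against Base
      // int NumInputs() override { return 1; }     // would not compile: the
      //                                            // missing `const` means it
      //                                            // overrides nothing
    };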
@@ -26,13 +26,16 @@ const char sep = '/';
 #endif
 
 int main(int argc, char* argv[]) {
-  if (argc < 3) {
-    std::cout << "Usage: infer_demo path/to/model run_option, "
-                 "e.g ./infer_demo uie-base 0"
+  if (argc != 3 && argc != 4) {
+    std::cout << "Usage: infer_demo /path/to/model device [backend], "
+                 "e.g ./infer_demo uie-base 0 [0]"
               << std::endl;
-    std::cout << "The data type of run_option is int, 0: run with cpu; 1: run "
+    std::cout << "The data type of device is int, 0: run with cpu; 1: run "
                  "with gpu."
               << std::endl;
+    std::cout << "The data type of backend is int, 0: use paddle backend; 1: "
+                 "use onnxruntime backend; 2: use openvino backend. Default 0."
+              << std::endl;
     return -1;
   }
   auto option = fastdeploy::RuntimeOption();
@@ -41,6 +44,23 @@ int main(int argc, char* argv[]) {
   } else {
     option.UseGpu();
   }
+  auto backend_type = 0;
+  if (argc == 4) {
+    backend_type = std::atoi(argv[3]);
+  }
+  switch (backend_type) {
+    case 0:
+      option.UsePaddleBackend();
+      break;
+    case 1:
+      option.UseOrtBackend();
+      break;
+    case 2:
+      option.UseOpenVINOBackend();
+      break;
+    default:
+      break;
+  }
   std::string model_dir(argv[1]);
   std::string model_path = model_dir + sep + "inference.pdmodel";
   std::string param_path = model_dir + sep + "inference.pdiparams";
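After this change the demo accepts an optional third argument: for example, ./infer_demo uie-base 0 2 loads the model from the uie-base directory, runs on CPU (device 0), and selects the OpenVINO backend (backend 2); omitting the argument keeps the default Paddle backend.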
external/faster_tokenizer.cmake (vendored, 1 line changed)
@@ -23,7 +23,6 @@ set(FASTERTOKENIZER_INSTALL_DIR ${THIRD_PARTY_PATH}/install/faster_tokenizer)
 set(FASTERTOKENIZER_INC_DIR
     "${FASTERTOKENIZER_INSTALL_DIR}/include"
     "${FASTERTOKENIZER_INSTALL_DIR}/third_party/include"
-    "${FASTERTOKENIZER_INSTALL_DIR}/include/faster_tokenizer" # TODO (zhoushunjie): Will remove it later.
     CACHE PATH "faster_tokenizer include directory." FORCE)
 set(FASTERTOKENIZER_LIB_DIR
     "${FASTERTOKENIZER_INSTALL_DIR}/lib/"
external/openvino.cmake (vendored, 80 lines changed)
@@ -16,22 +16,39 @@ include(ExternalProject)
 
 set(OPENVINO_PROJECT "extern_openvino")
 set(OPENVINO_PREFIX_DIR ${THIRD_PARTY_PATH}/openvino)
-set(OPENVINO_SOURCE_DIR
-    ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT})
 set(OPENVINO_INSTALL_DIR ${THIRD_PARTY_PATH}/install/openvino)
-set(OPENVINO_INC_DIR
+set(OPENVINO_INSTALL_INC_DIR
+    "${OPENVINO_INSTALL_DIR}/include"
+    CACHE PATH "openvino install include directory." FORCE)
+
+if (WIN32)
+  set(OPENVINO_SOURCE_DIR
+      ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT}/openvino-win-x64-2022.1.0)
+  set(OPENVINO_INC_DIR
     "${OPENVINO_INSTALL_DIR}/include"
+    "${OPENVINO_INSTALL_DIR}/include/ie"
     CACHE PATH "openvino include directory." FORCE)
 set(OPENVINO_LIB_DIR
     "${OPENVINO_INSTALL_DIR}/lib/"
     CACHE PATH "openvino lib directory." FORCE)
+else()
+  set(OPENVINO_SOURCE_DIR
+      ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT})
+  set(OPENVINO_INC_DIR
+      "${OPENVINO_INSTALL_DIR}/include"
+      CACHE PATH "openvino include directory." FORCE)
+  set(OPENVINO_LIB_DIR
+      "${OPENVINO_INSTALL_DIR}/lib/"
+      CACHE PATH "openvino lib directory." FORCE)
+endif()
+
 
 set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${OPENVINO_LIB_DIR}")
 
 set(OPENVINO_VERSION "2022.3.0")
 set(OPENVINO_URL_PREFIX "https://bj.bcebos.com/fastdeploy/third_libs/")
 
 if(WIN32)
-  message(FATAL_ERROR "FastDeploy cannot ENABLE_OPENVINO_BACKEND in windows now.")
   set(OPENVINO_FILENAME "openvino-win-x64-${OPENVINO_VERSION}.zip")
   if(NOT CMAKE_CL_64)
     message(FATAL_ERROR "FastDeploy cannot ENABLE_OPENVINO_BACKEND in win32 now.")
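The WIN32 branch exists because the Windows archive unpacks into a versioned subdirectory (openvino-win-x64-2022.1.0) rather than the project root, and its consumers need a second header location, include/ie, which appears to hold the legacy Inference Engine headers.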
@@ -69,23 +86,42 @@ else()
     CACHE FILEPATH "OPENVINO static library." FORCE)
 endif()
 
-ExternalProject_Add(
-  ${OPENVINO_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  URL ${OPENVINO_URL}
-  PREFIX ${OPENVINO_PREFIX_DIR}
-  DOWNLOAD_NO_PROGRESS 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND
-    ${CMAKE_COMMAND} -E remove_directory ${OPENVINO_INSTALL_DIR} &&
-    ${CMAKE_COMMAND} -E make_directory ${OPENVINO_INSTALL_DIR} &&
-    ${CMAKE_COMMAND} -E rename ${OPENVINO_SOURCE_DIR}/lib/intel64 ${OPENVINO_INSTALL_DIR}/lib &&
-    ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/include
-    ${OPENVINO_INC_DIR}
-  BUILD_BYPRODUCTS ${OPENVINO_LIB})
+if (WIN32)
+  ExternalProject_Add(
+    ${OPENVINO_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${OPENVINO_URL}
+    PREFIX ${OPENVINO_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E remove_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E make_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/lib/intel64/Release ${OPENVINO_INSTALL_DIR}/lib &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/bin/intel64/Release ${OPENVINO_INSTALL_DIR}/bin &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/include ${OPENVINO_INSTALL_INC_DIR} &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/3rdparty ${OPENVINO_INSTALL_DIR}/3rdparty
+    BUILD_BYPRODUCTS ${OPENVINO_LIB})
+else()
+  ExternalProject_Add(
+    ${OPENVINO_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${OPENVINO_URL}
+    PREFIX ${OPENVINO_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E remove_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E make_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E rename ${OPENVINO_SOURCE_DIR}/lib/intel64 ${OPENVINO_INSTALL_DIR}/lib &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/include
+      ${OPENVINO_INSTALL_INC_DIR}
+    BUILD_BYPRODUCTS ${OPENVINO_LIB})
+endif()
 add_library(external_openvino STATIC IMPORTED GLOBAL)
 set_property(TARGET external_openvino PROPERTY IMPORTED_LOCATION ${OPENVINO_LIB})
 add_dependencies(external_openvino ${OPENVINO_PROJECT})
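The install commands also differ per platform: on non-Windows the extracted lib/intel64 tree is renamed to lib in one step, while on Windows the Release import libraries and DLLs are copied out of lib/intel64/Release and bin/intel64/Release separately, and the bundled 3rdparty directory is carried into the install tree, matching the commit message's note about copying 3rdparty on Windows.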