Mirror of https://github.com/PaddlePaddle/FastDeploy.git
Add openvino backend for windows (#196)
* Add InitTensorInfo for openvino
* update openvino source dir
* add openvino windows compile
* Add windows ExternalProject_Add
* Cp 3rdparty dir of openvino to installdir in windows

Co-authored-by: Jason <jiangjiajun@baidu.com>
@@ -62,6 +62,19 @@ ov::element::Type FDDataTypeToOV(const FDDataType& type) {
   return ov::element::f32;
 }
 
+void OpenVINOBackend::InitTensorInfo(
+    const std::vector<ov::Output<ov::Node>>& ov_outputs,
+    std::vector<TensorInfo>* tensor_infos) {
+  for (size_t i = 0; i < ov_outputs.size(); ++i) {
+    TensorInfo info;
+    auto partial_shape = PartialShapeToVec(ov_outputs[i].get_partial_shape());
+    info.shape.assign(partial_shape.begin(), partial_shape.end());
+    info.name = ov_outputs[i].get_any_name();
+    info.dtype = OpenVINODataTypeToFD(ov_outputs[i].get_element_type());
+    tensor_infos->emplace_back(info);
+  }
+}
+
 bool OpenVINOBackend::InitFromPaddle(const std::string& model_file,
                                      const std::string& params_file,
                                      const OpenVINOBackendOption& option) {
@@ -80,23 +93,10 @@ bool OpenVINOBackend::InitFromPaddle(const std::string& model_file,
 
   // Get inputs/outputs information from loaded model
   const std::vector<ov::Output<ov::Node>> inputs = model->inputs();
-  for (size_t i = 0; i < inputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(inputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = inputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(inputs[i].get_element_type());
-    input_infos_.emplace_back(info);
-  }
+  InitTensorInfo(inputs, &input_infos_);
 
   const std::vector<ov::Output<ov::Node>> outputs = model->outputs();
-  for (size_t i = 0; i < outputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(outputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = outputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(outputs[i].get_element_type());
-    output_infos_.emplace_back(info);
-  }
+  InitTensorInfo(outputs, &output_infos_);
 
   compiled_model_ = core_.compile_model(model, "CPU", properties);
   request_ = compiled_model_.create_infer_request();
@@ -135,23 +135,10 @@ bool OpenVINOBackend::InitFromOnnx(const std::string& model_file,
 
   // Get inputs/outputs information from loaded model
   const std::vector<ov::Output<ov::Node>> inputs = model->inputs();
-  for (size_t i = 0; i < inputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(inputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = inputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(inputs[i].get_element_type());
-    input_infos_.emplace_back(info);
-  }
+  InitTensorInfo(inputs, &input_infos_);
 
   const std::vector<ov::Output<ov::Node>> outputs = model->outputs();
-  for (size_t i = 0; i < outputs.size(); ++i) {
-    TensorInfo info;
-    auto partial_shape = PartialShapeToVec(outputs[i].get_partial_shape());
-    info.shape.assign(partial_shape.begin(), partial_shape.end());
-    info.name = outputs[i].get_any_name();
-    info.dtype = OpenVINODataTypeToFD(outputs[i].get_element_type());
-    output_infos_.emplace_back(info);
-  }
+  InitTensorInfo(outputs, &output_infos_);
 
   compiled_model_ = core_.compile_model(model, "CPU", properties);
   request_ = compiled_model_.create_infer_request();
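Both init paths now fill input_infos_ and output_infos_ through the same helper, so the metadata reported by GetInputInfo/GetOutputInfo is collected in one place. A minimal sketch of how a caller might inspect that metadata after init; the include path and the fastdeploy namespace are assumptions, while the methods and TensorInfo fields (name, shape, dtype) are the ones this diff shows:

    #include <iostream>
    #include "ov_backend.h"  // hypothetical include path for the header changed below

    // Dump the tensor metadata that InitTensorInfo collected.
    void DumpInputInfo(fastdeploy::OpenVINOBackend& backend) {
      for (int i = 0; i < backend.NumInputs(); ++i) {
        auto info = backend.GetInputInfo(i);  // TensorInfo {name, shape, dtype}
        std::cout << "input[" << i << "] name=" << info.name << " shape=[";
        for (size_t d = 0; d < info.shape.size(); ++d) {
          // how dynamic dimensions appear depends on what PartialShapeToVec emits
          std::cout << (d ? "," : "") << info.shape[d];
        }
        std::cout << "]" << std::endl;
      }
    }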
@@ -34,24 +34,27 @@ class OpenVINOBackend : public BaseBackend {
   OpenVINOBackend() {}
   virtual ~OpenVINOBackend() = default;
 
-  bool
-  InitFromPaddle(const std::string& model_file, const std::string& params_file,
-                 const OpenVINOBackendOption& option = OpenVINOBackendOption());
+  bool InitFromPaddle(
+      const std::string& model_file, const std::string& params_file,
+      const OpenVINOBackendOption& option = OpenVINOBackendOption());
 
-  bool
-  InitFromOnnx(const std::string& model_file,
-               const OpenVINOBackendOption& option = OpenVINOBackendOption());
+  bool InitFromOnnx(
+      const std::string& model_file,
+      const OpenVINOBackendOption& option = OpenVINOBackendOption());
 
-  bool Infer(std::vector<FDTensor>& inputs, std::vector<FDTensor>* outputs);
+  bool Infer(std::vector<FDTensor>& inputs,
+             std::vector<FDTensor>* outputs) override;
 
-  int NumInputs() const;
+  int NumInputs() const override;
 
-  int NumOutputs() const;
+  int NumOutputs() const override;
 
-  TensorInfo GetInputInfo(int index);
-  TensorInfo GetOutputInfo(int index);
+  TensorInfo GetInputInfo(int index) override;
+  TensorInfo GetOutputInfo(int index) override;
 
  private:
+  void InitTensorInfo(const std::vector<ov::Output<ov::Node>>& ov_outputs,
+                      std::vector<TensorInfo>* tensor_infos);
   ov::Core core_;
   ov::CompiledModel compiled_model_;
   ov::InferRequest request_;
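The added override markers make explicit that these methods implement the BaseBackend interface, so the backend can be driven through a base-class pointer. A hypothetical sketch; it assumes BaseBackend declares these members virtual (which the override keywords imply) and that the types live in a fastdeploy namespace:

    #include <vector>

    // Dispatches to OpenVINOBackend::Infer when backend points at one.
    bool RunOnce(fastdeploy::BaseBackend* backend,
                 std::vector<fastdeploy::FDTensor>& inputs) {
      std::vector<fastdeploy::FDTensor> outputs;
      return backend->Infer(inputs, &outputs);
    }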
@@ -26,13 +26,16 @@ const char sep = '/';
 #endif
 
 int main(int argc, char* argv[]) {
-  if (argc < 3) {
-    std::cout << "Usage: infer_demo path/to/model run_option, "
-                 "e.g ./infer_demo uie-base 0"
+  if (argc != 3 && argc != 4) {
+    std::cout << "Usage: infer_demo /path/to/model device [backend], "
+                 "e.g ./infer_demo uie-base 0 [0]"
               << std::endl;
-    std::cout << "The data type of run_option is int, 0: run with cpu; 1: run "
+    std::cout << "The data type of device is int, 0: run with cpu; 1: run "
                  "with gpu."
               << std::endl;
+    std::cout << "The data type of backend is int, 0: use paddle backend; 1: "
+                 "use onnxruntime backend; 2: use openvino backend. Default 0."
+              << std::endl;
     return -1;
   }
   auto option = fastdeploy::RuntimeOption();
@@ -41,6 +44,23 @@ int main(int argc, char* argv[]) {
   } else {
     option.UseGpu();
   }
+  auto backend_type = 0;
+  if (argc == 4) {
+    backend_type = std::atoi(argv[3]);
+  }
+  switch (backend_type) {
+    case 0:
+      option.UsePaddleBackend();
+      break;
+    case 1:
+      option.UseOrtBackend();
+      break;
+    case 2:
+      option.UseOpenVINOBackend();
+      break;
+    default:
+      break;
+  }
   std::string model_dir(argv[1]);
   std::string model_path = model_dir + sep + "inference.pdmodel";
   std::string param_path = model_dir + sep + "inference.pdiparams";
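With the new arguments, ./infer_demo uie-base 0 2 would run the model on CPU through the OpenVINO backend. A minimal sketch that factors the added selection logic into a helper; the fastdeploy/runtime.h include path is an assumption, while the RuntimeOption calls are exactly the ones the diff uses:

    #include <cstdlib>
    #include "fastdeploy/runtime.h"  // assumed header for fastdeploy::RuntimeOption

    // Map the optional third argument (default 0) onto a backend choice.
    fastdeploy::RuntimeOption BuildOption(int argc, char* argv[]) {
      auto option = fastdeploy::RuntimeOption();
      int backend_type = (argc == 4) ? std::atoi(argv[3]) : 0;
      switch (backend_type) {
        case 0: option.UsePaddleBackend(); break;    // Paddle Inference
        case 1: option.UseOrtBackend(); break;       // ONNX Runtime
        case 2: option.UseOpenVINOBackend(); break;  // OpenVINO
        default: break;  // out-of-range values fall back to the default backend
      }
      return option;
    }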
external/faster_tokenizer.cmake (vendored): 1 change
@@ -23,7 +23,6 @@ set(FASTERTOKENIZER_INSTALL_DIR ${THIRD_PARTY_PATH}/install/faster_tokenizer)
 set(FASTERTOKENIZER_INC_DIR
   "${FASTERTOKENIZER_INSTALL_DIR}/include"
   "${FASTERTOKENIZER_INSTALL_DIR}/third_party/include"
-  "${FASTERTOKENIZER_INSTALL_DIR}/include/faster_tokenizer" # TODO (zhoushunjie): Will remove it later.
   CACHE PATH "faster_tokenizer include directory." FORCE)
 set(FASTERTOKENIZER_LIB_DIR
   "${FASTERTOKENIZER_INSTALL_DIR}/lib/"
external/openvino.cmake (vendored): 52 changes
@@ -16,22 +16,39 @@ include(ExternalProject)
 
 set(OPENVINO_PROJECT "extern_openvino")
 set(OPENVINO_PREFIX_DIR ${THIRD_PARTY_PATH}/openvino)
-set(OPENVINO_SOURCE_DIR
-    ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT})
 set(OPENVINO_INSTALL_DIR ${THIRD_PARTY_PATH}/install/openvino)
-set(OPENVINO_INC_DIR
+set(OPENVINO_INSTALL_INC_DIR
     "${OPENVINO_INSTALL_DIR}/include"
-    CACHE PATH "openvino include directory." FORCE)
-set(OPENVINO_LIB_DIR
-    "${OPENVINO_INSTALL_DIR}/lib/"
-    CACHE PATH "openvino lib directory." FORCE)
+    CACHE PATH "openvino install include directory." FORCE)
+
+if (WIN32)
+  set(OPENVINO_SOURCE_DIR
+      ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT}/openvino-win-x64-2022.1.0)
+  set(OPENVINO_INC_DIR
+      "${OPENVINO_INSTALL_DIR}/include"
+      "${OPENVINO_INSTALL_DIR}/include/ie"
+      CACHE PATH "openvino include directory." FORCE)
+  set(OPENVINO_LIB_DIR
+      "${OPENVINO_INSTALL_DIR}/lib/"
+      CACHE PATH "openvino lib directory." FORCE)
+else()
+  set(OPENVINO_SOURCE_DIR
+      ${THIRD_PARTY_PATH}/openvino/src/${OPENVINO_PROJECT})
+  set(OPENVINO_INC_DIR
+      "${OPENVINO_INSTALL_DIR}/include"
+      CACHE PATH "openvino include directory." FORCE)
+  set(OPENVINO_LIB_DIR
+      "${OPENVINO_INSTALL_DIR}/lib/"
+      CACHE PATH "openvino lib directory." FORCE)
+endif()
 
 set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${OPENVINO_LIB_DIR}")
 
 set(OPENVINO_VERSION "2022.3.0")
 set(OPENVINO_URL_PREFIX "https://bj.bcebos.com/fastdeploy/third_libs/")
 
 if(WIN32)
-  message(FATAL_ERROR "FastDeploy cannot ENABLE_OPENVINO_BACKEND in windows now.")
+  set(OPENVINO_FILENAME "openvino-win-x64-${OPENVINO_VERSION}.zip")
+  if(NOT CMAKE_CL_64)
+    message(FATAL_ERROR "FastDeploy cannot ENABLE_OPENVINO_BACKEND in win32 now.")
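The Windows archive keeps the legacy Inference Engine headers in a separate include/ie tree, which is presumably why OPENVINO_INC_DIR gains a second entry on WIN32. A sketch of a consumer built against this layout; the calls are standard OpenVINO 2.0 API, not part of this commit:

    #include <openvino/openvino.hpp>  // resolved via ${OPENVINO_INSTALL_DIR}/include

    int main() {
      ov::Core core;  // same type the backend holds as core_
      // Lists devices such as "CPU"; the backend compiles for "CPU" above.
      auto devices = core.get_available_devices();
      return devices.empty() ? 1 : 0;
    }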
@@ -69,7 +86,26 @@ else()
     CACHE FILEPATH "OPENVINO static library." FORCE)
 endif()
 
-ExternalProject_Add(
-  ${OPENVINO_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  URL ${OPENVINO_URL}
+if (WIN32)
+  ExternalProject_Add(
+    ${OPENVINO_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${OPENVINO_URL}
+    PREFIX ${OPENVINO_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E remove_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E make_directory ${OPENVINO_INSTALL_DIR} &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/lib/intel64/Release ${OPENVINO_INSTALL_DIR}/lib &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/bin/intel64/Release ${OPENVINO_INSTALL_DIR}/bin &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/include ${OPENVINO_INSTALL_INC_DIR} &&
+      ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/3rdparty ${OPENVINO_INSTALL_DIR}/3rdparty
+    BUILD_BYPRODUCTS ${OPENVINO_LIB})
+else()
+  ExternalProject_Add(
+    ${OPENVINO_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${OPENVINO_URL}
@@ -83,9 +119,9 @@ ExternalProject_Add(
       ${CMAKE_COMMAND} -E make_directory ${OPENVINO_INSTALL_DIR} &&
       ${CMAKE_COMMAND} -E rename ${OPENVINO_SOURCE_DIR}/lib/intel64 ${OPENVINO_INSTALL_DIR}/lib &&
       ${CMAKE_COMMAND} -E copy_directory ${OPENVINO_SOURCE_DIR}/include
-      ${OPENVINO_INC_DIR}
+      ${OPENVINO_INSTALL_INC_DIR}
     BUILD_BYPRODUCTS ${OPENVINO_LIB})
-
+endif()
 add_library(external_openvino STATIC IMPORTED GLOBAL)
 set_property(TARGET external_openvino PROPERTY IMPORTED_LOCATION ${OPENVINO_LIB})
 add_dependencies(external_openvino ${OPENVINO_PROJECT})