Add Huawei Ascend NPU deployment through PaddleLite CANN

This commit is contained in:
yunyaoXYY
2022-11-30 07:50:24 +00:00
parent 9d78b1d414
commit a5e3c1ecb3
18 changed files with 371 additions and 6 deletions

@@ -0,0 +1,14 @@
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Path to the downloaded and extracted FastDeploy library
option(FASTDEPLOY_INSTALL_DIR "Path of fastdeploy-cann sdk")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add FastDeploy header file dependencies
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link the FastDeploy library dependencies
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})

@@ -0,0 +1,43 @@
# PaddleClas Ascend NPU C++ Deployment Example
The `infer.cc` in this directory helps users quickly deploy PaddleClas models on Huawei Ascend NPUs.
This example was tested on a Kunpeng 920 + Atlas 300I Pro hardware platform. (Deployment on x86 CPU Linux systems is not supported yet.)
## Deployment Preparation
### Preparing the Build Environment for Huawei Ascend NPU Deployment
1. For the software/hardware requirements and the preparation of the Huawei Ascend NPU build environment, refer to [FastDeploy Huawei Ascend NPU Build Environment Preparation](../../../../../../docs/cn/build_and_install/ascend.md)
## Deploying the ResNet50_vd Classification Model on Huawei Ascend NPU
Follow the steps below to deploy the ResNet50_vd model on a Huawei Ascend NPU:
1. Complete the [Huawei Ascend NPU build environment preparation](../../../../../../docs/cn/build_and_install/ascend.md)
2. Download the model and sample image needed for deployment into the current directory:
```bash
mkdir models && mkdir images
# Download the model and place it under the models directory
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
cp -r ResNet50_vd_infer models
# Download the image and place it under the images directory
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
cp -r ILSVRC2012_val_00000010.jpeg images
```
3. Build the deployment example, either by running the `build.sh` script in this directory or with the following commands:
```bash
mkdir build && cd build
cmake .. -DFASTDEPLOY_INSTALL_DIR=../../../../../../build/fastdeploy-cann/
make -j8
# After a successful build, infer_demo is generated in the build directory
```
4. Run the example
Run the `run.sh` script in this directory.
After a successful deployment, the output is as follows:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.685547,
)
# After this result, some log messages from the Huawei Ascend runtime will also be printed; this is normal.
```
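
For reference, the whole example boils down to a short API sequence: enable the CANN backend on a `RuntimeOption`, construct a `PaddleClasModel` from the exported inference files, and call `Predict`. A minimal sketch, assuming the FastDeploy C++ API used by the `infer.cc` in this commit and the model/image paths from the download step above:
```C++
#include <iostream>

#include "fastdeploy/vision.h"

int main() {
  // Select the Huawei Ascend NPU backend (Paddle Lite + CANN).
  fastdeploy::RuntimeOption option;
  option.UseCANN();

  // Inference files exported by PaddleClas, downloaded in step 2 above.
  auto model = fastdeploy::vision::classification::PaddleClasModel(
      "models/ResNet50_vd_infer/inference.pdmodel",
      "models/ResNet50_vd_infer/inference.pdiparams",
      "models/ResNet50_vd_infer/inference_cls.yaml", option);

  // Run classification on the sample image and print the result.
  auto im = cv::imread("images/ILSVRC2012_val_00000010.jpeg");
  fastdeploy::vision::ClassifyResult res;
  if (!model.Predict(&im, &res)) {
    std::cerr << "Failed to predict." << std::endl;
    return -1;
  }
  std::cout << res.Str() << std::endl;
  return 0;
}
```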

@@ -0,0 +1,6 @@
#!/bin/bash
# Clean and rebuild the demo against the fastdeploy-cann SDK.
rm -rf build
mkdir build
cd build
cmake .. -DFASTDEPLOY_INSTALL_DIR=../../../../../../build/fastdeploy-cann/
make -j8

@@ -0,0 +1,61 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <cassert>
#include <iostream>
#include <string>

#include "fastdeploy/vision.h"

#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif

void InitAndInfer(const std::string& model_dir, const std::string& image_file) {
  auto model_file = model_dir + sep + "inference.pdmodel";
  auto params_file = model_dir + sep + "inference.pdiparams";
  auto config_file = model_dir + sep + "inference_cls.yaml";

  // Select the Huawei Ascend NPU backend (Paddle Lite + CANN).
  fastdeploy::RuntimeOption option;
  option.UseCANN();

  auto model = fastdeploy::vision::classification::PaddleClasModel(
      model_file, params_file, config_file, option);
  assert(model.Initialized());

  // Read the image and run classification.
  auto im = cv::imread(image_file);
  fastdeploy::vision::ClassifyResult res;
  if (!model.Predict(&im, &res)) {
    std::cerr << "Failed to predict." << std::endl;
    return;
  }
  std::cout << res.Str() << std::endl;
}

int main(int argc, char* argv[]) {
  if (argc < 3) {
    std::cout << "Usage: infer_demo path/to/model path/to/image, "
                 "e.g ./infer_demo ./ResNet50_vd_infer ./test.jpeg"
              << std::endl;
    return -1;
  }

  std::string model_dir = argv[1];
  std::string test_image = argv[2];
  InitAndInfer(model_dir, test_image);
  return 0;
}
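
With the model and image downloaded as described in the README above, and the library paths exported as in the `run.sh` below, the resulting binary is invoked as `./build/infer_demo ./models/ResNet50_vd_infer ./images/ILSVRC2012_val_00000010.jpeg`, which is what the script's final line expands to.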

@@ -0,0 +1,31 @@
#!/bin/bash
# Model name
MODEL_NAME=ResNet50_vd_infer
# Name of the image used for inference
DATA_NAME=ILSVRC2012_val_00000010.jpeg
# Executable file of this demo
DEMO_NAME=infer_demo
export GLOG_v=5
# Environment variables for this demo
# Set the fastdeploy-cann installation path correctly
FASTDEPLOY_INSTALL_DIR="../../../../../../build/fastdeploy-cann/"
# Environment variables for FastDeploy, OpenCV and Paddle Lite
export LD_LIBRARY_PATH=$FASTDEPLOY_INSTALL_DIR/lib/:$FASTDEPLOY_INSTALL_DIR/third_libs/install/opencv/lib/:$FASTDEPLOY_INSTALL_DIR/third_libs/install/paddlelite/lib/:$LD_LIBRARY_PATH
# Ascend-related environment variables
HUAWEI_ASCEND_TOOLKIT_HOME="/usr/local/Ascend/ascend-toolkit/latest"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/stub:$HUAWEI_ASCEND_TOOLKIT_HOME/acllib/lib64:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/lib64:$HUAWEI_ASCEND_TOOLKIT_HOME/opp/op_proto/built-in
export PYTHONPATH=$PYTHONPATH:$HUAWEI_ASCEND_TOOLKIT_HOME/fwkacllib/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/acllib/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/toolkit/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/pyACL/python/site-packages/acl
export PATH=$PATH:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/ccec_compiler/bin:${HUAWEI_ASCEND_TOOLKIT_HOME}/acllib/bin:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/bin
export ASCEND_AICPU_PATH=$HUAWEI_ASCEND_TOOLKIT_HOME
export ASCEND_OPP_PATH=$HUAWEI_ASCEND_TOOLKIT_HOME/opp
export TOOLCHAIN_HOME=$HUAWEI_ASCEND_TOOLKIT_HOME/toolkit
export ASCEND_SLOG_PRINT_TO_STDOUT=0
export ASCEND_GLOBAL_LOG_LEVEL=3
chmod +x ./build/$DEMO_NAME
# Run the demo.
./build/$DEMO_NAME ./models/$MODEL_NAME ./images/$DATA_NAME