Improve ascend

This commit is contained in:
yunyaoXYY
2022-12-22 07:10:36 +00:00
parent 2439fdcbe5
commit d6588b29f8
11 changed files with 86 additions and 138 deletions

View File

@@ -621,6 +621,13 @@ else()
)
endif()
if(WITH_ASCEND)
  install(
    FILES ${PROJECT_SOURCE_DIR}/scripts/ascend_init.sh
    DESTINATION ${CMAKE_INSTALL_PREFIX}
  )
endif()
############################### Building: FastDeploy Python Wheel #############################
if(BUILD_FASTDEPLOY_PYTHON)
add_definitions(-DBUILD_FASTDEPLOY_PYTHON)

View File

@@ -0,0 +1,19 @@
[English](../../en/faq/use_sdk_on_linux.md) | 中文
# Linux deployment with C++ on Huawei Ascend
After the deployment example has been compiled, and before running the program, some environment variables need to be imported to initialize the deployment environment, because the Huawei Ascend toolkit is required at runtime.
Users can directly use the following script (located in the directory of the compiled FastDeploy library) to initialize the Huawei Ascend deployment environment.
```bash
# The default path of the Ascend toolkit is as follows:
# HUAWEI_ASCEND_TOOLKIT_HOME="/usr/local/Ascend/ascend-toolkit/latest"
# If your installation directory is different, you need to export it manually first.
# export HUAWEI_ASCEND_TOOLKIT_HOME="user_path"
source fastdeploy-ascend/fastdeploy_init.sh
```
Note that this command only takes effect in the current shell session (it will no longer apply after you switch to a new terminal window or close and reopen the window). If you need it to persist on the system, add these environment variables to the `~/.bashrc` file.
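A minimal sketch of making this persistent is to append the `source` command to `~/.bashrc`; the path `/path/to/fastdeploy-ascend` below is a placeholder assumption and should be replaced with your actual SDK directory.
```bash
# Hypothetical path: replace /path/to/fastdeploy-ascend with your actual SDK directory.
echo "source /path/to/fastdeploy-ascend/fastdeploy_init.sh" >> ~/.bashrc
# Reload the current shell so the change takes effect immediately.
source ~/.bashrc
```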

View File

@@ -0,0 +1,19 @@
[简体中文](../../cn/faq/use_sdk_on_linux.md) | English
# Linux deployment with C++ on Huawei Ascend
After the deployment example has been compiled, and before running the program, some environment variables need to be imported to initialize the deployment environment, because the Huawei Ascend toolkit is required at runtime.
Users can use the following script (located in the directory of the compiled FastDeploy library) to initialize the Huawei Ascend deployment environment.
```bash
# The path to our default Ascend Toolkit is as follows,
# HUAWEI_ASCEND_TOOLKIT_HOME="/usr/local/Ascend/ascend-toolkit/latest"
# If the user's installation directory is different from this, you need to export it manually first.
# export HUAWEI_ASCEND_TOOLKIT_HOME="user_path"
source fastdeploy-ascend/fastdeploy_init.sh
```
Note that this command only takes effect in the current shell session (it will no longer apply after you switch to a new terminal window or close and reopen the window). If you need it to persist on the system, add these environment variables to the `~/.bashrc` file.
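As a rough sanity check after sourcing the script (a sketch, assuming the script has appended the FastDeploy and Paddle Lite library directories to `LD_LIBRARY_PATH`, as the bundled initialization scripts in this SDK do), you can list the entries of the variable:
```bash
# Print LD_LIBRARY_PATH one entry per line and look for the FastDeploy / Paddle Lite directories.
echo "$LD_LIBRARY_PATH" | tr ':' '\n' | grep -iE 'fastdeploy|paddlelite'
```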

View File

@@ -1,9 +0,0 @@
# Deploying PaddleClas Classification Models on Huawei Ascend NPU
FastDeploy now supports deploying PaddleClas models on Huawei Ascend NPU, based on Paddle Lite.
## Detailed Deployment Documentation
Both Python and C++ deployment are supported on Huawei Ascend NPU.
- [C++ deployment](./cpp)
- [Python deployment](../python)

View File

@@ -1,14 +0,0 @@
PROJECT(infer_demo C CXX)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Specify the path of the downloaded and extracted FastDeploy library
option(FASTDEPLOY_INSTALL_DIR "Path of fastdeploy-ascend sdk")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# Add the FastDeploy dependency headers
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
# Link against the FastDeploy libraries
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})

View File

@@ -1,39 +0,0 @@
# PaddleClas C++ Deployment Example on Huawei Ascend NPU
The `infer.cc` provided in this directory helps users quickly deploy PaddleClas models on Huawei Ascend NPU.
This example has been tested on a Kunpeng 920 + Atlas 300I Pro hardware platform. (Deployment on Linux systems with x86 CPUs is not supported yet.)
## Deployment Preparation
### Build Environment Preparation for Huawei Ascend NPU Deployment
1. For the software and hardware requirements and the preparation of the build environment for Huawei Ascend NPU deployment, please refer to [FastDeploy Build Environment Preparation for Huawei Ascend NPU](../../../../../../docs/cn/build_and_install/huawei_ascend.md)
## Deploying the ResNet50_vd Classification Model on Huawei Ascend NPU
Follow the steps below to deploy the ResNet50_vd model on Huawei Ascend NPU:
1. Complete the [Build Environment Preparation for Huawei Ascend NPU Deployment](../../../../../../docs/cn/build_and_install/huawei_ascend.md)
2. Build this demo and run the deployment:
```bash
# Build this demo
mkdir build
cd build
cmake .. -DFASTDEPLOY_INSTALL_DIR=../../../../../../build/fastdeploy-ascend
make -j8
cd ..
# Download the model
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
# Download the test image
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# Simply run run.sh in the current directory
bash run.sh
```
After successful deployment, the output is as follows:
```bash
ClassifyResult(
label_ids: 153,
scores: 0.685547,
)
# After this result, some log messages from Huawei Ascend itself will also appear; this is normal.
```
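Alternatively, the demo binary can be launched directly instead of through `run.sh`. The sketch below assumes the FastDeploy, OpenCV, Paddle Lite and Ascend toolkit environment variables have already been exported, for example by sourcing the bundled `fastdeploy_init.sh` as described in the Ascend deployment FAQ:
```bash
# Assumes the library and Ascend toolkit environment variables are already set,
# e.g. via: source ../../../../../../build/fastdeploy-ascend/fastdeploy_init.sh
./build/infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg
```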

View File

@@ -1,61 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include "fastdeploy/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
void InitAndInfer(const std::string& model_dir, const std::string& image_file) {
  auto model_file = model_dir + sep + "inference.pdmodel";
  auto params_file = model_dir + sep + "inference.pdiparams";
  auto config_file = model_dir + sep + "inference_cls.yaml";

  // Run the model on Huawei Ascend NPU.
  fastdeploy::RuntimeOption option;
  option.UseAscend();

  auto model = fastdeploy::vision::classification::PaddleClasModel(
      model_file, params_file, config_file, option);
  assert(model.Initialized());

  auto im = cv::imread(image_file);

  fastdeploy::vision::ClassifyResult res;
  if (!model.Predict(&im, &res)) {
    std::cerr << "Failed to predict." << std::endl;
    return;
  }
  std::cout << res.Str() << std::endl;
}

int main(int argc, char* argv[]) {
  if (argc < 3) {
    std::cout << "Usage: infer_demo path/to/model "
                 "path/to/image "
                 "run_option, "
                 "e.g. ./infer_demo ./ResNet50_vd_quant ./test.jpeg"
              << std::endl;
    return -1;
  }

  std::string model_dir = argv[1];
  std::string test_image = argv[2];
  InitAndInfer(model_dir, test_image);
  return 0;
}

View File

@@ -34,11 +34,16 @@ wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/Ima
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 3
# KunlunXin XPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 4
# Huawei Ascend NPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 5
```
The above commands only work on Linux or macOS. For how to use the SDK on Windows, please refer to:
- [How to use the FastDeploy C++ SDK on Windows](../../../../../docs/cn/faq/use_sdk_on_windows.md)
If you deploy on Huawei Ascend NPU, please refer to the following document to prepare the environment variables before deployment (see also the sketch below):
- [How to deploy on Huawei Ascend NPU](../../../../../docs/cn/faq/use_sdk_on_ascend.md)
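For example, a minimal sketch of an Ascend run (assuming the extracted SDK directory is named `fastdeploy-ascend` and sits next to the demo; adjust the path to your installation):
```bash
# Initialize the Huawei Ascend deployment environment first (path is an assumption).
source fastdeploy-ascend/fastdeploy_init.sh
# Run inference on the Huawei Ascend NPU (run option 5).
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 5
```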
## PaddleClas C++ API
### PaddleClas Class

View File

@@ -148,6 +148,31 @@ void TrtInfer(const std::string& model_dir, const std::string& image_file) {
  std::cout << res.Str() << std::endl;
}

void AscendInfer(const std::string& model_dir, const std::string& image_file) {
  auto model_file = model_dir + sep + "inference.pdmodel";
  auto params_file = model_dir + sep + "inference.pdiparams";
  auto config_file = model_dir + sep + "inference_cls.yaml";

  // Select the Huawei Ascend NPU backend for inference.
  auto option = fastdeploy::RuntimeOption();
  option.UseAscend();

  auto model = fastdeploy::vision::classification::PaddleClasModel(
      model_file, params_file, config_file, option);
  assert(model.Initialized());

  auto im = cv::imread(image_file);

  fastdeploy::vision::ClassifyResult res;
  if (!model.Predict(&im, &res)) {
    std::cerr << "Failed to predict." << std::endl;
    return;
  }
  std::cout << res.Str() << std::endl;
}

int main(int argc, char* argv[]) {
  if (argc < 4) {
    std::cout << "Usage: infer_demo path/to/model path/to/image run_option, "
@@ -169,6 +194,8 @@ int main(int argc, char* argv[]) {
    IpuInfer(argv[1], argv[2]);
  } else if (std::atoi(argv[3]) == 4) {
    XpuInfer(argv[1], argv[2]);
  } else if (std::atoi(argv[3]) == 5) {
    AscendInfer(argv[1], argv[2]);
  }
  return 0;
}

View File

@@ -1,13 +1,4 @@
#!/bin/bash
export GLOG_v=5
# Set the environment variables for this demo
# Set the installation path of fastdeploy-ascend correctly
FASTDEPLOY_INSTALL_DIR="../../../../../../build/fastdeploy-ascend/"
# Set the environment variables related to fastdeploy, opencv and paddlelite
export LD_LIBRARY_PATH=$FASTDEPLOY_INSTALL_DIR/lib/:$FASTDEPLOY_INSTALL_DIR/third_libs/install/opencv/lib/:$FASTDEPLOY_INSTALL_DIR/third_libs/install/paddlelite/lib/:$LD_LIBRARY_PATH
# Set the Ascend-related environment variables
# Set huawei ascend toolkit correctly.
HUAWEI_ASCEND_TOOLKIT_HOME="/usr/local/Ascend/ascend-toolkit/latest"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/stub:$HUAWEI_ASCEND_TOOLKIT_HOME/acllib/lib64:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/lib64:$HUAWEI_ASCEND_TOOLKIT_HOME/opp/op_proto/built-in
export PYTHONPATH=$PYTHONPATH:$HUAWEI_ASCEND_TOOLKIT_HOME/fwkacllib/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/acllib/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/toolkit/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/atc/python/site-packages:$HUAWEI_ASCEND_TOOLKIT_HOME/pyACL/python/site-packages/acl
@@ -18,7 +9,4 @@ export TOOLCHAIN_HOME=$HUAWEI_ASCEND_TOOLKIT_HOME/toolkit
export ASCEND_SLOG_PRINT_TO_STDOUT=0
export ASCEND_GLOBAL_LOG_LEVEL=3
chmod +x ./$BUILD_DIR
# Run this demo; the arguments are the model path and the image path
./build/infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg
echo "===== Finish Initializing Environment for Ascend Deployment ====="

View File

@@ -1,10 +1,11 @@
# source this file to import libraries
PLATFORM=`uname`
FASTDEPLOY_LIBRARY_PATH=${BASH_SOURCE%/*}
FASTDEPLOY_LIBRARY_PATH=${BASH_SOURCE}
if [ "$PLATFORM" == "Linux" ];then
FASTDEPLOY_LIBRARY_PATH=`readlink -f ${FASTDEPLOY_LIBRARY_PATH}`
fi
FASTDEPLOY_LIBRARY_PATH=${FASTDEPLOY_LIBRARY_PATH%/*}
echo "=============== Information ======================"
echo "FastDeploy Library Path: $FASTDEPLOY_LIBRARY_PATH"
@@ -32,5 +33,10 @@ for LIB_DIR in ${LIBS_DIRECOTRIES[@]};do
IMPORT_PATH=${LIB_DIR}":"$IMPORT_PATH
done
if [ -f "${FASTDEPLOY_LIBRARY_PATH}/ascend_init.sh" ]
then
source ${FASTDEPLOY_LIBRARY_PATH}/ascend_init.sh
fi
echo "[Execute] Will try to export all the library directories to environments, if not work, please try to export these path by your self."
export LD_LIBRARY_PATH=${IMPORT_PATH}:$LD_LIBRARY_PATH