Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-10-06 00:57:33 +08:00)

* 10-29/14:05: add cmake; add the rknpu2 backend
* 10-29/14:43: add RKNPU handling to the Runtime fd_type
* 10-29/15:02: add ppseg RKNPU2 inference code
* 10-29/15:46: add ppseg RKNPU2 cpp example code
* 10-29/15:51: add a README; revise some comments and variable names as requested; fix a bug where some code in the .cc files still used the old function names after the rename
* 10-29/22:32: str(Device::NPU) now prints NPU instead of UNKNOWN; fix the comment formatting in the runtime files; add ENABLE_RKNPU2_BACKEND to the Building Summary output; add rknpu2 support to pybind; add a Python build option; add the PPSeg Python code; add and update various docs
* 10-30/14:11: attempt to fix errors when building with CUDA
* 10-30/19:27: move CpuName and CoreMask to a different level; adjust the structure of the ppseg rknn inference example; images are now downloaded from the network
* 10-30/19:39: update docs; update the function naming in the ppseg rknpu2 example; merge the ppseg rknpu2 example into a single .cc file; fix a logic error in disable_normalize_and_permute; remove unused parameters from rknpu2 initialization; attempt to reset the Python code
* 10-30/10:16: rknpu2_config.h no longer includes the rknn_api header, preventing import errors
* 10-31/14:31: update pybind to support the latest rknpu2 backends; support ppseg Python inference again; move cpuname and coremask to a different level
* 10-31/15:35: attempt to fix an rknpu2 import error
* 10-31/19:00: add the RKNPU2 model export code and its docs; fix many documentation errors; RKNN2_TARGET_SOC no longer needs to be set again after the fastdeploy repo is built
* 10-31/19:26: fix some incorrect docs; restore parts deleted by mistake; fix FastDeploy.cmake so it reports the right message when RKNN2_TARGET_SOC is set incorrectly; fix Chinese comments in rknpu2_backend.cc
* 10-31/20:45: delete useless comments; rename Device::NPU to Device::RKNPU as requested, with the hardware sharing valid_hardware_backends; delete useless comments and debug code
* 11-01/09:45: update the variable naming
* 11-01/10:16: revise some docs and function naming

Co-authored-by: Jason <jiangjiajun@baidu.com>
76 lines · 2.7 KiB · Python
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import argparse

import yaml
from rknn.api import RKNN


def get_config():
    """Parse the CLI arguments: RKNN-Toolkit verbosity and the path to the
    YAML config describing the model to export."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", default=True, help="rknntoolkit verbose")
    parser.add_argument("--config_path", help="path to the YAML export config")
    args = parser.parse_args()
    return args
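
# Example invocation (a sketch -- "export.py" and "config.yaml" are placeholder
# names for this script and its YAML config, not paths taken from the repo):
#
#     python export.py --config_path config.yaml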


if __name__ == "__main__":
    config = get_config()
    with open(config.config_path) as file:
        file_data = file.read()
        yaml_config = yaml.safe_load(file_data)
    print(yaml_config)
    model = RKNN(config.verbose)
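
    # yaml_config is expected to carry the keys read below and in the export
    # step: model_path, output_folder, target_platform, normalize (mean/std in
    # the 0-1 range), and outputs. A minimal sketch of such a file -- the model
    # name and values are illustrative assumptions, not taken from the repo:
    #
    #     model_path: ./picodet_s_416_coco_lcnet.onnx
    #     output_folder: ./
    #     target_platform: RK3588
    #     normalize:
    #       mean: [0.485, 0.456, 0.406]
    #       std: [0.229, 0.224, 0.225]
    #     outputs: None   # the literal string "None" skips the explicit output list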

    # Config: mean/std come from the YAML in the 0-1 range and are scaled here
    # to the 0-255 pixel range used by RKNN.
    mean_values = [[255 * mean for mean in yaml_config["normalize"]["mean"]]]
    std_values = [[255 * std for std in yaml_config["normalize"]["std"]]]
    model.config(mean_values=mean_values,
                 std_values=std_values,
                 target_platform=yaml_config["target_platform"])

    # Load ONNX model
    print(type(yaml_config["outputs"]))
    print("yaml_config[\"outputs\"] = ", yaml_config["outputs"])
    if yaml_config["outputs"] == "None":
        # No explicit output nodes given: use the outputs declared by the model.
        ret = model.load_onnx(model=yaml_config["model_path"])
    else:
        ret = model.load_onnx(model=yaml_config["model_path"],
                              outputs=yaml_config["outputs"])
    assert ret == 0, "Load model failed!"

    # Build model
    ret = model.build(do_quantization=None)
    assert ret == 0, "Build model failed!"
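
    # A quantized build would instead follow the usual RKNN-Toolkit2 pattern of
    # do_quantization=True plus a calibration image list, roughly:
    # model.build(do_quantization=True, dataset="./dataset.txt"). The dataset
    # argument and file name are assumptions about the toolkit version in use,
    # so verify them against your installed rknn-toolkit2.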

    # Init Runtime
    ret = model.init_runtime()
    assert ret == 0, "Init runtime environment failed!"

    # Export
    if not os.path.exists(yaml_config["output_folder"]):
        os.mkdir(yaml_config["output_folder"])

    # The exported file is named <model_base_name>_<target_platform>.rknn.
    model_base_name = os.path.basename(yaml_config["model_path"]).split(".")[0]
    model_device_name = yaml_config["target_platform"].lower()
    model_save_name = model_base_name + "_" + model_device_name + ".rknn"
    ret = model.export_rknn(
        os.path.join(yaml_config["output_folder"], model_save_name))
    assert ret == 0, "Export rknn model failed!"
    print("Export OK!")
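
    # Worked example of the naming above (illustrative values only): with
    # model_path ./picodet_s_416_coco_lcnet.onnx and target_platform RK3588,
    # the file written to output_folder is picodet_s_416_coco_lcnet_rk3588.rknn.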