Mirror of https://github.com/PaddlePaddle/FastDeploy.git

* 11-02/14:35
  * Added a check for incorrectly formatted input data
  * Optimized the inference path to reduce the number of memory allocations
  * Added support for multi-input RKNN models
  * When an RKNN model's output shape is 3-D, rknn forcibly pads it to 4-D. The padded dimension is now stripped so that models whose post-processing inspects the output shape behave correctly.
* 11-03/17:25
  * Added support for exporting multi-input RKNN models
  * Updated the various docs
  * ppseg now uses the model from FastDeploy for conversion
* 11-03/17:25
  * Added open-source license headers
* 11-03/21:48
  * Removed useless debug code and added comments
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import yaml
import argparse
from rknn.api import RKNN


def get_config():
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", default=True, help="rknntoolkit verbose")
    parser.add_argument("--config_path")
    args = parser.parse_args()
    return args


if __name__ == "__main__":
    config = get_config()
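    # The config YAML is expected to provide: model_path, output_folder,
    # target_platform, normalize (per-input mean/std lists, or the string
    # "None"), and outputs (a list of output tensor names, or the string "None").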
    with open(config.config_path) as file:
        file_data = file.read()
        yaml_config = yaml.safe_load(file_data)
    print(yaml_config)
    model = RKNN(config.verbose)

    # Config
    if yaml_config["normalize"] == "None":
        model.config(target_platform=yaml_config["target_platform"])
    else:
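        # Scale the config's normalized mean/std values up to pixel scale
        # (x256) before passing them to RKNN's config().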
        mean_values = [[256 * mean for mean in mean_ls]
                       for mean_ls in yaml_config["normalize"]["mean"]]
        std_values = [[256 * std for std in std_ls]
                      for std_ls in yaml_config["normalize"]["std"]]
        model.config(
            mean_values=mean_values,
            std_values=std_values,
            target_platform=yaml_config["target_platform"])

    # Load ONNX model
    print(type(yaml_config["outputs"]))
    print("yaml_config[\"outputs\"] = ", yaml_config["outputs"])
    if yaml_config["outputs"] == "None":
        ret = model.load_onnx(model=yaml_config["model_path"])
    else:
        ret = model.load_onnx(
            model=yaml_config["model_path"], outputs=yaml_config["outputs"])
    assert ret == 0, "Load model failed!"

    # Build model
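    # No quantization is performed here; enabling it would also require a
    # calibration dataset to be passed to build().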
    ret = model.build(do_quantization=None)
    assert ret == 0, "Build model failed!"

    # Init Runtime
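    # With no target device specified, init_runtime() falls back to the
    # rknn-toolkit simulator; this only checks that the converted model
    # initializes.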
    ret = model.init_runtime()
    assert ret == 0, "Init runtime environment failed!"

    # Export
    if not os.path.exists(yaml_config["output_folder"]):
        os.mkdir(yaml_config["output_folder"])

    model_base_name = os.path.basename(yaml_config["model_path"]).split(".")[0]
    model_device_name = yaml_config["target_platform"].lower()
    model_save_name = model_base_name + "_" + model_device_name + ".rknn"
    ret = model.export_rknn(
        os.path.join(yaml_config["output_folder"], model_save_name))
    assert ret == 0, "Export rknn model failed!"
    print("Export OK!")
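
A minimal sketch of how this converter might be driven, assuming the script above is saved as export.py; the config keys mirror what the script reads, while the model path, output folder, target platform, and normalization constants below are placeholder values, not settings taken from this repo:

    import subprocess
    import yaml

    # Placeholder config; adjust model_path/target_platform for the real model.
    example_config = {
        "model_path": "./model.onnx",
        "output_folder": "./rknn_output",
        "target_platform": "RK3588",
        "normalize": {
            "mean": [[0.485, 0.456, 0.406]],
            "std": [[0.229, 0.224, 0.225]],
        },
        # The literal string "None" keeps every original model output.
        "outputs": "None",
    }

    # Write the config to disk, then invoke the converter with it.
    with open("example_config.yaml", "w") as f:
        yaml.safe_dump(example_config, f)

    subprocess.run(
        ["python", "export.py", "--config_path", "example_config.yaml"],
        check=True)

The same layout can also be written by hand as a YAML file and passed directly via --config_path.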