更新pptinypose模型

This commit is contained in:
Zheng-Bicheng
2023-02-15 20:55:26 +08:00
parent 53333c5db6
commit 2b1631b563
2 changed files with 6 additions and 1 deletion

View File

@@ -18,6 +18,7 @@ void RKNPU2Infer(const std::string& tinypose_model_dir,
const std::string& image_file) { const std::string& image_file) {
auto tinypose_model_file = auto tinypose_model_file =
tinypose_model_dir + "/PP_TinyPose_256x192_infer_rk3588_unquantized.rknn"; tinypose_model_dir + "/PP_TinyPose_256x192_infer_rk3588_unquantized.rknn";
std::cout << tinypose_model_file << std::endl;
auto tinypose_params_file = ""; auto tinypose_params_file = "";
auto tinypose_config_file = tinypose_model_dir + "/infer_cfg.yml"; auto tinypose_config_file = tinypose_model_dir + "/infer_cfg.yml";
auto option = fastdeploy::RuntimeOption(); auto option = fastdeploy::RuntimeOption();

View File

@@ -135,10 +135,12 @@ bool PPTinyPose::Postprocess(std::vector<FDTensor>& infer_result,
KeyPointDetectionResult* result, KeyPointDetectionResult* result,
const std::vector<float>& center, const std::vector<float>& center,
const std::vector<float>& scale) { const std::vector<float>& scale) {
FDASSERT(infer_result[1].shape[0] == 1, FDASSERT(infer_result[0].shape[0] == 1,
"Only support batch = 1 in FastDeploy now."); "Only support batch = 1 in FastDeploy now.");
result->Clear(); result->Clear();
std::cout << "Postprocess" << std::endl;
std::cout << "infer_result.size() is " << infer_result.size() << std::endl;
if (infer_result.size() == 1) { if (infer_result.size() == 1) {
FDTensor result_copy = infer_result[0]; FDTensor result_copy = infer_result[0];
std::cout << "Reshape result_copy!" << std::endl; std::cout << "Reshape result_copy!" << std::endl;
@@ -206,12 +208,14 @@ bool PPTinyPose::Predict(cv::Mat* im, KeyPointDetectionResult* result) {
<< ModelName() << "." << std::endl; << ModelName() << "." << std::endl;
return false; return false;
} }
std::vector<FDTensor> infer_result; std::vector<FDTensor> infer_result;
if (!Infer(processed_data, &infer_result)) { if (!Infer(processed_data, &infer_result)) {
FDERROR << "Failed to inference while using model:" << ModelName() << "." FDERROR << "Failed to inference while using model:" << ModelName() << "."
<< std::endl; << std::endl;
return false; return false;
} }
if (!Postprocess(infer_result, result, center, scale)) { if (!Postprocess(infer_result, result, center, scale)) {
FDERROR << "Failed to postprocess while using model:" << ModelName() << "." FDERROR << "Failed to postprocess while using model:" << ModelName() << "."
<< std::endl; << std::endl;