Mirror of https://github.com/PaddlePaddle/FastDeploy.git, synced 2025-10-05 16:48:03 +08:00
[Model] update pptracking letterboxresize and add some comment (#438)
* add override mark
* delete some
* recovery
* recovery
* add tracking
* add tracking py_bind and example
* add pptracking
* add pptracking
* iomanip head file
* add opencv_video lib
* add python libs package
* complete comments
* add jdeTracker_ member variable
* add 'FASTDEPLOY_DECL' macro
* remove kwargs params
* [Doc] update pptracking docs
* delete 'ENABLE_PADDLE_FRONTEND' switch
* add pptracking unit test
* update pptracking unit test
* modify test video file path and remove trt test
* update unit test model url
* remove 'FASTDEPLOY_DECL' macro
* fix build python packages about pptracking on win32
* update comment
* add pptracking model explain

Signed-off-by: ChaoII <849453582@qq.com>
Co-authored-by: Jason <jiangjiajun@baidu.com>
examples/vision/tracking/pptracking/README.md (new file, 35 lines)
@@ -0,0 +1,35 @@
# PP-Tracking Model Deployment

## Model Version

- [PaddleDetection release/2.5](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.5)

## Supported Model List

FastDeploy currently supports deployment of the following models:

- [PP-Tracking series models](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.5/configs/mot)

## Export the Deployment Model

Before deployment, the trained PP-Tracking model needs to be exported as a deployment model. For the export steps, refer to the document [Export Model](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.5/deploy/pptracking/cpp/README.md).

## Download Pre-trained Models

For developers' convenience, a PP-Tracking pedestrian-tracking model is provided below and can be downloaded and used directly. For more models, see [PPTracking](https://github.com/PaddlePaddle/PaddleDetection/blob/release/2.5/deploy/pptracking/README_cn.md).

| Model | Parameter Size | Accuracy | Note |
|:------|:---------------|:---------|:-----|
| [PP-Tracking](https://bj.bcebos.com/paddlehub/fastdeploy/fairmot_hrnetv2_w18_dlafpn_30e_576x320.tgz) | 51.2MB | - | |

**Notes**

- Only JDE-style models are supported (JDE, FairMOT, MCFairMOT).
- SDE models are not supported yet; support will be added after PaddleDetection releases the official SDE deployment code.

## Detailed Deployment Documents

- [Python deployment](python)
- [C++ deployment](cpp)
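Before turning to the language-specific guides linked above, the sketch below shows how the Python API documented later in this commit can drive the downloaded model frame by frame. It is a hedged illustration, not the official demo: the module path `fd.vision.tracking.PPTracking`, the file names inside the extracted archive (`model.pdmodel`, `model.pdiparams`, `infer_cfg.yml`), and the use of `person.mp4` are assumptions inferred from the docstrings and URLs elsewhere in this diff.

```python
# Hedged quick-start sketch (not the official example). Assumes the model
# archive from the table above has been downloaded and extracted, and that the
# Python class is exposed as fd.vision.tracking.PPTracking.
import cv2
import fastdeploy as fd

model_dir = "fairmot_hrnetv2_w18_dlafpn_30e_576x320"
model = fd.vision.tracking.PPTracking(
    model_file=model_dir + "/model.pdmodel",
    params_file=model_dir + "/model.pdiparams",
    config_file=model_dir + "/infer_cfg.yml")

cap = cv2.VideoCapture("person.mp4")  # test video used by the cpp/python examples
while True:
    ok, frame = cap.read()
    if not ok:
        break
    result = model.predict(frame)  # returns a MOTResult (boxes, ids, scores, class_ids)
    print(result)
cap.release()
```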
@@ -22,7 +22,6 @@ make -j
wget https://bj.bcebos.com/paddlehub/fastdeploy/fairmot_hrnetv2_w18_dlafpn_30e_576x320.tgz
tar -xvf fairmot_hrnetv2_w18_dlafpn_30e_576x320.tgz
wget https://bj.bcebos.com/paddlehub/fastdeploy/person.mp4
wget https://bj.bcebos.com/paddlehub/fastdeploy/person.mp4

# CPU inference
fastdeploy/vision/common/processors/letter_box.cc (new file, 71 lines)
@@ -0,0 +1,71 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "fastdeploy/vision/common/processors/letter_box.h"

namespace fastdeploy {
namespace vision {

bool LetterBoxResize::ImplByOpenCV(Mat* mat) {
  if (mat->Channels() != color_.size()) {
    FDERROR << "LetterBoxResize: Require the number of input channels to "
               "equal the size of the padding values, but now channels = "
            << mat->Channels()
            << ", the size of padding values = " << color_.size() << "."
            << std::endl;
    return false;
  }
  cv::Mat* im = mat->GetOpenCVMat();
  // Generate scale_factor: use the smaller of the two ratios so the resized
  // image keeps its aspect ratio and fits inside the target size.
  int origin_w = mat->Width();
  int origin_h = mat->Height();
  int target_h = target_size_[0];
  int target_w = target_size_[1];
  float ratio_h = static_cast<float>(target_h) / static_cast<float>(origin_h);
  float ratio_w = static_cast<float>(target_w) / static_cast<float>(origin_w);
  float resize_scale = std::min(ratio_h, ratio_w);
  // Get the resized shape.
  int new_shape_w = std::round(im->cols * resize_scale);
  int new_shape_h = std::round(im->rows * resize_scale);
  // Calculate the padding; the +/-0.1 offsets split an odd number of padding
  // pixels between the two sides.
  float padw = (target_size_[1] - new_shape_w) / 2.;
  float padh = (target_size_[0] - new_shape_h) / 2.;
  int top = std::round(padh - 0.1);
  int bottom = std::round(padh + 0.1);
  int left = std::round(padw - 0.1);
  int right = std::round(padw + 0.1);
  cv::resize(*im, *im, cv::Size(new_shape_w, new_shape_h), 0, 0,
             cv::INTER_AREA);
  cv::Scalar color;
  if (color_.size() == 1) {
    color = cv::Scalar(color_[0]);
  } else if (color_.size() == 2) {
    color = cv::Scalar(color_[0], color_[1]);
  } else if (color_.size() == 3) {
    color = cv::Scalar(color_[0], color_[1], color_[2]);
  } else {
    color = cv::Scalar(color_[0], color_[1], color_[2], color_[3]);
  }
  cv::copyMakeBorder(*im, *im, top, bottom, left, right, cv::BORDER_CONSTANT,
                     color);
  mat->SetWidth(im->cols);
  mat->SetHeight(im->rows);
  return true;
}

bool LetterBoxResize::Run(Mat* mat, const std::vector<int>& target_size,
                          const std::vector<float>& color, ProcLib lib) {
  auto l = LetterBoxResize(target_size, color);
  return l(mat, lib);
}

}  // namespace vision
}  // namespace fastdeploy
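The resize-and-pad arithmetic in `ImplByOpenCV` above is the standard letterbox scheme: scale by the smaller of the two target/origin ratios, then pad the remainder symmetrically with a constant color. The Python sketch below re-derives the same math for illustration only; `letterbox_resize` is a hypothetical helper, not a FastDeploy API.

```python
# Illustrative re-implementation of the LetterBoxResize math above.
# letterbox_resize is a hypothetical helper, not FastDeploy code.
# target_size follows the C++ convention [height, width]; color is the
# per-channel constant padding value.
import cv2

def letterbox_resize(img, target_size, color=(114, 114, 114)):
    target_h, target_w = target_size
    origin_h, origin_w = img.shape[:2]
    # Keep the aspect ratio: scale by the smaller of the two ratios so the
    # resized image fits entirely inside the target canvas.
    scale = min(target_h / origin_h, target_w / origin_w)
    new_w, new_h = round(origin_w * scale), round(origin_h * scale)
    resized = cv2.resize(img, (new_w, new_h), interpolation=cv2.INTER_AREA)
    # Split the leftover pixels between the two sides; the +/-0.1 offsets
    # mirror the C++ code and put the extra pixel (if any) on bottom/right.
    pad_w = (target_w - new_w) / 2.0
    pad_h = (target_h - new_h) / 2.0
    top, bottom = round(pad_h - 0.1), round(pad_h + 0.1)
    left, right = round(pad_w - 0.1), round(pad_w + 0.1)
    return cv2.copyMakeBorder(resized, top, bottom, left, right,
                              cv2.BORDER_CONSTANT, value=color)
```

For example, `letterbox_resize(frame, (320, 576))` always yields a 320-by-576 (height-by-width) canvas, with the input image centered and the border filled by `color`.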
@@ -14,23 +14,28 @@

#pragma once

#include "fastdeploy/vision/common/processors/transform.h"
#include "fastdeploy/fastdeploy_model.h"
#include "fastdeploy/vision/common/processors/base.h"

namespace fastdeploy {
namespace vision {
namespace tracking {

class LetterBoxResize : public Processor {
 public:
  LetterBoxResize(const std::vector<int>& target_size, const std::vector<float>& color);
  bool ImplByOpenCV(Mat* mat) override;
  std::string Name() override { return "LetterBoxResize"; }
  LetterBoxResize(const std::vector<int>& target_size, const std::vector<float>& color) {
    target_size_ = target_size;
    color_ = color;
  }

  bool ImplByOpenCV(Mat* mat);

  std::string Name() { return "LetterBoxResize"; }

  static bool Run(Mat* mat, const std::vector<int>& target_size, const std::vector<float>& color,
                  ProcLib lib = ProcLib::OPENCV);

 private:
  std::vector<int> target_size_;
  std::vector<float> color_;
};

}  // namespace tracking
}  // namespace vision
}  // namespace fastdeploy
@@ -31,3 +31,4 @@

#include "fastdeploy/vision/common/processors/resize_by_short.h"
#include "fastdeploy/vision/common/processors/stride_pad.h"
#include "fastdeploy/vision/common/processors/warp_affine.h"
#include "fastdeploy/vision/common/processors/letter_box.h"
@@ -155,16 +155,25 @@ struct FASTDEPLOY_DECL OCRResult : public BaseResult {
  std::string Str();
};

/*! @brief MOT (Multi-Object Tracking) result structure for all the MOT models
 */
struct FASTDEPLOY_DECL MOTResult : public BaseResult {
  // left top right bottom
  /** \brief All the tracked object boxes for an input image; the size of `boxes` is the number of tracked objects, and each element of `boxes` is an array of 4 int values representing [xmin, ymin, xmax, ymax]
   */
  std::vector<std::array<int, 4>> boxes;
  /** \brief All the tracked object ids
   */
  std::vector<int> ids;
  /** \brief The confidence for each tracked object
   */
  std::vector<float> scores;
  /** \brief The classification label id for each tracked object
   */
  std::vector<int> class_ids;
  ResultType type = ResultType::MOT;

  /// Clear MOT result
  void Clear();

  /// Debug function, convert the result to string to print
  std::string Str();
};
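For wiring the result into a custom visualization, the four fields above map onto a simple per-object loop. The snippet below is a hedged Python illustration (it assumes the Python binding exposes the same field names on the returned MOTResult, as the predict() docstring later in this diff suggests); the drawing itself is plain OpenCV, not a FastDeploy API.

```python
# Hedged illustration: draw every tracked object from a MOTResult onto the
# frame it was predicted from. Assumes result.boxes / result.ids /
# result.scores are exposed with these names in the Python binding.
import cv2

def draw_mot_result(frame, result, score_thresh=0.5):
    for box, track_id, score in zip(result.boxes, result.ids, result.scores):
        if score < score_thresh:
            continue
        xmin, ymin, xmax, ymax = box  # pixel coordinates, [xmin, ymin, xmax, ymax]
        cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), (0, 255, 0), 2)
        cv2.putText(frame, "id:%d %.2f" % (track_id, score),
                    (xmin, max(ymin - 5, 0)),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
    return frame
```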
@@ -1,59 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "fastdeploy/vision/tracking/pptracking/letter_box.h"

namespace fastdeploy {
namespace vision {
namespace tracking {

LetterBoxResize::LetterBoxResize(const std::vector<int>& target_size, const std::vector<float>& color) {
  target_size_ = target_size;
  color_ = color;
}

bool LetterBoxResize::ImplByOpenCV(Mat* mat) {
  if (mat->Channels() != color_.size()) {
    FDERROR << "Pad: Require input channels equals to size of padding value, "
               "but now channels = "
            << mat->Channels()
            << ", the size of padding values = " << color_.size() << "."
            << std::endl;
    return false;
  }
  // generate scale_factor
  int origin_w = mat->Width();
  int origin_h = mat->Height();
  int target_h = target_size_[0];
  int target_w = target_size_[1];
  float ratio_h = static_cast<float>(target_h) / static_cast<float>(origin_h);
  float ratio_w = static_cast<float>(target_w) / static_cast<float>(origin_w);
  float resize_scale = std::min(ratio_h, ratio_w);

  int new_shape_w = std::round(mat->Width() * resize_scale);
  int new_shape_h = std::round(mat->Height() * resize_scale);
  float padw = (target_size_[1] - new_shape_w) / 2.;
  float padh = (target_size_[0] - new_shape_h) / 2.;
  int top = std::round(padh - 0.1);
  int bottom = std::round(padh + 0.1);
  int left = std::round(padw - 0.1);
  int right = std::round(padw + 0.1);

  Resize::Run(mat, new_shape_w, new_shape_h);
  Pad::Run(mat, top, bottom, left, right, color_);
  return true;
}

}  // namespace tracking
}  // namespace vision
}  // namespace fastdeploy
@@ -151,32 +151,7 @@ bool PPTracking::BuildPreprocessPipelineFromConfig(){
  return true;
}

void PPTracking::GetNmsInfo() {
  if (runtime_option.model_format == ModelFormat::PADDLE) {
    std::string contents;
    if (!ReadBinaryFromFile(runtime_option.model_file, &contents)) {
      return;
    }
    auto reader = paddle2onnx::PaddleReader(contents.c_str(), contents.size());
    if (reader.has_nms) {
      has_nms_ = true;
      background_label = reader.nms_params.background_label;
      keep_top_k = reader.nms_params.keep_top_k;
      nms_eta = reader.nms_params.nms_eta;
      nms_threshold = reader.nms_params.nms_threshold;
      score_threshold = reader.nms_params.score_threshold;
      nms_top_k = reader.nms_params.nms_top_k;
      normalized = reader.nms_params.normalized;
    }
  }
}

bool PPTracking::Initialize() {
  // remove multiclass_nms3 now
  // this is a trick operation for ppyoloe while inference on trt
  GetNmsInfo();
  runtime_option.remove_multiclass_nms_ = true;
  runtime_option.custom_op_info_["multiclass_nms3"] = "MultiClassNMS";
  if (!BuildPreprocessPipelineFromConfig()) {
    FDERROR << "Failed to build preprocess pipeline from configuration file."
            << std::endl;
@@ -186,18 +161,6 @@ bool PPTracking::Initialize() {
    FDERROR << "Failed to initialize fastdeploy backend." << std::endl;
    return false;
  }

  if (has_nms_ && runtime_option.backend == Backend::TRT) {
    FDINFO << "Detected operator multiclass_nms3 in your model, will replace "
              "it with fastdeploy::backend::MultiClassNMS(background_label="
           << background_label << ", keep_top_k=" << keep_top_k
           << ", nms_eta=" << nms_eta << ", nms_threshold=" << nms_threshold
           << ", score_threshold=" << score_threshold
           << ", nms_top_k=" << nms_top_k << ", normalized=" << normalized
           << ")." << std::endl;
    has_nms_ = false;
  }

  // create JDETracker instance
  std::unique_ptr<JDETracker> jdeTracker(new JDETracker);
  jdeTracker_ = std::move(jdeTracker);
@@ -18,7 +18,7 @@
#include "fastdeploy/fastdeploy_model.h"
#include "fastdeploy/vision/common/result.h"
#include "fastdeploy/vision/tracking/pptracking/tracker.h"
#include "fastdeploy/vision/tracking/pptracking/letter_box.h"
//#include "fastdeploy/vision/tracking/pptracking/letter_box.h"

namespace fastdeploy {
namespace vision {
@@ -56,8 +56,8 @@ public:
 private:

  bool BuildPreprocessPipelineFromConfig();

  bool Initialize();
  void GetNmsInfo();

  bool Preprocess(Mat* img, std::vector<FDTensor>* outputs);

@@ -69,19 +69,8 @@ private:
  float conf_thresh_;
  float tracked_thresh_;
  float min_box_area_;
  bool is_scale_ = true;
  std::unique_ptr<JDETracker> jdeTracker_;

  // configuration for nms
  int64_t background_label = -1;
  int64_t keep_top_k = 300;
  float nms_eta = 1.0;
  float nms_threshold = 0.7;
  float score_threshold = 0.01;
  int64_t nms_top_k = 10000;
  bool normalized = true;
  bool has_nms_ = true;

};

}  // namespace tracking
@@ -24,6 +24,14 @@ class PPTracking(FastDeployModel):
                 config_file,
                 runtime_option=None,
                 model_format=ModelFormat.PADDLE):
        """Load a PPTracking model exported by PaddleDetection.

        :param model_file: (str) Path of model file, e.g. pptracking/model.pdmodel
        :param params_file: (str) Path of parameters file, e.g. pptracking/model.pdiparams
        :param config_file: (str) Path of configuration file for deployment, e.g. pptracking/infer_cfg.yml
        :param runtime_option: (fastdeploy.RuntimeOption) RuntimeOption for inference of this model; if it's None, the default backend on CPU will be used
        :param model_format: (fastdeploy.ModelFormat) Model format of the loaded model
        """
        super(PPTracking, self).__init__(runtime_option)

        assert model_format == ModelFormat.PADDLE, "PPTracking model only support model format of ModelFormat.Paddle now."
@@ -33,5 +41,10 @@ class PPTracking(FastDeployModel):
        assert self.initialized, "PPTracking model initialize failed."

    def predict(self, input_image):
        """Predict the MOT result for an input image

        :param input_image: (numpy.ndarray) The input image data, 3-D array with layout HWC, BGR format
        :return: MOTResult
        """
        assert input_image is not None, "The input image data is None."
        return self._model.predict(input_image)
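To move inference off the default CPU backend, the `runtime_option` parameter documented above is the hook. A hedged sketch follows; the `RuntimeOption` method names shown are the commonly used FastDeploy ones and may differ between versions, so treat them as assumptions.

```python
# Hedged sketch: build a RuntimeOption and hand it to PPTracking. use_gpu()
# (and the commented-out backend selector) are assumptions about the installed
# FastDeploy version's RuntimeOption API.
import fastdeploy as fd

option = fd.RuntimeOption()
option.use_gpu(0)              # run on GPU 0; omit for the default CPU backend
# option.use_paddle_backend()  # optionally pin a specific inference backend

model_dir = "fairmot_hrnetv2_w18_dlafpn_30e_576x320"
model = fd.vision.tracking.PPTracking(
    model_dir + "/model.pdmodel",
    model_dir + "/model.pdiparams",
    model_dir + "/infer_cfg.yml",
    runtime_option=option)
```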