Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-10-06 09:07:10 +08:00)
Remove debug code (#266)
* Rename Frontend to ModelFormat in documents
* Remove useless debug flag
@@ -55,7 +55,6 @@ option(ORT_DIRECTORY "User can specify the installed onnxruntime directory.")
 
 # Please don't open this flag now, some bugs exists.
 # option(ENABLE_OPENCV_CUDA "Whether to enable opencv with cuda, this will allow process image with GPU." OFF)
-option(ENABLE_DEBUG "Whether to enable print debug information, this may reduce performance." OFF)
 
 # Whether to build fastdeply with vision/text/... examples, only for testings.
 option(BUILD_EXAMPLES "Whether to build fastdeply with vision examples" OFF)
@@ -90,9 +89,6 @@ if(WIN32 AND ENABLE_VISION)
   set(CMAKE_POLICY_DEFAULT_CMP0077 NEW)
 endif()
 
-if(ENABLE_DEBUG)
-  add_definitions(-DFASTDEPLOY_DEBUG)
-endif()
 if(NOT CUDA_DIRECTORY)
   set(CUDA_DIRECTORY "/usr/local/cuda")
 endif()
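Note: the removed block above is what turned the ENABLE_DEBUG option into a global -DFASTDEPLOY_DEBUG compile definition. As a rough illustration only (not code added by this commit), any translation unit could then gate extra diagnostics on that macro; after this change the macro is simply never defined, so such blocks compile away:

    #include <iostream>

    // Illustrative sketch: with the old -DENABLE_DEBUG=ON build the compiler saw
    // -DFASTDEPLOY_DEBUG, so this branch was compiled in; without the definition
    // the preprocessor drops the block entirely.
    void Example() {
    #ifdef FASTDEPLOY_DEBUG
      std::cout << "debug-only diagnostics" << std::endl;
    #endif
    }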
@@ -13,10 +13,6 @@
 // limitations under the License.
 #pragma once
 
-#ifndef FASTDEPLOY_DEBUG
-#cmakedefine FASTDEPLOY_DEBUG
-#endif
-
 #ifndef FASTDEPLOY_LIB
 #cmakedefine FASTDEPLOY_LIB
 #endif
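Note: `#cmakedefine FASTDEPLOY_DEBUG` only matters when this header template is expanded by CMake's configure_file(); the surrounding #ifndef presumably just avoided clashing with the -DFASTDEPLOY_DEBUG definition added in CMakeLists.txt. For reference, configure_file() turns such a line into one of two forms in the generated header (standard CMake behaviour, shown here as a sketch rather than actual repository output):

    // Generated header when the corresponding CMake variable is set:
    #define FASTDEPLOY_DEBUG

    // Generated header when it is not set:
    /* #undef FASTDEPLOY_DEBUG */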
@@ -200,18 +200,4 @@ std::map<std::string, float> FastDeployModel::PrintStatisInfoOfRuntime() {
   statis_info_of_runtime_dict["iterations"] = time_of_runtime_.size();
   return statis_info_of_runtime_dict;
 }
-
-void FastDeployModel::EnableDebug() {
-#ifdef FASTDEPLOY_DEBUG
-  debug_ = true;
-#else
-  FDWARNING << "The compile FastDeploy is not with -DENABLE_DEBUG=ON, so "
-               "cannot enable debug mode."
-            << std::endl;
-  debug_ = false;
-#endif
-}
-
-bool FastDeployModel::DebugEnabled() { return debug_; }
-
 } // namespace fastdeploy
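Note: only the debug helpers are deleted here; the runtime-statistics path kept above is untouched, and PrintStatisInfoOfRuntime() still returns a map that includes the recorded iteration count. A hedged usage sketch (it assumes `model` is some FastDeployModel subclass that has already run inference with runtime-time recording turned on; the call that enables recording is not part of this diff and is not shown):

    #include <iostream>
    #include <map>
    #include <string>

    // Sketch: dump whatever statistics the model recorded, e.g. "iterations".
    template <typename Model>
    void DumpRuntimeStats(Model& model) {
      std::map<std::string, float> info = model.PrintStatisInfoOfRuntime();
      for (const auto& kv : info) {
        std::cout << kv.first << ": " << kv.second << std::endl;
      }
    }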
@@ -54,30 +54,15 @@ class FASTDEPLOY_DECL FastDeployModel {
   }
 
   virtual std::map<std::string, float> PrintStatisInfoOfRuntime();
-  virtual void EnableDebug();
-  virtual bool DebugEnabled();
 
  private:
   std::unique_ptr<Runtime> runtime_;
   bool runtime_initialized_ = false;
   // whether to record inference time
   bool enable_record_time_of_runtime_ = false;
-  bool debug_ = false;
 
   // record inference time for backend
   std::vector<double> time_of_runtime_;
 };
 
-#define TIMERECORD_START(id) \
-  TimeCounter tc_##id; \
-  tc_##id.Start();
-
-#define TIMERECORD_END(id, prefix) \
-  if (DebugEnabled()) { \
-    tc_##id.End(); \
-    FDLogger() << __FILE__ << "(" << __LINE__ << "):" << __FUNCTION__ << " " \
-               << prefix << " duration = " << tc_##id.Duration() << "s." \
-               << std::endl; \
-  }
-
 } // namespace fastdeploy
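Note: the deleted TIMERECORD_START/TIMERECORD_END macros depended on a TimeCounter helper and the now-removed DebugEnabled() flag. If scoped timing is ever wanted again without the macro machinery, an RAII guard is a common substitute; the class below is only a sketch with made-up names (ScopedTimer), not something this commit introduces:

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <utility>

    // Hypothetical stand-in for TIMERECORD_START/TIMERECORD_END: measures the
    // lifetime of the object and prints the duration on destruction.
    class ScopedTimer {
     public:
      explicit ScopedTimer(std::string prefix)
          : prefix_(std::move(prefix)),
            start_(std::chrono::steady_clock::now()) {}
      ~ScopedTimer() {
        auto end = std::chrono::steady_clock::now();
        double seconds = std::chrono::duration<double>(end - start_).count();
        std::cout << prefix_ << " duration = " << seconds << "s." << std::endl;
      }

     private:
      std::string prefix_;
      std::chrono::steady_clock::time_point start_;
    };

For example, `ScopedTimer t("Preprocess");` at the top of a block would print once the block exits, which is roughly what the old macro pair did when debug mode was enabled.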
@@ -302,10 +302,6 @@ bool NanoDetPlus::Postprocess(
 
 bool NanoDetPlus::Predict(cv::Mat* im, DetectionResult* result,
                           float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -322,31 +318,18 @@ bool NanoDetPlus::Predict(cv::Mat* im, DetectionResult* result,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
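Note: with these blocks gone, Predict() no longer reports per-stage (preprocess/inference/postprocess) timings even in debug builds; the same removal repeats in the remaining vision models below. Callers who still want a number can time the whole call from outside. A minimal sketch, assuming a constructed detector and a loaded image; the thresholds are arbitrary example values and the DetectionResult namespace is written from memory, so it may need adjusting:

    #include <chrono>
    #include <iostream>

    // Sketch: end-to-end timing of a single Predict() call from the caller side.
    // DetectorModel is whichever FastDeploy vision detector is in use; cv::Mat
    // and DetectionResult come from the OpenCV / FastDeploy headers.
    template <typename DetectorModel>
    double TimePredict(DetectorModel& model, cv::Mat& image,
                       fastdeploy::vision::DetectionResult* result) {
      auto t0 = std::chrono::steady_clock::now();
      if (!model.Predict(&image, result, 0.35f, 0.5f)) {
        std::cerr << "Predict failed." << std::endl;
      }
      auto t1 = std::chrono::steady_clock::now();
      return std::chrono::duration<double>(t1 - t0).count();  // seconds
    }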
@@ -256,9 +256,6 @@ bool YOLOv5::Postprocess(
 
 bool YOLOv5::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
                      float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
 
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
@@ -272,31 +269,18 @@ bool YOLOv5::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result, im_info, conf_threshold,
                    nms_iou_threshold, multi_label_)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -339,9 +339,6 @@ bool YOLOv5Lite::Postprocess(
 
 bool YOLOv5Lite::Predict(cv::Mat* im, DetectionResult* result,
                          float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -358,21 +355,12 @@ bool YOLOv5Lite::Predict(cv::Mat* im, DetectionResult* result,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (is_decode_exported) {
     if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
@@ -387,10 +375,6 @@ bool YOLOv5Lite::Predict(cv::Mat* im, DetectionResult* result,
       return false;
     }
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -214,10 +214,6 @@ bool YOLOv6::Postprocess(
 
 bool YOLOv6::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
                      float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -234,31 +230,18 @@ bool YOLOv6::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -279,10 +279,6 @@ bool YOLOX::PostprocessWithDecode(
 
 bool YOLOX::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
                     float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -299,21 +295,12 @@ bool YOLOX::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (is_decode_exported) {
     if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
@@ -328,10 +315,6 @@ bool YOLOX::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold,
       return false;
     }
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -257,10 +257,6 @@ bool RetinaFace::Postprocess(
 
 bool RetinaFace::Predict(cv::Mat* im, FaceDetectionResult* result,
                          float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -277,31 +273,18 @@ bool RetinaFace::Predict(cv::Mat* im, FaceDetectionResult* result,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -318,9 +318,6 @@ bool SCRFD::Postprocess(
 
 bool SCRFD::Predict(cv::Mat* im, FaceDetectionResult* result,
                     float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -337,31 +334,18 @@ bool SCRFD::Predict(cv::Mat* im, FaceDetectionResult* result,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -168,10 +168,6 @@ bool UltraFace::Postprocess(
 
 bool UltraFace::Predict(cv::Mat* im, FaceDetectionResult* result,
                         float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -187,32 +183,18 @@ bool UltraFace::Predict(cv::Mat* im, FaceDetectionResult* result,
     FDERROR << "Failed to preprocess input image." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -241,10 +241,6 @@ bool YOLOv5Face::Postprocess(
 
 bool YOLOv5Face::Predict(cv::Mat* im, FaceDetectionResult* result,
                          float conf_threshold, float nms_iou_threshold) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -260,32 +256,18 @@ bool YOLOv5Face::Predict(cv::Mat* im, FaceDetectionResult* result,
     FDERROR << "Failed to preprocess input image." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors[0], result, im_info, conf_threshold,
                    nms_iou_threshold)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -109,10 +109,6 @@ bool InsightFaceRecognitionModel::Postprocess(
 
 bool InsightFaceRecognitionModel::Predict(cv::Mat* im,
                                           FaceRecognitionResult* result) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -121,30 +117,17 @@ bool InsightFaceRecognitionModel::Predict(cv::Mat* im,
     return false;
   }
 
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 
@@ -126,10 +126,6 @@ bool MODNet::Postprocess(
 }
 
 bool MODNet::Predict(cv::Mat* im, MattingResult* result) {
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_START(0)
-#endif
-
   Mat mat(*im);
   std::vector<FDTensor> input_tensors(1);
 
@@ -142,31 +138,17 @@ bool MODNet::Predict(cv::Mat* im, MattingResult* result) {
     FDERROR << "Failed to preprocess input image." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(0, "Preprocess")
-  TIMERECORD_START(1)
-#endif
-
   input_tensors[0].name = InputInfoOfRuntime(0).name;
   std::vector<FDTensor> output_tensors;
   if (!Infer(input_tensors, &output_tensors)) {
     FDERROR << "Failed to inference." << std::endl;
     return false;
   }
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(1, "Inference")
-  TIMERECORD_START(2)
-#endif
 
   if (!Postprocess(output_tensors, result, im_info)) {
     FDERROR << "Failed to post process." << std::endl;
     return false;
   }
-
-#ifdef FASTDEPLOY_DEBUG
-  TIMERECORD_END(2, "Postprocess")
-#endif
   return true;
 }
 