From 81eaeddbd0f37ce479bfd9069c9fa96b77b7a0e6 Mon Sep 17 00:00:00 2001 From: Wang Xinyu Date: Mon, 19 Dec 2022 10:14:17 +0800 Subject: [PATCH] [Streamer] Basic framework, YAML parser, ppyoloe demo and video decoder demo (#863) * support trt installed in sys dir * streamer init * link elements and run * create source bin * add config * restruct dirs * set property * link elements * integrate perf * add bbox parser * parse yaml to string, video decoder * use try pull for decoder and nits * streamer ppyoloe cpp * update readme * video decoder cpp dir * add cn readme * update readme * cmake nits * refactor perf measurement --- CMakeLists.txt | 20 ++- FastDeploy.cmake.in | 19 ++- streamer/CMakeLists.txt | 52 +++++++ streamer/README.md | 1 + streamer/README_CN.md | 45 ++++++ streamer/README_EN.md | 44 ++++++ streamer/examples/ppyoloe/cpp/CMakeLists.txt | 30 ++++ streamer/examples/ppyoloe/cpp/README.md | 1 + streamer/examples/ppyoloe/cpp/README_CN.md | 44 ++++++ streamer/examples/ppyoloe/cpp/README_EN.md | 44 ++++++ streamer/examples/ppyoloe/cpp/main.cc | 22 +++ .../examples/ppyoloe/cpp/nvinfer_config.txt | 23 +++ .../examples/ppyoloe/cpp/streamer_cfg.yml | 46 ++++++ .../examples/video_decoder/cpp/CMakeLists.txt | 30 ++++ streamer/examples/video_decoder/cpp/README.md | 1 + .../examples/video_decoder/cpp/README_CN.md | 20 +++ .../examples/video_decoder/cpp/README_EN.md | 20 +++ streamer/examples/video_decoder/cpp/main.cc | 43 ++++++ .../video_decoder/cpp/streamer_cfg.yml | 19 +++ streamer/src/app/base_app.cc | 135 +++++++++++++++++ streamer/src/app/base_app.h | 86 +++++++++++ streamer/src/app/video_analytics.cc | 21 +++ streamer/src/app/video_analytics.h | 33 ++++ streamer/src/app/video_decoder.cc | 69 +++++++++ streamer/src/app/video_decoder.h | 41 +++++ streamer/src/app/yaml_parser.cc | 125 ++++++++++++++++ streamer/src/app/yaml_parser.h | 58 +++++++ streamer/src/deepstream/bbox_parser.cc | 76 ++++++++++ streamer/src/fd_streamer.cc | 56 +++++++ streamer/src/fd_streamer.h | 52 +++++++ streamer/src/gstreamer/perf.cc | 114 ++++++++++++++ streamer/src/gstreamer/perf.h | 29 ++++ streamer/src/gstreamer/types.h | 60 ++++++++ streamer/src/gstreamer/utils.cc | 141 ++++++++++++++++++ streamer/src/gstreamer/utils.h | 28 ++++ 35 files changed, 1635 insertions(+), 13 deletions(-) create mode 100644 streamer/CMakeLists.txt create mode 120000 streamer/README.md create mode 100644 streamer/README_CN.md create mode 100644 streamer/README_EN.md create mode 100644 streamer/examples/ppyoloe/cpp/CMakeLists.txt create mode 120000 streamer/examples/ppyoloe/cpp/README.md create mode 100644 streamer/examples/ppyoloe/cpp/README_CN.md create mode 100644 streamer/examples/ppyoloe/cpp/README_EN.md create mode 100644 streamer/examples/ppyoloe/cpp/main.cc create mode 100644 streamer/examples/ppyoloe/cpp/nvinfer_config.txt create mode 100644 streamer/examples/ppyoloe/cpp/streamer_cfg.yml create mode 100644 streamer/examples/video_decoder/cpp/CMakeLists.txt create mode 120000 streamer/examples/video_decoder/cpp/README.md create mode 100644 streamer/examples/video_decoder/cpp/README_CN.md create mode 100644 streamer/examples/video_decoder/cpp/README_EN.md create mode 100644 streamer/examples/video_decoder/cpp/main.cc create mode 100644 streamer/examples/video_decoder/cpp/streamer_cfg.yml create mode 100644 streamer/src/app/base_app.cc create mode 100644 streamer/src/app/base_app.h create mode 100644 streamer/src/app/video_analytics.cc create mode 100644 streamer/src/app/video_analytics.h create mode 100644 
streamer/src/app/video_decoder.cc create mode 100644 streamer/src/app/video_decoder.h create mode 100644 streamer/src/app/yaml_parser.cc create mode 100644 streamer/src/app/yaml_parser.h create mode 100644 streamer/src/deepstream/bbox_parser.cc create mode 100644 streamer/src/fd_streamer.cc create mode 100644 streamer/src/fd_streamer.h create mode 100644 streamer/src/gstreamer/perf.cc create mode 100644 streamer/src/gstreamer/perf.h create mode 100644 streamer/src/gstreamer/types.h create mode 100644 streamer/src/gstreamer/utils.cc create mode 100644 streamer/src/gstreamer/utils.h diff --git a/CMakeLists.txt b/CMakeLists.txt index 0bcfa4084..04954acb1 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -362,14 +362,20 @@ if(ENABLE_TRT_BACKEND) endif() if(NOT BUILD_ON_JETSON) if(NOT TRT_DIRECTORY) - message(FATAL_ERROR "While -DENABLE_TRT_BACKEND=ON, must define -DTRT_DIRECTORY, e.g -DTRT_DIRECTORY=/Downloads/TensorRT-8.4") + set(TRT_INC_DIR /usr/include/x86_64-linux-gnu/) + set(TRT_LIB_DIR /usr/lib/x86_64-linux-gnu/) endif() endif() - set(TRT_INC_DIR /usr/include/aarch64-linux-gnu/) - set(TRT_LIB_DIR /usr/lib/aarch64-linux-gnu/) - if(NOT BUILD_ON_JETSON) - set(TRT_INC_DIR ${TRT_DIRECTORY}/include) - set(TRT_LIB_DIR ${TRT_DIRECTORY}/lib) + if(BUILD_ON_JETSON) + set(TRT_INC_DIR /usr/include/aarch64-linux-gnu/) + set(TRT_LIB_DIR /usr/lib/aarch64-linux-gnu/) + else() + set(TRT_INC_DIR /usr/include/x86_64-linux-gnu/) + set(TRT_LIB_DIR /usr/lib/x86_64-linux-gnu/) + if(TRT_DIRECTORY) + set(TRT_INC_DIR ${TRT_DIRECTORY}/include) + set(TRT_LIB_DIR ${TRT_DIRECTORY}/lib) + endif() endif() add_definitions(-DENABLE_TRT_BACKEND) @@ -382,7 +388,7 @@ if(ENABLE_TRT_BACKEND) list(APPEND DEPEND_LIBS ${TRT_INFER_LIB} ${TRT_ONNX_LIB} ${TRT_PLUGIN_LIB}) list(APPEND ALL_DEPLOY_SRCS ${DEPLOY_OP_CUDA_KERNEL_SRCS}) - if(NOT BUILD_ON_JETSON) + if(NOT BUILD_ON_JETSON AND TRT_DIRECTORY) if(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt") file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/third_libs/install/tensorrt") endif() diff --git a/FastDeploy.cmake.in b/FastDeploy.cmake.in index d8c0df3d6..17f83eb69 100755 --- a/FastDeploy.cmake.in +++ b/FastDeploy.cmake.in @@ -141,13 +141,19 @@ if(WITH_GPU) if (ENABLE_TRT_BACKEND) if(BUILD_ON_JETSON) - find_library(TRT_INFER_LIB nvinfer /usr/include/aarch64-linux-gnu/) - find_library(TRT_ONNX_LIB nvonnxparser /usr/include/aarch64-linux-gnu/) - find_library(TRT_PLUGIN_LIB nvinfer_plugin /usr/include/aarch64-linux-gnu/) + find_library(TRT_INFER_LIB nvinfer /usr/lib/aarch64-linux-gnu/) + find_library(TRT_ONNX_LIB nvonnxparser /usr/lib/aarch64-linux-gnu/) + find_library(TRT_PLUGIN_LIB nvinfer_plugin /usr/lib/aarch64-linux-gnu/) else() - find_library(TRT_INFER_LIB nvinfer ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) - find_library(TRT_ONNX_LIB nvonnxparser ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) - find_library(TRT_PLUGIN_LIB nvinfer_plugin ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) + if(TRT_DIRECTORY) + find_library(TRT_INFER_LIB nvinfer ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) + find_library(TRT_ONNX_LIB nvonnxparser ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) + find_library(TRT_PLUGIN_LIB nvinfer_plugin ${CMAKE_CURRENT_LIST_DIR}/third_libs/install/tensorrt/lib NO_DEFAULT_PATH) + else() + find_library(TRT_INFER_LIB nvinfer /usr/lib/x86_64-linux-gnu/) + find_library(TRT_ONNX_LIB nvonnxparser 
/usr/lib/x86_64-linux-gnu/) + find_library(TRT_PLUGIN_LIB nvinfer_plugin /usr/lib/x86_64-linux-gnu/) + endif() endif() list(APPEND FASTDEPLOY_LIBS ${TRT_INFER_LIB} ${TRT_ONNX_LIB} ${TRT_PLUGIN_LIB}) endif() @@ -275,6 +281,7 @@ message(STATUS " ENABLE_VISION : ${ENABLE_VISION}") message(STATUS " ENABLE_TEXT : ${ENABLE_TEXT}") if(WITH_GPU) message(STATUS " CUDA_DIRECTORY : ${CUDA_DIRECTORY}") + message(STATUS " TRT_DIRECTORY : ${TRT_DIRECTORY}") endif() if(OPENCV_DIRECTORY) message(STATUS " OPENCV_DIRECTORY : ${OPENCV_DIRECTORY}") diff --git a/streamer/CMakeLists.txt b/streamer/CMakeLists.txt new file mode 100644 index 000000000..c5c7c2bd8 --- /dev/null +++ b/streamer/CMakeLists.txt @@ -0,0 +1,52 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +PROJECT(fd_streamer C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.10) + +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") +option(ENABLE_DEEPSTREAM "Enable NVIDIA DeepStream SDK" ON) + +file(GLOB_RECURSE ALL_STREAMER_SRCS ${PROJECT_SOURCE_DIR}/src/*.cc) +file(GLOB_RECURSE DEEPSTREAM_SRCS ${PROJECT_SOURCE_DIR}/src/deepstream/*.cc) +list(REMOVE_ITEM ALL_STREAMER_SRCS ${DEEPSTREAM_SRCS}) + +set(DEPEND_LIBS "") + +find_package(PkgConfig REQUIRED) +pkg_check_modules(GSTAPP gstreamer-app-1.0 REQUIRED) +include_directories(${GSTAPP_INCLUDE_DIRS}) +list(APPEND DEPEND_LIBS ${GSTAPP_LIBRARIES}) + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) +include_directories(${FASTDEPLOY_INCS}) +include_directories(${PROJECT_SOURCE_DIR}/src/) + +if(ENABLE_DEEPSTREAM) + add_definitions(-DENABLE_DEEPSTREAM) + include_directories(${CUDA_DIRECTORY}/include) + include_directories(/opt/nvidia/deepstream/deepstream/sources/includes/) + link_directories(/opt/nvidia/deepstream/deepstream/lib/) + list(APPEND ALL_STREAMER_SRCS ${DEEPSTREAM_SRCS}) + list(APPEND DEPEND_LIBS nvdsgst_meta nvds_meta) +else() + message(FATAL_ERROR "Currently, DeepStream is required, we will make it optional later.") +endif() + +# Link the yaml-cpp in system path, because deepstream also depends on yaml-cpp, +# If we link multiple yaml-cpp libs, strange error will occur. 
+list(APPEND DEPEND_LIBS yaml-cpp)
+
+add_library(fd_streamer SHARED ${ALL_STREAMER_SRCS})
+target_link_libraries(fd_streamer ${FASTDEPLOY_LIBS} ${DEPEND_LIBS})
diff --git a/streamer/README.md b/streamer/README.md
new file mode 120000
index 000000000..f18766817
--- /dev/null
+++ b/streamer/README.md
@@ -0,0 +1 @@
+README_EN.md
\ No newline at end of file
diff --git a/streamer/README_CN.md b/streamer/README_CN.md
new file mode 100644
index 000000000..fcf40bf6e
--- /dev/null
+++ b/streamer/README_CN.md
@@ -0,0 +1,45 @@
+简体中文 | [English](README_EN.md)
+
+# FastDeploy Streamer
+
+## 简介
+
+FastDeploy Streamer(FDStreamer)是一个AI多媒体流处理框架，以Pipeline的形式编排AI推理、音视频解码、编码、推流等功能，
+赋能AI应用的端到端优化和部署。
+
+目前FDStreamer只适配了NVIDIA GPU/Jetson平台，更多硬件和平台的支持敬请期待。
+
+## 准备环境
+
+### Jetson
+- DeepStream 6.1+
+
+### x86 GPU
+
+手动安装DeepStream 6.1.1及其依赖项，或使用以下docker:
+```
+docker pull nvcr.io/nvidia/deepstream:6.1.1-devel
+```
+
+## 编译和运行
+
+1. [编译FastDeploy](../../docs/cn/build_and_install), 或直接下载[FastDeploy预编译库](../../docs/cn/build_and_install/download_prebuilt_libraries.md)
+
+2. 编译Streamer
+```
+cd FastDeploy/streamer/
+mkdir build && cd build/
+
+# 下载FastDeploy预编译库，用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用
+wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
+tar xvf fastdeploy-linux-x64-x.x.x.tgz
+cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
+make -j
+```
+
+3. 编译和运行Example
+
+| Example | 简介 |
+|:--|:--|
+| [PP-YOLOE](./examples/ppyoloe) | 多路视频接入，PP-YOLOE目标检测，NVTracker跟踪，硬编解码，写入mp4文件 |
+| [Video Decoder](./examples/video_decoder) | 视频硬解码 |
diff --git a/streamer/README_EN.md b/streamer/README_EN.md
new file mode 100644
index 000000000..16aea26c9
--- /dev/null
+++ b/streamer/README_EN.md
@@ -0,0 +1,44 @@
+English | [简体中文](README_CN.md)
+
+# FastDeploy Streamer
+
+## Introduction
+
+FastDeploy Streamer (FDStreamer) is an AI multimedia stream processing framework that arranges functions such as AI inference, audio and video decoding, encoding, and streaming in the form of a pipeline, enabling end-to-end optimization and deployment of AI applications.
+
+Currently FDStreamer only supports NVIDIA GPU/Jetson platforms; support for more hardware and platforms is on the way.
+
+## Environment
+
+### Jetson
+- DeepStream 6.1+
+
+### x86 GPU
+
+Install DeepStream 6.1.1 and its dependencies manually, or use the Docker image below:
+```
+docker pull nvcr.io/nvidia/deepstream:6.1.1-devel
+```
+
+## Build
+
+1. [Build FastDeploy](../../docs/en/build_and_install), or download [FastDeploy prebuilt libraries](../../docs/en/build_and_install/download_prebuilt_libraries.md)
+
+2. Build Streamer
+```
+cd FastDeploy/streamer/
+mkdir build && cd build/
+
+# Download FastDeploy prebuilt libraries, please check `FastDeploy prebuilt libraries` above.
+wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
+tar xvf fastdeploy-linux-x64-x.x.x.tgz
+cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
+make -j
+```
+
+3. Build and Run Example
+
+| Example | Brief |
+|:--|:--|
+| [PP-YOLOE](./examples/ppyoloe) | Multiple input videos, PP-YOLOE object detection, NvTracker tracking, hardware codec, writing to an mp4 file |
+| [Video Decoder](./examples/video_decoder) | Hardware video decoding |
diff --git a/streamer/examples/ppyoloe/cpp/CMakeLists.txt b/streamer/examples/ppyoloe/cpp/CMakeLists.txt
new file mode 100644
index 000000000..7cd3b2be3
--- /dev/null
+++ b/streamer/examples/ppyoloe/cpp/CMakeLists.txt
@@ -0,0 +1,30 @@
+# Copyright (c) 2022 PaddlePaddle Authors.
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +PROJECT(streamer_ppyoloe C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.10) + +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) +include_directories(${FASTDEPLOY_INCS}) + +set(FDSTREAMER_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/../../../src/) +include_directories(${FDSTREAMER_INCLUDE_DIR}) +link_directories(${PROJECT_SOURCE_DIR}/../../../build/) + +find_package(PkgConfig REQUIRED) +pkg_check_modules(GSTAPP gstreamer-app-1.0 REQUIRED) +include_directories(${GSTAPP_INCLUDE_DIRS}) + +add_executable(streamer_demo main.cc) +target_link_libraries(streamer_demo fd_streamer) diff --git a/streamer/examples/ppyoloe/cpp/README.md b/streamer/examples/ppyoloe/cpp/README.md new file mode 120000 index 000000000..f18766817 --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/README.md @@ -0,0 +1 @@ +README_EN.md \ No newline at end of file diff --git a/streamer/examples/ppyoloe/cpp/README_CN.md b/streamer/examples/ppyoloe/cpp/README_CN.md new file mode 100644 index 000000000..7305edfee --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/README_CN.md @@ -0,0 +1,44 @@ +简体中文 | [English](README_EN.md) + +# FastDeploy Streamer PP-YOLOE C++ Example + +## 编译和运行 + +1. 需要先FastDeploy Streamer, 请参考[README](../../../README.md) + +2. 编译Example +``` +mkdir build && cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=[PATH-OF-FASTDEPLOY-INSTALL-DIR] +make -j +``` + +3. 下载模型 +``` +wget https://bj.bcebos.com/paddlehub/fastdeploy/ppyoloe_crn_l_300e_coco_onnx_without_scale_factor.tgz +tar xvf ppyoloe_crn_l_300e_coco_onnx_without_scale_factor.tgz +mv ppyoloe_crn_l_300e_coco_onnx_without_scale_factor/ model/ +``` + +4. 运行 +``` +cp ../nvinfer_config.txt . +cp ../streamer_cfg.yml . +./streamer_demo +``` + +## 导出ONNX模型,不包含NMS和scale factor +``` +# 导出Paddle推理模型,exclude_nms=True and trt=True +git clone https://github.com/PaddlePaddle/PaddleDetection.git +cd PaddleDetection +python tools/export_model.py -c configs/ppyoloe/ppyoloe_crn_l_300e_coco.yml -o weights=https://paddledet.bj.bcebos.com/models/ppyoloe_crn_l_300e_coco.pdparams exclude_nms=True trt=True --output_dir inference_model + +# 转换为ONNX +paddle2onnx --model_dir inference_model/ppyoloe_crn_l_300e_coco/ --model_filename model.pdmodel --params_filename model.pdiparams --save_file ppyoloe.onnx --deploy_backend tensorrt --enable_dev_version True + +# 裁剪ONNX,删除scale factor +git clone https://github.com/PaddlePaddle/Paddle2ONNX.git +cd Paddle2ONNX +python tools/onnx/prune_onnx_model.py --model ../PaddleDetection/ppyoloe.onnx --output_names concat_14.tmp_0 p2o.Mul.245 --save_file ppyoloe_without_scale_factor.onnx +``` diff --git a/streamer/examples/ppyoloe/cpp/README_EN.md b/streamer/examples/ppyoloe/cpp/README_EN.md new file mode 100644 index 000000000..192bab502 --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/README_EN.md @@ -0,0 +1,44 @@ +English | [简体中文](README_CN.md) + +# FastDeploy Streamer PP-YOLOE C++ Example + +## Build and Run + +1. 
Build FastDeploy Streamer first, [README](../../../README.md) + +2. Build Example +``` +mkdir build && cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=[PATH-OF-FASTDEPLOY-INSTALL-DIR] +make -j +``` + +3. Download model +``` +wget https://bj.bcebos.com/paddlehub/fastdeploy/ppyoloe_crn_l_300e_coco_onnx_without_scale_factor.tgz +tar xvf ppyoloe_crn_l_300e_coco_onnx_without_scale_factor.tgz +mv ppyoloe_crn_l_300e_coco_onnx_without_scale_factor/ model/ +``` + +4. Run +``` +cp ../nvinfer_config.txt . +cp ../streamer_cfg.yml . +./streamer_demo +``` + +## Export ONNX excluding scale_factor and NMS +``` +# Export inference model with exclude_nms=True and trt=True +git clone https://github.com/PaddlePaddle/PaddleDetection.git +cd PaddleDetection +python tools/export_model.py -c configs/ppyoloe/ppyoloe_crn_l_300e_coco.yml -o weights=https://paddledet.bj.bcebos.com/models/ppyoloe_crn_l_300e_coco.pdparams exclude_nms=True trt=True --output_dir inference_model + +# Convert to ONNX +paddle2onnx --model_dir inference_model/ppyoloe_crn_l_300e_coco/ --model_filename model.pdmodel --params_filename model.pdiparams --save_file ppyoloe.onnx --deploy_backend tensorrt --enable_dev_version True + +# Prune ONNX to delete scale factor +git clone https://github.com/PaddlePaddle/Paddle2ONNX.git +cd Paddle2ONNX +python tools/onnx/prune_onnx_model.py --model ../PaddleDetection/ppyoloe.onnx --output_names concat_14.tmp_0 p2o.Mul.245 --save_file ppyoloe_without_scale_factor.onnx +``` diff --git a/streamer/examples/ppyoloe/cpp/main.cc b/streamer/examples/ppyoloe/cpp/main.cc new file mode 100644 index 000000000..5e9a8c9ff --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/main.cc @@ -0,0 +1,22 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
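+
+// Demo flow, as configured by streamer_cfg.yml in this directory: FDStreamer
+// parses the YAML, builds a DeepStream pipeline (four file sources ->
+// nvstreammux -> nvinfer running the PP-YOLOE model from nvinfer_config.txt ->
+// nvtracker -> 2x2 nvmultistreamtiler -> on-screen display -> hardware encode
+// to out.mp4), and Run() blocks until EOS or an error is posted on the bus.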
+ +#include "fd_streamer.h" + +int main(int argc, char* argv[]) { + auto streamer = fastdeploy::streamer::FDStreamer(); + streamer.Init("streamer_cfg.yml"); + streamer.Run(); + return 0; +} diff --git a/streamer/examples/ppyoloe/cpp/nvinfer_config.txt b/streamer/examples/ppyoloe/cpp/nvinfer_config.txt new file mode 100644 index 000000000..c0a481f5c --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/nvinfer_config.txt @@ -0,0 +1,23 @@ +[property] +batch-size=4 +net-scale-factor=0.0039215697906911373 +# 0=RGB, 1=BGR +model-color-format=0 +onnx-file=model/ppyoloe.onnx +model-engine-file=model/ppyoloe.onnx_b4_gpu0_fp32.engine +labelfile-path=model/labels.txt +## 0=FP32, 1=INT8, 2=FP16 mode +network-mode=0 +num-detected-classes=80 +gie-unique-id=1 +network-type=0 +## 1=DBSCAN, 2=NMS, 3= DBSCAN+NMS Hybrid, 4 = None(No clustering) +cluster-mode=2 +maintain-aspect-ratio=1 +parse-bbox-func-name=NvDsInferParseCustomPPYOLOE +custom-lib-path=../../../../build/libfd_streamer.so + +[class-attrs-all] +nms-iou-threshold=0.45 +pre-cluster-threshold=0.25 +topk=300 diff --git a/streamer/examples/ppyoloe/cpp/streamer_cfg.yml b/streamer/examples/ppyoloe/cpp/streamer_cfg.yml new file mode 100644 index 000000000..eb62e5959 --- /dev/null +++ b/streamer/examples/ppyoloe/cpp/streamer_cfg.yml @@ -0,0 +1,46 @@ +app: + type: video_analytics + enable-perf-measurement: true + perf-measurement-interval-sec: 5 + +nvurisrcbin_list: + uri-list: + - file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov + - file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov + - file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov + - file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov + pad-prefix: mux.sink_ + gpu-id: 0 + +nvstreammux: + name: mux + gpu-id: 0 + batch-size: 4 + width: 1920 + height: 1080 + batched-push-timeout: 40000 # 40ms + +nvinfer: + gpu-id: 0 + config-file-path: nvinfer_config.txt + +nvtracker: + gpu-id: 0 + tracker-width: 640 + tracker-height: 640 + ll-lib-file: /opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so + ll-config-file: /opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/config_tracker_NvDCF_perf.yml + enable-batch-process: true + +nvmultistreamtiler: + gpu-id: 0 + rows: 2 + columns: 2 + +nvosdbin: + gpu-id: 0 + +nvvideoencfilesinkbin: + gpu-id: 0 + bitrate: 4000 + output-file: out.mp4 diff --git a/streamer/examples/video_decoder/cpp/CMakeLists.txt b/streamer/examples/video_decoder/cpp/CMakeLists.txt new file mode 100644 index 000000000..2e551a341 --- /dev/null +++ b/streamer/examples/video_decoder/cpp/CMakeLists.txt @@ -0,0 +1,30 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
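For reference, the YamlParser added later in this patch turns the ppyoloe streamer_cfg.yml above into a single gst-launch-style description string; a rough sketch of that string (wrapped here for readability, long paths abbreviated, not verified output) could look like:
```
nvurisrcbin uri=file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov gpu-id=0 ! mux.sink_0
(one nvurisrcbin per entry in uri-list, linked to mux.sink_1 ... mux.sink_3)
nvstreammux name=mux gpu-id=0 batch-size=4 width=1920 height=1080 batched-push-timeout=40000
  ! nvinfer gpu-id=0 config-file-path=nvinfer_config.txt
  ! nvtracker gpu-id=0 tracker-width=640 tracker-height=640 ll-lib-file=... ll-config-file=... enable-batch-process=true
  ! nvmultistreamtiler gpu-id=0 rows=2 columns=2
  ! nvosdbin gpu-id=0
  ! nvvideoencfilesinkbin gpu-id=0 bitrate=4000 output-file=out.mp4
```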
+PROJECT(video_decoder C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.10) + +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) +include_directories(${FASTDEPLOY_INCS}) + +set(FDSTREAMER_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/../../../src/) +include_directories(${FDSTREAMER_INCLUDE_DIR}) +link_directories(${PROJECT_SOURCE_DIR}/../../../build/) + +find_package(PkgConfig REQUIRED) +pkg_check_modules(GSTAPP gstreamer-app-1.0 REQUIRED) +include_directories(${GSTAPP_INCLUDE_DIRS}) + +add_executable(video_decoder main.cc) +target_link_libraries(video_decoder fd_streamer ${FASTDEPLOY_LIBS}) diff --git a/streamer/examples/video_decoder/cpp/README.md b/streamer/examples/video_decoder/cpp/README.md new file mode 120000 index 000000000..f18766817 --- /dev/null +++ b/streamer/examples/video_decoder/cpp/README.md @@ -0,0 +1 @@ +README_EN.md \ No newline at end of file diff --git a/streamer/examples/video_decoder/cpp/README_CN.md b/streamer/examples/video_decoder/cpp/README_CN.md new file mode 100644 index 000000000..1e91e6783 --- /dev/null +++ b/streamer/examples/video_decoder/cpp/README_CN.md @@ -0,0 +1,20 @@ +简体中文 | [English](README_EN.md) + +# FastDeploy Streamer Video Decoder Example + +## 编译和运行 + +1. 需要先FastDeploy Streamer, 请参考[README](../../../README.md) + +2. 编译Example +``` +mkdir build && cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=[PATH-OF-FASTDEPLOY-INSTALL-DIR] +make -j +``` + +3. 运行 +``` +cp ../streamer_cfg.yml . +./video_decoder +``` diff --git a/streamer/examples/video_decoder/cpp/README_EN.md b/streamer/examples/video_decoder/cpp/README_EN.md new file mode 100644 index 000000000..4b6a0905b --- /dev/null +++ b/streamer/examples/video_decoder/cpp/README_EN.md @@ -0,0 +1,20 @@ +English | [简体中文](README_CN.md) + +# FastDeploy Streamer Video Decoder Example + +## Build and Run + +1. Build FastDeploy Streamer first, [README](../../../README.md) + +2. Build Example +``` +mkdir build && cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=[PATH-OF-FASTDEPLOY-INSTALL-DIR] +make -j +``` + +3. Run +``` +cp ../streamer_cfg.yml . +./video_decoder +``` diff --git a/streamer/examples/video_decoder/cpp/main.cc b/streamer/examples/video_decoder/cpp/main.cc new file mode 100644 index 000000000..0b3d3435e --- /dev/null +++ b/streamer/examples/video_decoder/cpp/main.cc @@ -0,0 +1,43 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
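+
+// Demo flow, as configured by streamer_cfg.yml in this directory: nvurisrcbin
+// decodes the sample video, nvvideoconvert plus a capsfilter convert it to BGR,
+// and an appsink hands frames back to the application. RunAsync() drives the
+// GLib main loop on a background thread while the loop below pulls each frame
+// into an FDTensor of shape {height, width, 3} and writes it out as a JPEG.
+// Note that cv::imwrite does not create the out/ directory; it must already exist.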
+ +#include "fd_streamer.h" +#include "fastdeploy/utils/perf.h" +#include + +int main(int argc, char* argv[]) { + auto streamer = fastdeploy::streamer::FDStreamer(); + streamer.Init("streamer_cfg.yml"); + streamer.RunAsync(); + int count = 0; + fastdeploy::FDTensor tensor; + fastdeploy::TimeCounter tc; + tc.Start(); + while (1) { + bool ret = streamer.TryPullFrame(tensor); + if (!ret) { + if (streamer.Destroyed()) break; + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + continue; + } + count++; + tensor.PrintInfo(); + cv::Mat mat(tensor.shape[0], tensor.shape[1], CV_8UC3, tensor.Data()); + cv::imwrite("out/" + std::to_string(count) + ".jpg", mat); + } + std::cout << "Total number of frames: " << count << std::endl; + tc.End(); + tc.PrintInfo(); + return 0; +} diff --git a/streamer/examples/video_decoder/cpp/streamer_cfg.yml b/streamer/examples/video_decoder/cpp/streamer_cfg.yml new file mode 100644 index 000000000..9ab1985fb --- /dev/null +++ b/streamer/examples/video_decoder/cpp/streamer_cfg.yml @@ -0,0 +1,19 @@ +app: + type: video_decoder + enable-perf-measurement: true + perf-measurement-interval-sec: 5 + +nvurisrcbin: + uri: file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_ride_bike.mov + gpu-id: 0 + +nvvideoconvert: + gpu-id: 0 + +capsfilter: + caps: video/x-raw,format=(string)BGR + +appsink: + sync: true + max-buffers: 60 + drop: false diff --git a/streamer/src/app/base_app.cc b/streamer/src/app/base_app.cc new file mode 100644 index 000000000..dee7c1929 --- /dev/null +++ b/streamer/src/app/base_app.cc @@ -0,0 +1,135 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
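+
+// BaseApp owns the pieces shared by all apps: it builds the GStreamer pipeline
+// from the YAML config (YamlParser::BuildPipelineFromConfig), installs a bus
+// watch that quits the main loop on EOS or error, optionally attaches an FPS
+// probe to the sink bin when enable-perf-measurement is set, and exposes
+// Run() (blocking) as well as RunAsync() (GLib main loop on a std::async task).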
+ +#include "app/base_app.h" +#include "app/yaml_parser.h" +#include "gstreamer/utils.h" +#include "gstreamer/perf.h" + +namespace fastdeploy { +namespace streamer { + +static GMutex fps_lock; + +static gboolean bus_watch_callback(GstBus* bus, GstMessage* msg, gpointer data) { + GMainLoop* loop = (GMainLoop*)data; + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_EOS: + g_print("End of stream\n"); + g_main_loop_quit(loop); + break; + case GST_MESSAGE_ERROR: { + gchar* debug; + GError* error; + gst_message_parse_error(msg, &error, &debug); + g_printerr("ERROR from element %s: %s\n", + GST_OBJECT_NAME(msg->src), error->message); + if (debug) + g_printerr("Error details: %s\n", debug); + g_free(debug); + g_error_free(error); + g_main_loop_quit(loop); + break; + } + default: + break; + } + return TRUE; +} + +static void PerfCallbackFunc(gpointer context, PerfResult* perf) { + g_mutex_lock(&fps_lock); + std::cout << "FPS: " << perf->fps + << ", total avg.: " << perf->fps_avg << std::endl; + g_mutex_unlock(&fps_lock); +} + +bool BaseApp::Init(const std::string& config_file) { + gst_init(NULL, NULL); + loop_ = g_main_loop_new(NULL, FALSE); + + YamlParser parser(config_file); + pipeline_ = parser.BuildPipelineFromConfig(); + + GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_)); + bus_watch_id_ = gst_bus_add_watch(bus, bus_watch_callback, loop_); + gst_object_unref(bus); + + SetupPerfMeasurement(); + return true; +} + +bool BaseApp::Run() { + gst_element_set_state(pipeline_, GST_STATE_PLAYING); + + /* Wait till pipeline encounters an error or EOS */ + g_print("Running...\n"); + g_main_loop_run(loop_); + + Destroy(); + return true; +} + +static void MainLoopThread(BaseApp* app) { + g_main_loop_run(app->GetLoop()); + app->Destroy(); +} + +bool BaseApp::RunAsync() { + gst_element_set_state(pipeline_, GST_STATE_PLAYING); + g_print("Running Asynchronous...\n"); + // std::thread t(MainLoopThread, this); + // thread_ = std::move(t); + std::future fut = std::async(std::launch::async, MainLoopThread, this); + future_ = std::move(fut); + return true; +} + +void BaseApp::SetupPerfMeasurement() { + if (!app_config_.enable_perf_measurement) return; + + GstElement* elem = NULL; + auto elem_names = GetSinkElemNames(GST_BIN(pipeline_)); + for (auto& elem_name : elem_names) { + std::cout << elem_name << std::endl; + if (elem_name.find("nvvideoencfilesinkbin") != std::string::npos) { + elem = gst_bin_get_by_name(GST_BIN(pipeline_), elem_name.c_str()); + } else if (elem_name.find("appsink") != std::string::npos) { + elem = gst_bin_get_by_name(GST_BIN(pipeline_), elem_name.c_str()); + } + } + FDASSERT(elem != NULL, "Can't find a properly sink bin in the pipeline"); + + GstPad* perf_pad = gst_element_get_static_pad(elem, "sink"); + FDASSERT(perf_pad != NULL, "Unable to get sink pad"); + + perf_ctx_.user_data = nullptr; + EnablePerfMeasurement(&perf_ctx_, perf_pad, + (gulong)(app_config_.perf_interval_sec), PerfCallbackFunc); + + gst_object_unref(perf_pad); +} + +void BaseApp::Destroy() { + g_print("Returned, stopping playback\n"); + gst_element_set_state(pipeline_, GST_STATE_NULL); + g_print("Deleting pipeline\n"); + gst_object_unref(GST_OBJECT(pipeline_)); + g_source_remove(bus_watch_id_); + g_main_loop_unref(loop_); + destroyed_ = true; +} + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/base_app.h b/streamer/src/app/base_app.h new file mode 100644 index 000000000..db1827b5b --- /dev/null +++ b/streamer/src/app/base_app.h @@ -0,0 +1,86 @@ +// Copyright (c) 2022 
PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "fastdeploy/utils/utils.h" +#include "gstreamer/types.h" + +#include +#include // NOLINT + +namespace fastdeploy { +namespace streamer { + +enum AppType { + VIDEO_ANALYTICS, ///< Video analytics app + VIDEO_DECODER, ///< Video decoder app +}; + +struct AppConfig { + AppType type; + bool enable_perf_measurement = false; + int perf_interval_sec = 5; +}; + +/*! @brief Base App class + */ +class BaseApp { + public: + BaseApp() {} + explicit BaseApp(AppConfig& app_config) { + app_config_ = app_config; + } + virtual ~BaseApp() = default; + + virtual bool Init(const std::string& config_file); + + bool Run(); + + bool RunAsync(); + + void Destroy(); + + void SetupPerfMeasurement(); + + AppConfig* GetAppConfig() { + return &app_config_; + } + + GstElement* GetPipeline() { + return pipeline_; + } + + GMainLoop* GetLoop() { + return loop_; + } + + guint GetBusId() { + return bus_watch_id_; + } + + bool Destroyed() { + return destroyed_; + } + + protected: + AppConfig app_config_; + GstElement* pipeline_; + GMainLoop* loop_; + guint bus_watch_id_; + PerfContext perf_ctx_; + std::future future_; + bool destroyed_ = false; +}; +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/video_analytics.cc b/streamer/src/app/video_analytics.cc new file mode 100644 index 000000000..54b5e7ae1 --- /dev/null +++ b/streamer/src/app/video_analytics.cc @@ -0,0 +1,21 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "app/video_analytics.h" + +namespace fastdeploy { +namespace streamer { + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/video_analytics.h b/streamer/src/app/video_analytics.h new file mode 100644 index 000000000..e256be4b7 --- /dev/null +++ b/streamer/src/app/video_analytics.h @@ -0,0 +1,33 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "app/base_app.h" +#include "fastdeploy/utils/utils.h" + +#include + +namespace fastdeploy { +namespace streamer { + +/*! @brief VideoAnalyticsApp class + */ +class FASTDEPLOY_DECL VideoAnalyticsApp : public BaseApp { + public: + explicit VideoAnalyticsApp(AppConfig& app_config) : BaseApp(app_config) {} + + private: +}; +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/video_decoder.cc b/streamer/src/app/video_decoder.cc new file mode 100644 index 000000000..7c11e7dd9 --- /dev/null +++ b/streamer/src/app/video_decoder.cc @@ -0,0 +1,69 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "app/video_decoder.h" +#include "gstreamer/utils.h" + +namespace fastdeploy { +namespace streamer { + +bool VideoDecoderApp::Init(const std::string& config_file) { + FDINFO << "this " << std::endl; + BaseApp::Init(config_file); + GetAppsinkFromPipeline(); + return true; +} + +bool VideoDecoderApp::TryPullFrame(FDTensor& tensor, int timeout_ms) { + GstSample* sample = gst_app_sink_try_pull_sample(appsink_, + timeout_ms * GST_MSECOND); + if (sample == NULL) { + return false; + } + GstCaps* caps = NULL; + uint8_t* data = nullptr; + Frame frame; + do { + bool ret = GetFrameFromSample(sample, frame); + if (!ret) { + FDERROR << "Failed to get buffer from sample." << std::endl; + break; + } + FDASSERT(frame.device == Device::CPU, + "Currently, only CPU frame is supported"); + + std::vector shape = GetFrameShape(frame); + tensor.Resize(shape, FDDataType::UINT8, "", frame.device); + FDTensor::CopyBuffer(tensor.Data(), frame.data, tensor.Nbytes(), + tensor.device); + } while (false); + + if (sample) gst_sample_unref(sample); + return true; +} + +void VideoDecoderApp::GetAppsinkFromPipeline() { + GstElement* elem = NULL; + auto elem_names = GetSinkElemNames(GST_BIN(pipeline_)); + for (auto& elem_name : elem_names) { + std::cout << elem_name << std::endl; + if (elem_name.find("appsink") != std::string::npos) { + elem = gst_bin_get_by_name(GST_BIN(pipeline_), elem_name.c_str()); + } + } + FDASSERT(elem != NULL, "Can't find a appsink in the pipeline"); + appsink_ = GST_APP_SINK_CAST(elem); +} +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/video_decoder.h b/streamer/src/app/video_decoder.h new file mode 100644 index 000000000..1bab3e7d6 --- /dev/null +++ b/streamer/src/app/video_decoder.h @@ -0,0 +1,41 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "app/base_app.h" +#include "fastdeploy/utils/utils.h" +#include "fastdeploy/core/fd_tensor.h" + +#include +#include + +namespace fastdeploy { +namespace streamer { + +/*! @brief VideoDecoderApp class + */ +class FASTDEPLOY_DECL VideoDecoderApp : public BaseApp { + public: + explicit VideoDecoderApp(AppConfig& app_config) : BaseApp(app_config) {} + + bool Init(const std::string& config_file); + + bool TryPullFrame(FDTensor& tensor, int timeout_ms); + + private: + void GetAppsinkFromPipeline(); + GstAppSink* appsink_; +}; +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/yaml_parser.cc b/streamer/src/app/yaml_parser.cc new file mode 100644 index 000000000..92c385777 --- /dev/null +++ b/streamer/src/app/yaml_parser.cc @@ -0,0 +1,125 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "app/yaml_parser.h" +#include "gstreamer/utils.h" + +namespace fastdeploy { +namespace streamer { + +YamlParser::YamlParser(const std::string& config_file) { + try { + yaml_config_ = YAML::LoadFile(config_file); + } catch (YAML::BadFile& e) { + FDERROR << "Failed to load yaml file " << config_file + << ", maybe you should check this file." 
<< std::endl; + } + config_file_ = config_file; +} + +void YamlParser::ParseAppConfg(AppConfig& app_config) { + ValidateConfig(); + auto elem = yaml_config_["app"]; + + auto type_str = elem["type"].as(); + if (type_str == "video_analytics") { + app_config.type = AppType::VIDEO_ANALYTICS; + } else if (type_str == "video_decoder") { + app_config.type = AppType::VIDEO_DECODER; + } else { + FDASSERT(false, "Unsupported app type: %s.", type_str.c_str()); + } + + app_config.enable_perf_measurement = elem["enable-perf-measurement"].as(); + if (app_config.enable_perf_measurement) { + app_config.perf_interval_sec = elem["perf-measurement-interval-sec"].as(); + } + app_config_ = app_config; +} + +void YamlParser::ValidateConfig() { + auto first_elem = yaml_config_.begin()->first.as(); + if (first_elem != "app") { + FDASSERT(false, "First config element must be app, but got %s.", + first_elem.c_str()); + } +} + +GstElement* YamlParser::BuildPipelineFromConfig() { + auto pipeline_desc = YamlToPipelineDescStr(); + pipeline_ = CreatePipeline(pipeline_desc); + return pipeline_; +} + +std::string YamlParser::YamlToPipelineDescStr() { + for (const auto& elem : yaml_config_) { + std::string elem_name = elem.first.as(); + std::cout << elem_name << std::endl; + ParseElement(elem_name, elem.second); + } + std::string pipeline_desc = ""; + for (size_t i = 0; i < elem_descs_.size(); i++) { + pipeline_desc += elem_descs_[i]; + if (elem_descs_[i].find('!') != std::string::npos) continue; + if (i >= elem_descs_.size() - 1) continue; + pipeline_desc += "! "; + } + return pipeline_desc; +} + +void YamlParser::ParseElement(const std::string& name, const YAML::Node& properties) { + if (name == "app") return; + + if (name == "nvurisrcbin_list") { + ParseNvUriSrcBinList(name, properties); + return; + } + + std::string elem_desc = name + " "; + for (auto it = properties.begin(); it != properties.end(); it++) { + elem_desc += ParseProperty(it->first, it->second) + " "; + } + elem_descs_.push_back(elem_desc); +} + +void YamlParser::ParseNvUriSrcBinList(const std::string& name, const YAML::Node& properties) { + std::string elem_name = "nvurisrcbin"; + + auto uri_list = properties["uri-list"].as>(); + auto pad_prefix = properties["pad-prefix"].as(); + for (size_t i = 0; i < uri_list.size(); i++) { + std::string elem_desc = elem_name + " "; + elem_desc += "uri=" + uri_list[i] + " "; + for (auto it = properties.begin(); it != properties.end(); it++) { + auto prop_name = it->first.as(); + if (prop_name == "uri-list" || prop_name == "pad-prefix") continue; + elem_desc += ParseProperty(it->first, it->second) + " "; + } + elem_desc += "! " + pad_prefix + std::to_string(i) + " "; + elem_descs_.push_back(elem_desc); + } +} + +std::string YamlParser::ParseProperty(const YAML::Node& name, const YAML::Node& value) { + std::string prop_name = name.as(); + std::string prop_value = value.as(); + + if (prop_name == "_link_to") { + return "! " + prop_value + " "; + } + + return prop_name + "=" + prop_value; +} +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/app/yaml_parser.h b/streamer/src/app/yaml_parser.h new file mode 100644 index 000000000..966a9b623 --- /dev/null +++ b/streamer/src/app/yaml_parser.h @@ -0,0 +1,58 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "yaml-cpp/yaml.h" +#include "app/base_app.h" +#include + +namespace fastdeploy { +namespace streamer { + +/*! @brief YAML Parser class, to parse stream configs from yaml file + */ +class YamlParser { + public: + /** \brief Create a YAML parser + * + * \param[in] config_file Path of configuration file + */ + explicit YamlParser(const std::string& config_file); + + void ParseAppConfg(AppConfig& app_config); + + void ValidateConfig(); + + GstElement* BuildPipelineFromConfig(); + + private: + std::string YamlToPipelineDescStr(); + + void ParseElement(const std::string& name, const YAML::Node& properties); + + void ParseNvUriSrcBinList(const std::string& name, + const YAML::Node& properties); + + static std::string ParseProperty(const YAML::Node& name, + const YAML::Node& value); + + AppConfig app_config_; + std::string config_file_; + YAML::Node yaml_config_; + GstElement* pipeline_; + std::vector elem_descs_; +}; +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/deepstream/bbox_parser.cc b/streamer/src/deepstream/bbox_parser.cc new file mode 100644 index 000000000..7a32de40e --- /dev/null +++ b/streamer/src/deepstream/bbox_parser.cc @@ -0,0 +1,76 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "nvdsinfer_custom_impl.h" +#include +#include +#include +#include +#include + +static float clamp(const float val, const float min_val, const float max_val) { + assert(min_val <= max_val); + return std::min(max_val, std::max(min_val, val)); +} + +extern "C" bool NvDsInferParseCustomPPYOLOE( + std::vector const& outputLayersInfo, + NvDsInferNetworkInfo const& networkInfo, + NvDsInferParseDetectionParams const& detectionParams, + std::vector& objectList) { + if (outputLayersInfo.empty()) { + std::cerr << "Could not find output layer in bbox parsing" << std::endl;; + return false; + } + + int num_classes = outputLayersInfo[0].inferDims.d[0]; + if (num_classes != detectionParams.numClassesConfigured) { + std::cerr << "WARNING: Num classes mismatch. 
Configured:" + << detectionParams.numClassesConfigured + << ", detected by network: " << num_classes << std::endl; + assert(-1); + } + + int num_obj = outputLayersInfo[0].inferDims.d[1]; + float* score_data = (float*)outputLayersInfo[0].buffer; + float* bbox_data = (float*)outputLayersInfo[1].buffer; + + for (int i = 0; i < num_obj; i++) { + float max_score = -1.0f; + int class_id = -1; + for (int j = 0; j < num_classes; j++) { + float score = score_data[num_obj * j + i]; + if (score > max_score) { + max_score = score; + class_id = j; + } + } + NvDsInferParseObjectInfo obj; + obj.classId = (uint32_t)class_id; + obj.detectionConfidence = max_score; + obj.left = bbox_data[4 * i]; + obj.top = bbox_data[4 * i + 1]; + obj.width = bbox_data[4 * i + 2] - bbox_data[4 * i]; + obj.height = bbox_data[4 * i + 3] - bbox_data[4 * i + 1]; + obj.left = clamp(obj.left, 0, networkInfo.width); + obj.top = clamp(obj.top, 0, networkInfo.height); + obj.width = clamp(obj.width, 0, networkInfo.width); + obj.height = clamp(obj.height, 0, networkInfo.height); + objectList.push_back(obj); + } + return true; +} + +/* Check that the custom function has been defined correctly */ +CHECK_CUSTOM_PARSE_FUNC_PROTOTYPE(NvDsInferParseCustomPPYOLOE); diff --git a/streamer/src/fd_streamer.cc b/streamer/src/fd_streamer.cc new file mode 100644 index 000000000..ecd78a499 --- /dev/null +++ b/streamer/src/fd_streamer.cc @@ -0,0 +1,56 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fd_streamer.h" +#include "app/yaml_parser.h" +#include "app/video_analytics.h" +#include "app/video_decoder.h" +#include "fastdeploy/utils/unique_ptr.h" + +namespace fastdeploy { +namespace streamer { + +bool FDStreamer::Init(const std::string& config_file) { + AppConfig app_config; + YamlParser parser(config_file); + parser.ParseAppConfg(app_config); + if (app_config.type == AppType::VIDEO_ANALYTICS) { + app_ = utils::make_unique(app_config); + auto casted_app = dynamic_cast(app_.get()); + casted_app->Init(config_file); + } else if (app_config.type == AppType::VIDEO_DECODER) { + app_ = utils::make_unique(app_config); + auto casted_app = dynamic_cast(app_.get()); + casted_app->Init(config_file); + } else { + FDASSERT(false, "Unsupported app type: %d.", app_config.type); + } + return true; +} + +bool FDStreamer::Run() { + return app_->Run(); +} + +bool FDStreamer::RunAsync() { + return app_->RunAsync(); +} + +bool FDStreamer::TryPullFrame(FDTensor& tensor, int timeout_ms) { + auto casted_app = dynamic_cast(app_.get()); + return casted_app->TryPullFrame(tensor, timeout_ms); +} + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/fd_streamer.h b/streamer/src/fd_streamer.h new file mode 100644 index 000000000..c5ec40dbf --- /dev/null +++ b/streamer/src/fd_streamer.h @@ -0,0 +1,52 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "app/base_app.h" +#include "fastdeploy/utils/utils.h" +#include "fastdeploy/core/fd_tensor.h" + +#include + +namespace fastdeploy { +namespace streamer { + +/*! @brief FDStreamer class, user inferfaces for FastDeploy Streamer + */ +class FASTDEPLOY_DECL FDStreamer { + public: + /** \brief Init FD streamer + * + * \param[in] config_file config file path + * \return true if the streamer is initialized, otherwise false + */ + bool Init(const std::string& config_file); + + bool Run(); + + bool RunAsync(); + + void SetupCallback(); + + bool TryPullFrame(FDTensor& tensor, int timeout_ms = 1); + + bool Destroyed() { + return app_->Destroyed(); + } + + private: + std::unique_ptr app_; +}; +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/gstreamer/perf.cc b/streamer/src/gstreamer/perf.cc new file mode 100644 index 000000000..50d0d6994 --- /dev/null +++ b/streamer/src/gstreamer/perf.cc @@ -0,0 +1,114 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "gstreamer/perf.h" +#include + +namespace fastdeploy { +namespace streamer { + +static GstPadProbeReturn SinkBinBufProbe(GstPad* pad, GstPadProbeInfo* info, gpointer u_data) { + PerfContext* ctx = (PerfContext*)u_data; + if (ctx->stop) return GST_PAD_PROBE_OK; + + g_mutex_lock(&ctx->lock); + if (ctx->buffer_cnt == 0) { + ctx->tc.Start(); + } + ctx->tc.End(); + ctx->buffer_cnt++; + + if (!ctx->first_buffer_arrived) { + ctx->total_tc.Start(); + ctx->first_buffer_arrived = true; + std::cout << "The first buffer after resuming arrives." 
<< std::endl; + } + ctx->total_tc.End(); + ctx->total_buffer_cnt++; + g_mutex_unlock(&ctx->lock); + return GST_PAD_PROBE_OK; +} + +static gboolean PerfMeasurementCallback(gpointer data) { + PerfContext* ctx = (PerfContext*)data; + PerfResult perf_result; + + g_mutex_lock(&ctx->lock); + if (ctx->stop) { + g_mutex_unlock(&ctx->lock); + return FALSE; + } + + if (ctx->buffer_cnt < 10) { + g_mutex_unlock(&ctx->lock); + return TRUE; + } + + double duration = ctx->tc.Duration(); + double total_duration = ctx->total_tc.Duration() + ctx->total_played_duration; + + perf_result.fps = ctx->buffer_cnt / duration; + perf_result.fps_avg = ctx->total_buffer_cnt / total_duration; + ctx->buffer_cnt = 0; + g_mutex_unlock(&ctx->lock); + + ctx->callback(ctx->user_data, &perf_result); + return TRUE; +} + +void PausePerfMeasurement(PerfContext* ctx) { + g_mutex_lock(&ctx->lock); + ctx->stop = true; + ctx->total_played_duration += ctx->total_tc.Duration(); + g_mutex_unlock(&ctx->lock); +} + +void ResumePerfMeasurement(PerfContext* ctx) { + g_mutex_lock(&ctx->lock); + if (!ctx->stop) { + g_mutex_unlock(&ctx->lock); + return; + } + + ctx->stop = false; + ctx->buffer_cnt = 0; + ctx->first_buffer_arrived = false; + if (!ctx->perf_measurement_timeout_id) { + ctx->perf_measurement_timeout_id = g_timeout_add( + ctx->measurement_interval_ms, PerfMeasurementCallback, ctx); + } + g_mutex_unlock(&ctx->lock); +} + +bool EnablePerfMeasurement(PerfContext* ctx, GstPad* sink_bin_pad, + gulong interval_sec, PerfCallback callback) { + if (!callback) { + return false; + } + + g_mutex_init(&ctx->lock); + ctx->perf_measurement_timeout_id = 0; + ctx->measurement_interval_ms = interval_sec * 1000; + ctx->callback = callback; + ctx->stop = TRUE; + ctx->sink_bin_pad = sink_bin_pad; + ctx->fps_measure_probe_id = gst_pad_add_probe(sink_bin_pad, + GST_PAD_PROBE_TYPE_BUFFER, SinkBinBufProbe, ctx, NULL); + + ResumePerfMeasurement(ctx); + return true; +} + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/gstreamer/perf.h b/streamer/src/gstreamer/perf.h new file mode 100644 index 000000000..7ceb2cd67 --- /dev/null +++ b/streamer/src/gstreamer/perf.h @@ -0,0 +1,29 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "gstreamer/types.h" +#include + +namespace fastdeploy { +namespace streamer { + +bool EnablePerfMeasurement(PerfContext* ctx, GstPad* sink_bin_pad, + gulong interval_sec, PerfCallback callback); + +void PausePerfMeasurement(PerfContext* ctx); +void ResumePerfMeasurement(PerfContext* ctx); + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/gstreamer/types.h b/streamer/src/gstreamer/types.h new file mode 100644 index 000000000..6f6ff68d1 --- /dev/null +++ b/streamer/src/gstreamer/types.h @@ -0,0 +1,60 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "fastdeploy/core/fd_type.h" +#include "fastdeploy/utils/perf.h" +#include + +namespace fastdeploy { +namespace streamer { +enum PixelFormat { + I420, + BGR +}; + +struct Frame { + int width; + int height; + PixelFormat format; + uint8_t* data = nullptr; + Device device = Device::CPU; +}; + +struct PerfResult { + double fps = 0.0; + double fps_avg = 0.0; +}; + +typedef void (*PerfCallback)(gpointer ctx, PerfResult* str); + +struct PerfContext { + gulong measurement_interval_ms; + gulong perf_measurement_timeout_id; + bool stop; + gpointer user_data; + GMutex lock; + PerfCallback callback; + GstPad* sink_bin_pad; + gulong fps_measure_probe_id; + uint64_t buffer_cnt = 0; + uint64_t total_buffer_cnt = 0; + TimeCounter tc; + TimeCounter total_tc; + double total_played_duration = 0.0; + bool first_buffer_arrived = false; +}; + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/gstreamer/utils.cc b/streamer/src/gstreamer/utils.cc new file mode 100644 index 000000000..21494e30a --- /dev/null +++ b/streamer/src/gstreamer/utils.cc @@ -0,0 +1,141 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
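As a quick sanity check on the Frame and PixelFormat definitions above, here is a small sketch (an illustration only, not part of the patch; FrameNbytes is a hypothetical helper) of the buffer sizes implied by GetFrameShape() in utils.cc below:
```
#include <cstddef>
#include <cstdio>

// Bytes occupied by one decoded frame, matching the shapes returned by
// GetFrameShape(): I420 -> {height * 3 / 2, width, 1}, BGR -> {height, width, 3}.
static size_t FrameNbytes(int width, int height, bool is_i420) {
  if (is_i420) return static_cast<size_t>(width) * height * 3 / 2;
  return static_cast<size_t>(width) * height * 3;
}

int main() {
  // A 1920x1080 frame: I420 -> 3110400 bytes, BGR -> 6220800 bytes.
  std::printf("%zu %zu\n", FrameNbytes(1920, 1080, true),
              FrameNbytes(1920, 1080, false));
  return 0;
}
```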
+ +#include "gstreamer/utils.h" + +namespace fastdeploy { +namespace streamer { +std::string GetElementName(GstElement* elem) { + gchar* name = gst_element_get_name(elem); + std::string res(name); + g_free(name); + return res; +} + +std::vector GetSinkElemNames(GstBin* bin) { + GstIterator *it; + GValue val = G_VALUE_INIT; + gboolean done = FALSE; + std::vector names; + + it = gst_bin_iterate_sinks(bin); + do { + switch (gst_iterator_next(it, &val)) { + case GST_ITERATOR_OK: { + GstElement* sink = static_cast(g_value_get_object(&val)); + names.push_back(GetElementName(sink)); + g_value_reset(&val); + break; + } + case GST_ITERATOR_RESYNC: + gst_iterator_resync(it); + break; + case GST_ITERATOR_ERROR: + GST_ERROR("Error iterating over %s's sink elements", + GST_ELEMENT_NAME(bin)); + case GST_ITERATOR_DONE: + g_value_unset(&val); + done = TRUE; + break; + } + } while (!done); + + gst_iterator_free(it); + return names; +} + +GstElement* CreatePipeline(const std::string& pipeline_desc) { + GError *error = NULL; + FDINFO << "Trying to launch pipeline: " << pipeline_desc << std::endl; + GstElement* pipeline = gst_parse_launch(pipeline_desc.c_str(), &error); + FDASSERT(pipeline != NULL, "Failed parse pipeline, error: %s", + error->message); + return pipeline; +} + +std::vector GetFrameShape(const Frame& frame) { + if (frame.format == PixelFormat::I420) { + return { frame.height * 3 / 2, frame.width, 1 }; + } else if (frame.format == PixelFormat::BGR) { + return { frame.height, frame.width, 3 }; + } else { + FDASSERT(false, "Unsupported format: %d.", frame.format); + } +} + +PixelFormat GetPixelFormat(const std::string& format) { + if (format == "I420") { + return PixelFormat::I420; + } else if (format == "BGR") { + return PixelFormat::BGR; + } else { + FDASSERT(false, "Unsupported format: %s.", format.c_str()); + } +} + +void GetFrameInfo(GstCaps* caps, Frame& frame) { + const GstStructure* struc = gst_caps_get_structure(caps, 0); + std::string name = gst_structure_get_name(struc); + + if (name.rfind("video", 0) != 0) { + FDASSERT(false, "GetFrameInfo only support video caps."); + } + + GstCapsFeatures* features = gst_caps_get_features(caps, 0); + if (gst_caps_features_contains(features, "memory:NVMM")) { + frame.device = Device::GPU; + } else { + frame.device = Device::CPU; + } + gst_structure_get_int(struc, "width", &frame.width); + gst_structure_get_int(struc, "height", &frame.height); + std::string format_str = gst_structure_get_string(struc, "format"); + frame.format = GetPixelFormat(format_str); +} + +bool GetFrameFromSample(GstSample* sample, Frame& frame) { + GstBuffer* buffer = NULL; + GstMapInfo map; + const GstStructure* info = NULL; + GstCaps* caps = NULL; + int sample_width = 0; + int sample_height = 0; + do { + buffer = gst_sample_get_buffer(sample); + if (buffer == NULL) { + FDERROR << "Failed to get buffer from sample." << std::endl; + break; + } + + gst_buffer_map(buffer, &map, GST_MAP_READ); + + if (map.data == NULL) { + FDERROR << "Appsink buffer data is empty." << std::endl; + break; + } + + caps = gst_sample_get_caps(sample); + if (caps == NULL) { + FDERROR << "Failed to get caps from sample." 
<< std::endl; + break; + } + frame.data = map.data; + GetFrameInfo(caps, frame); + } while (false); + if (buffer) gst_buffer_unmap(buffer, &map); + return true; +} + +} // namespace streamer +} // namespace fastdeploy diff --git a/streamer/src/gstreamer/utils.h b/streamer/src/gstreamer/utils.h new file mode 100644 index 000000000..18fde7532 --- /dev/null +++ b/streamer/src/gstreamer/utils.h @@ -0,0 +1,28 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#pragma once + +#include "gstreamer/types.h" +#include "fastdeploy/utils/utils.h" +#include + +namespace fastdeploy { +namespace streamer { +std::string GetElementName(GstElement* elem); +std::vector GetSinkElemNames(GstBin* bin); +GstElement* CreatePipeline(const std::string& pipeline_desc); +std::vector GetFrameShape(const Frame& frame); +bool GetFrameFromSample(GstSample* sample, Frame& frame); +} // namespace streamer +} // namespace fastdeploy
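The helpers declared in gstreamer/utils.h can also be exercised on their own. A minimal sketch (an illustration only, not part of the patch; it assumes a working GStreamer installation and uses the stock videotestsrc and fakesink elements):
```
#include <iostream>
#include <gst/gst.h>
#include "gstreamer/utils.h"

int main() {
  gst_init(nullptr, nullptr);
  // CreatePipeline() wraps gst_parse_launch() and asserts on parse errors.
  GstElement* pipeline = fastdeploy::streamer::CreatePipeline(
      "videotestsrc num-buffers=1 ! fakesink");
  // GetSinkElemNames() lists the bin's sink elements, e.g. "fakesink0".
  for (const auto& name :
       fastdeploy::streamer::GetSinkElemNames(GST_BIN(pipeline))) {
    std::cout << name << std::endl;
  }
  gst_object_unref(pipeline);
  return 0;
}
```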