[Other] add code and docs for ppclas examples (#1312)

* add code and docs for ppclas examples

* fix doc

* add code for printing results

* add ppcls demo and docs

* modify example according to refined c api

* modify example code and docs for ppcls and ppdet

* modify example code and docs for ppcls and ppdet

* update ppdet demo

* fix demo codes

* fix doc

* release resource when failed

* fix

* fix name

* fix name
This commit is contained in:
chenjian
2023-02-17 15:43:21 +08:00
committed by GitHub
parent 7c4e0d7ba0
commit 859896cd2c
14 changed files with 932 additions and 144 deletions

View File

@@ -33,7 +33,7 @@ public class PPYOLOE {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_ppyoloe_wrapper = fd_ppyoloe_wrapper =
FD_C_CreatesPPYOLOEWrapper(model_file, params_file, config_file, FD_C_CreatePPYOLOEWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -84,8 +84,8 @@ public class PPYOLOE {
// below are underlying C api // below are underlying C api
private IntPtr fd_ppyoloe_wrapper; private IntPtr fd_ppyoloe_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPPYOLOEWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePPYOLOEWrapper")]
private static extern IntPtr FD_C_CreatesPPYOLOEWrapper( private static extern IntPtr FD_C_CreatePPYOLOEWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPPYOLOEWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPPYOLOEWrapper")]
@@ -136,7 +136,7 @@ public class PicoDet {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_picodet_wrapper = fd_picodet_wrapper =
FD_C_CreatesPicoDetWrapper(model_file, params_file, config_file, FD_C_CreatePicoDetWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -187,8 +187,8 @@ public class PicoDet {
// below are underlying C api // below are underlying C api
private IntPtr fd_picodet_wrapper; private IntPtr fd_picodet_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPicoDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePicoDetWrapper")]
private static extern IntPtr FD_C_CreatesPicoDetWrapper( private static extern IntPtr FD_C_CreatePicoDetWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPicoDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPicoDetWrapper")]
@@ -241,7 +241,7 @@ public class PPYOLO {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_ppyolo_wrapper = fd_ppyolo_wrapper =
FD_C_CreatesPPYOLOWrapper(model_file, params_file, config_file, FD_C_CreatePPYOLOWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -292,8 +292,8 @@ public class PPYOLO {
// below are underlying C api // below are underlying C api
private IntPtr fd_ppyolo_wrapper; private IntPtr fd_ppyolo_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPPYOLOWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePPYOLOWrapper")]
private static extern IntPtr FD_C_CreatesPPYOLOWrapper( private static extern IntPtr FD_C_CreatePPYOLOWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPPYOLOWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPPYOLOWrapper")]
@@ -345,7 +345,7 @@ public class YOLOv3 {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_yolov3_wrapper = fd_yolov3_wrapper =
FD_C_CreatesYOLOv3Wrapper(model_file, params_file, config_file, FD_C_CreateYOLOv3Wrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -396,8 +396,8 @@ public class YOLOv3 {
// below are underlying C api // below are underlying C api
private IntPtr fd_yolov3_wrapper; private IntPtr fd_yolov3_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesYOLOv3Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateYOLOv3Wrapper")]
private static extern IntPtr FD_C_CreatesYOLOv3Wrapper( private static extern IntPtr FD_C_CreateYOLOv3Wrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyYOLOv3Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyYOLOv3Wrapper")]
@@ -449,7 +449,7 @@ public class PaddleYOLOX {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_paddleyolox_wrapper = fd_paddleyolox_wrapper =
FD_C_CreatesPaddleYOLOXWrapper(model_file, params_file, config_file, FD_C_CreatePaddleYOLOXWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -500,8 +500,8 @@ public class PaddleYOLOX {
// below are underlying C api // below are underlying C api
private IntPtr fd_paddleyolox_wrapper; private IntPtr fd_paddleyolox_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPaddleYOLOXWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePaddleYOLOXWrapper")]
private static extern IntPtr FD_C_CreatesPaddleYOLOXWrapper( private static extern IntPtr FD_C_CreatePaddleYOLOXWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOXWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOXWrapper")]
@@ -553,7 +553,7 @@ public class FasterRCNN {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_fasterrcnn_wrapper = fd_fasterrcnn_wrapper =
FD_C_CreatesFasterRCNNWrapper(model_file, params_file, config_file, FD_C_CreateFasterRCNNWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -604,8 +604,8 @@ public class FasterRCNN {
// below are underlying C api // below are underlying C api
private IntPtr fd_fasterrcnn_wrapper; private IntPtr fd_fasterrcnn_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesFasterRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateFasterRCNNWrapper")]
private static extern IntPtr FD_C_CreatesFasterRCNNWrapper( private static extern IntPtr FD_C_CreateFasterRCNNWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyFasterRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyFasterRCNNWrapper")]
@@ -657,7 +657,7 @@ public class MaskRCNN {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_maskrcnn_wrapper = fd_maskrcnn_wrapper =
FD_C_CreatesMaskRCNNWrapper(model_file, params_file, config_file, FD_C_CreateMaskRCNNWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -708,8 +708,8 @@ public class MaskRCNN {
// below are underlying C api // below are underlying C api
private IntPtr fd_maskrcnn_wrapper; private IntPtr fd_maskrcnn_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesMaskRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateMaskRCNNWrapper")]
private static extern IntPtr FD_C_CreatesMaskRCNNWrapper( private static extern IntPtr FD_C_CreateMaskRCNNWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyMaskRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyMaskRCNNWrapper")]
@@ -761,7 +761,7 @@ public class SSD {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_ssd_wrapper = fd_ssd_wrapper =
FD_C_CreatesSSDWrapper(model_file, params_file, config_file, FD_C_CreateSSDWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -812,8 +812,8 @@ public class SSD {
// below are underlying C api // below are underlying C api
private IntPtr fd_ssd_wrapper; private IntPtr fd_ssd_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesSSDWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateSSDWrapper")]
private static extern IntPtr FD_C_CreatesSSDWrapper( private static extern IntPtr FD_C_CreateSSDWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroySSDWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroySSDWrapper")]
@@ -865,7 +865,7 @@ public class PaddleYOLOv5 {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_paddleyolov5_wrapper = fd_paddleyolov5_wrapper =
FD_C_CreatesPaddleYOLOv5Wrapper(model_file, params_file, config_file, FD_C_CreatePaddleYOLOv5Wrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -916,8 +916,8 @@ public class PaddleYOLOv5 {
// below are underlying C api // below are underlying C api
private IntPtr fd_paddleyolov5_wrapper; private IntPtr fd_paddleyolov5_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPaddleYOLOv5Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePaddleYOLOv5Wrapper")]
private static extern IntPtr FD_C_CreatesPaddleYOLOv5Wrapper( private static extern IntPtr FD_C_CreatePaddleYOLOv5Wrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv5Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv5Wrapper")]
@@ -969,7 +969,7 @@ public class PaddleYOLOv6 {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_paddleyolov6_wrapper = fd_paddleyolov6_wrapper =
FD_C_CreatesPaddleYOLOv6Wrapper(model_file, params_file, config_file, FD_C_CreatePaddleYOLOv6Wrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1020,8 +1020,8 @@ public class PaddleYOLOv6 {
// below are underlying C api // below are underlying C api
private IntPtr fd_paddleyolov6_wrapper; private IntPtr fd_paddleyolov6_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPaddleYOLOv6Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePaddleYOLOv6Wrapper")]
private static extern IntPtr FD_C_CreatesPaddleYOLOv6Wrapper( private static extern IntPtr FD_C_CreatePaddleYOLOv6Wrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv6Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv6Wrapper")]
@@ -1073,7 +1073,7 @@ public class PaddleYOLOv7 {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_paddleyolov7_wrapper = fd_paddleyolov7_wrapper =
FD_C_CreatesPaddleYOLOv7Wrapper(model_file, params_file, config_file, FD_C_CreatePaddleYOLOv7Wrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1124,8 +1124,8 @@ public class PaddleYOLOv7 {
// below are underlying C api // below are underlying C api
private IntPtr fd_paddleyolov7_wrapper; private IntPtr fd_paddleyolov7_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPaddleYOLOv7Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePaddleYOLOv7Wrapper")]
private static extern IntPtr FD_C_CreatesPaddleYOLOv7Wrapper( private static extern IntPtr FD_C_CreatePaddleYOLOv7Wrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv7Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv7Wrapper")]
@@ -1177,7 +1177,7 @@ public class PaddleYOLOv8 {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_paddleyolov8_wrapper = fd_paddleyolov8_wrapper =
FD_C_CreatesPaddleYOLOv8Wrapper(model_file, params_file, config_file, FD_C_CreatePaddleYOLOv8Wrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1228,8 +1228,8 @@ public class PaddleYOLOv8 {
// below are underlying C api // below are underlying C api
private IntPtr fd_paddleyolov8_wrapper; private IntPtr fd_paddleyolov8_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPaddleYOLOv8Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePaddleYOLOv8Wrapper")]
private static extern IntPtr FD_C_CreatesPaddleYOLOv8Wrapper( private static extern IntPtr FD_C_CreatePaddleYOLOv8Wrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv8Wrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPaddleYOLOv8Wrapper")]
@@ -1281,7 +1281,7 @@ public class RTMDet {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_rtmdet_wrapper = fd_rtmdet_wrapper =
FD_C_CreatesRTMDetWrapper(model_file, params_file, config_file, FD_C_CreateRTMDetWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1332,8 +1332,8 @@ public class RTMDet {
// below are underlying C api // below are underlying C api
private IntPtr fd_rtmdet_wrapper; private IntPtr fd_rtmdet_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesRTMDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateRTMDetWrapper")]
private static extern IntPtr FD_C_CreatesRTMDetWrapper( private static extern IntPtr FD_C_CreateRTMDetWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyRTMDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyRTMDetWrapper")]
@@ -1385,7 +1385,7 @@ public class CascadeRCNN {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_cascadercnn_wrapper = fd_cascadercnn_wrapper =
FD_C_CreatesCascadeRCNNWrapper(model_file, params_file, config_file, FD_C_CreateCascadeRCNNWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1436,8 +1436,8 @@ public class CascadeRCNN {
// below are underlying C api // below are underlying C api
private IntPtr fd_cascadercnn_wrapper; private IntPtr fd_cascadercnn_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesCascadeRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateCascadeRCNNWrapper")]
private static extern IntPtr FD_C_CreatesCascadeRCNNWrapper( private static extern IntPtr FD_C_CreateCascadeRCNNWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyCascadeRCNNWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyCascadeRCNNWrapper")]
@@ -1489,7 +1489,7 @@ public class PSSDet {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_pssdet_wrapper = fd_pssdet_wrapper =
FD_C_CreatesPSSDetWrapper(model_file, params_file, config_file, FD_C_CreatePSSDetWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1540,8 +1540,8 @@ public class PSSDet {
// below are underlying C api // below are underlying C api
private IntPtr fd_pssdet_wrapper; private IntPtr fd_pssdet_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesPSSDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatePSSDetWrapper")]
private static extern IntPtr FD_C_CreatesPSSDetWrapper( private static extern IntPtr FD_C_CreatePSSDetWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPSSDetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyPSSDetWrapper")]
@@ -1593,7 +1593,7 @@ public class RetinaNet {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_retinanet_wrapper = fd_retinanet_wrapper =
FD_C_CreatesRetinaNetWrapper(model_file, params_file, config_file, FD_C_CreateRetinaNetWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1644,8 +1644,8 @@ public class RetinaNet {
// below are underlying C api // below are underlying C api
private IntPtr fd_retinanet_wrapper; private IntPtr fd_retinanet_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesRetinaNetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateRetinaNetWrapper")]
private static extern IntPtr FD_C_CreatesRetinaNetWrapper( private static extern IntPtr FD_C_CreateRetinaNetWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyRetinaNetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyRetinaNetWrapper")]
@@ -1697,7 +1697,7 @@ public class FCOS {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_fcos_wrapper = fd_fcos_wrapper =
FD_C_CreatesFCOSWrapper(model_file, params_file, config_file, FD_C_CreateFCOSWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1748,8 +1748,8 @@ public class FCOS {
// below are underlying C api // below are underlying C api
private IntPtr fd_fcos_wrapper; private IntPtr fd_fcos_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesFCOSWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateFCOSWrapper")]
private static extern IntPtr FD_C_CreatesFCOSWrapper( private static extern IntPtr FD_C_CreateFCOSWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyFCOSWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyFCOSWrapper")]
@@ -1801,7 +1801,7 @@ public class TTFNet {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_ttfnet_wrapper = fd_ttfnet_wrapper =
FD_C_CreatesTTFNetWrapper(model_file, params_file, config_file, FD_C_CreateTTFNetWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1852,8 +1852,8 @@ public class TTFNet {
// below are underlying C api // below are underlying C api
private IntPtr fd_ttfnet_wrapper; private IntPtr fd_ttfnet_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesTTFNetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateTTFNetWrapper")]
private static extern IntPtr FD_C_CreatesTTFNetWrapper( private static extern IntPtr FD_C_CreateTTFNetWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyTTFNetWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyTTFNetWrapper")]
@@ -1905,7 +1905,7 @@ public class TOOD {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_tood_wrapper = fd_tood_wrapper =
FD_C_CreatesTOODWrapper(model_file, params_file, config_file, FD_C_CreateTOODWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -1956,8 +1956,8 @@ public class TOOD {
// below are underlying C api // below are underlying C api
private IntPtr fd_tood_wrapper; private IntPtr fd_tood_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesTOODWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateTOODWrapper")]
private static extern IntPtr FD_C_CreatesTOODWrapper( private static extern IntPtr FD_C_CreateTOODWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyTOODWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyTOODWrapper")]
@@ -2009,7 +2009,7 @@ public class GFL {
custom_option = new RuntimeOption(); custom_option = new RuntimeOption();
} }
fd_gfl_wrapper = fd_gfl_wrapper =
FD_C_CreatesGFLWrapper(model_file, params_file, config_file, FD_C_CreateGFLWrapper(model_file, params_file, config_file,
custom_option.GetWrapperPtr(), model_format); custom_option.GetWrapperPtr(), model_format);
} }
@@ -2060,8 +2060,8 @@ public class GFL {
// below are underlying C api // below are underlying C api
private IntPtr fd_gfl_wrapper; private IntPtr fd_gfl_wrapper;
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreatesGFLWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_CreateGFLWrapper")]
private static extern IntPtr FD_C_CreatesGFLWrapper( private static extern IntPtr FD_C_CreateGFLWrapper(
string model_file, string params_file, string config_file, string model_file, string params_file, string config_file,
IntPtr fd_runtime_option_wrapper, ModelFormat model_format); IntPtr fd_runtime_option_wrapper, ModelFormat model_format);
[DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyGFLWrapper")] [DllImport("fastdeploy.dll", EntryPoint = "FD_C_DestroyGFLWrapper")]

View File

@@ -0,0 +1,13 @@
PROJECT(infer_demo C)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# 指定下载解压后的fastdeploy库路径
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
# 添加FastDeploy依赖头文件
include_directories(${FASTDEPLOY_INCS})
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.c)
target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})

View File

@@ -0,0 +1,183 @@
English | [简体中文](README_CN.md)
# PaddleClas C Deployment Example
This directory provides examples that use `infer.c` to quickly finish the deployment of PaddleClas models on CPU/GPU.
Before deployment, two steps require confirmation.
- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md).
- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md).
Taking ResNet50_vd inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model.
```bash
mkdir build
cd build
# Download FastDeploy precompiled library. Users can choose your appropriate version in the`FastDeploy Precompiled Library` mentioned above
wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
tar xvf fastdeploy-linux-x64-x.x.x.tgz
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
make -j
# Download ResNet50_vd model file and test images
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# CPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# GPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
The above command works for Linux or MacOS. Refer to
- [How to use FastDeploy C++ SDK in Windows](../../../../../docs/cn/faq/use_sdk_on_windows.md) for SDK use-pattern in Windows
## PaddleClas C Interface
### RuntimeOption
```c
FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper()
```
> Create a RuntimeOption object, and return a pointer to manipulate it.
>
> **Return**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object.
```c
void FD_C_RuntimeOptionWrapperUseCpu(
FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper)
```
> Enable Cpu inference.
>
> **Params**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object.
```c
void FD_C_RuntimeOptionWrapperUseGpu(
FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper,
int gpu_id)
```
> Enable GPU inference.
>
> **Params**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object.
> * **gpu_id**(int): gpu id
### Model
```c
FD_C_PaddleClasModelWrapper* FD_C_CreatePaddleClasModelWrapper(
const char* model_file, const char* params_file, const char* config_file,
FD_C_RuntimeOptionWrapper* runtime_option,
const FD_C_ModelFormat model_format)
```
> Create a PaddleClas model object, and return a pointer to manipulate it.
>
> **Params**
>
> * **model_file**(const char*): Model file path
> * **params_file**(const char*): Parameter file path
> * **config_file**(const char*): Configuration file path, which is the deployment yaml file exported by PaddleClas.
> * **runtime_option**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration
> * **model_format**(FD_C_ModelFormat): Model format. Paddle format by default
>
> **Return**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): Pointer to manipulate PaddleClas object.
#### Read and write image
```c
FD_C_Mat FD_C_Imread(const char* imgpath)
```
> Read an image, and return a pointer to cv::Mat.
>
> **Params**
>
> * **imgpath**(const char*): image path
>
> **Return**
>
> * **imgmat**(FD_C_Mat): pointer to cv::Mat object which holds the image.
```c
FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img);
```
> Write image to a file.
>
> **Params**
>
> * **savepath**(const char*): save path
> * **img**(FD_C_Mat): pointer to cv::Mat object
>
> **Return**
>
> * **result**(FD_C_Bool): bool to indicate success or failure
#### Prediction
```c
FD_C_Bool FD_C_PaddleClasModelWrapperPredict(
__fd_take FD_C_PaddleClasModelWrapper* fd_c_ppclas_wrapper, FD_C_Mat img,
FD_C_ClassifyResult* fd_c_ppclas_result)
```
>
> Predict an image, and generate classification result.
>
> **Params**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): pointer to manipulate PaddleClas object
> * **img**(FD_C_Mat): pointer to cv::Mat object, which can be obtained by FD_C_Imread interface
> * **fd_c_ppclas_result** (FD_C_ClassifyResult*): The classification result, including label_id, and the corresponding confidence. Refer to [Visual Model Prediction Results](../../../../../docs/api/vision_results/) for the description of ClassifyResult
#### Result
```c
FD_C_ClassifyResultWrapper* FD_C_CreateClassifyResultWrapperFromData(
FD_C_ClassifyResult* fd_c_classify_result)
```
>
> Create a pointer to FD_C_ClassifyResultWrapper structure, which contains `fastdeploy::vision::ClassifyResult` object in C++. You can call methods in C++ ClassifyResult object by C API with this pointer.
>
> **Params**
> * **fd_c_classify_result**(FD_C_ClassifyResult*): pointer to FD_C_ClassifyResult structure
>
> **Return**
> * **fd_c_classify_result_wrapper**(FD_C_ClassifyResultWrapper*): pointer to FD_C_ClassifyResultWrapper structure
```c
char* FD_C_ClassifyResultWrapperStr(
FD_C_ClassifyResultWrapper* fd_c_classify_result_wrapper);
```
>
> Call the Str() method of the `fastdeploy::vision::ClassifyResult` object contained in the FD_C_ClassifyResultWrapper structure, and return a string describing the information in the result.
>
> **Params**
> * **fd_c_classify_result_wrapper**(FD_C_ClassifyResultWrapper*): pointer to FD_C_ClassifyResultWrapper structure
>
> **Return**
> * **str**(char*): a string to describe information in result
- [Model Description](../../)
- [Python Deployment](../python)
- [Visual Model prediction results](../../../../../docs/api/vision_results/)
- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md)

View File

@@ -0,0 +1,189 @@
[English](README.md) | 简体中文
# PaddleClas C 部署示例
本目录下提供`infer_xxx.c`来调用C API快速完成PaddleClas系列模型在CPU/GPU上部署的示例。
在部署前,需确认以下两个步骤
- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md)
- 2. 根据开发环境下载预编译部署库和samples代码参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md)
以Linux上ResNet50_vd推理为例在本目录执行如下命令即可完成编译测试支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4)
```bash
mkdir build
cd build
# 下载FastDeploy预编译库用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用
wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz
tar xvf fastdeploy-linux-x64-x.x.x.tgz
cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x
make -j
# 下载ResNet50_vd模型文件和测试图片
wget https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz
tar -xvf ResNet50_vd_infer.tgz
wget https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
# CPU推理
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# GPU推理
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考:
- [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md)
如果用户使用华为昇腾NPU部署, 请参考以下方式在部署前初始化部署环境:
- [如何使用华为昇腾NPU部署](../../../../../docs/cn/faq/use_sdk_on_ascend.md)
## PaddleClas C API接口
### 配置
```c
FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper()
```
> 创建一个RuntimeOption的配置对象并且返回操作它的指针。
>
> **返回**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针
```c
void FD_C_RuntimeOptionWrapperUseCpu(
FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper)
```
> 开启CPU推理
>
> **参数**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针
```c
void FD_C_RuntimeOptionWrapperUseGpu(
FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper,
int gpu_id)
```
> 开启GPU推理
>
> **参数**
>
> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针
> * **gpu_id**(int): 显卡号
### 模型
```c
FD_C_PaddleClasModelWrapper* FD_C_CreatePaddleClasModelWrapper(
const char* model_file, const char* params_file, const char* config_file,
FD_C_RuntimeOptionWrapper* runtime_option,
const FD_C_ModelFormat model_format)
```
> 创建一个PaddleClas的模型并且返回操作它的指针。
>
> **参数**
>
> * **model_file**(const char*): 模型文件路径
> * **params_file**(const char*): 参数文件路径
> * **config_file**(const char*): 配置文件路径即PaddleClas导出的部署yaml文件
> * **runtime_option**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption的指针表示后端推理配置
> * **model_format**(FD_C_ModelFormat): 模型格式
>
> **返回**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): 指向PaddleClas模型对象的指针
#### 读写图像
```c
FD_C_Mat FD_C_Imread(const char* imgpath)
```
> 读取一个图像并且返回cv::Mat的指针。
>
> **参数**
>
> * **imgpath**(const char*): 图像文件路径
>
> **返回**
>
> * **imgmat**(FD_C_Mat): 指向图像数据cv::Mat的指针。
```c
FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img);
```
> 将图像写入文件中。
>
> **参数**
>
> * **savepath**(const char*): 保存图像的路径
> * **img**(FD_C_Mat): 指向图像数据的指针
>
> **返回**
>
> * **result**(FD_C_Bool): 表示操作是否成功
#### Predict函数
```c
FD_C_Bool FD_C_PaddleClasModelWrapperPredict(
__fd_take FD_C_PaddleClasModelWrapper* fd_c_ppclas_wrapper, FD_C_Mat img,
FD_C_ClassifyResult* fd_c_ppclas_result)
```
>
> 模型预测接口,输入图像并直接生成分类结果。
>
> **参数**
> * **fd_c_ppclas_wrapper**(FD_C_PaddleClasModelWrapper*): 指向PaddleClas模型的指针
> * **img**(FD_C_Mat): 输入图像的指针指向cv::Mat对象可以调用FD_C_Imread读取图像获取
> * **fd_c_ppclas_result**(FD_C_ClassifyResult*): 分类结果包括label_id以及相应的置信度, ClassifyResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/)
#### Predict结果
```c
FD_C_ClassifyResultWrapper* FD_C_CreateClassifyResultWrapperFromData(
FD_C_ClassifyResult* fd_c_classify_result)
```
>
> 创建一个FD_C_ClassifyResultWrapper对象的指针FD_C_ClassifyResultWrapper中包含了C++的`fastdeploy::vision::ClassifyResult`对象通过该指针使用C API可以访问调用对应C++中的函数。
>
>
> **参数**
> * **fd_c_classify_result**(FD_C_ClassifyResult*): 指向FD_C_ClassifyResult对象的指针
>
> **返回**
> * **fd_c_classify_result_wrapper**(FD_C_ClassifyResultWrapper*): 指向FD_C_ClassifyResultWrapper的指针
```c
char* FD_C_ClassifyResultWrapperStr(
FD_C_ClassifyResultWrapper* fd_c_classify_result_wrapper);
```
>
> 调用FD_C_ClassifyResultWrapper所包含的`fastdeploy::vision::ClassifyResult`对象的Str()方法,返回相关结果内数据信息的字符串。
>
> **参数**
> * **fd_c_classify_result_wrapper**(FD_C_ClassifyResultWrapper*): 指向FD_C_ClassifyResultWrapper对象的指针
>
> **返回**
> * **str**(char*): 表示结果数据信息的字符串
- [模型介绍](../../)
- [Python部署](../python)
- [视觉模型预测结果](../../../../../docs/api/vision_results/)
- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md)

View File

@@ -0,0 +1,156 @@
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <stdlib.h>
#include "fastdeploy_capi/vision.h"
#ifdef WIN32
const char sep = '\\';
#else
const char sep = '/';
#endif
// Run PaddleClas classification on a single image with the CPU backend and
// print the result to stdout. model_dir must contain the exported
// inference.pdmodel / inference.pdiparams / inference_cls.yaml files.
void CpuInfer(const char* model_dir, const char* image_file) {
  char model_file[100];
  char params_file[100];
  char config_file[100];
  // Use sizeof(buffer) so the bound always matches the buffer; snprintf
  // reserves room for the terminating NUL itself.
  snprintf(model_file, sizeof(model_file), "%s%c%s", model_dir, sep,
           "inference.pdmodel");
  snprintf(params_file, sizeof(params_file), "%s%c%s", model_dir, sep,
           "inference.pdiparams");
  snprintf(config_file, sizeof(config_file), "%s%c%s", model_dir, sep,
           "inference_cls.yaml");
  FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper();
  FD_C_RuntimeOptionWrapperUseCpu(option);
  FD_C_PaddleClasModelWrapper* model = FD_C_CreatePaddleClasModelWrapper(
      model_file, params_file, config_file, option, PADDLE);
  if (!FD_C_PaddleClasModelWrapperInitialized(model)) {
    // Release everything created so far before bailing out.
    printf("Failed to initialize.\n");
    FD_C_DestroyRuntimeOptionWrapper(option);
    FD_C_DestroyPaddleClasModelWrapper(model);
    return;
  }
  FD_C_Mat im = FD_C_Imread(image_file);
  FD_C_ClassifyResult* result =
      (FD_C_ClassifyResult*)malloc(sizeof(FD_C_ClassifyResult));
  if (!FD_C_PaddleClasModelWrapperPredict(model, im, result)) {
    printf("Failed to predict.\n");
    FD_C_DestroyRuntimeOptionWrapper(option);
    FD_C_DestroyPaddleClasModelWrapper(model);
    FD_C_DestroyMat(im);
    free(result);
    return;
  }
  // Print the result. You can access the fields of FD_C_ClassifyResult
  // directly (see the ClassifyResult API doc), or wrap it in a
  // FD_C_ClassifyResultWrapper, which contains the C++ structure
  // fastdeploy::vision::ClassifyResult, and use FD_C_ClassifyResultWrapperStr
  // to obtain the output of fastdeploy::vision::ClassifyResult::Str(). For
  // convenience, we choose the latter here.
  // NOTE(review): the string returned by FD_C_ClassifyResultWrapperStr looks
  // heap-allocated and is never released here — confirm ownership with the
  // C API and free it if the caller owns it.
  FD_C_ClassifyResultWrapper* result_wrapper =
      FD_C_CreateClassifyResultWrapperFromData(result);
  printf("%s", FD_C_ClassifyResultWrapperStr(result_wrapper));
  FD_C_DestroyRuntimeOptionWrapper(option);
  FD_C_DestroyPaddleClasModelWrapper(model);
  FD_C_DestroyClassifyResultWrapper(result_wrapper);
  FD_C_DestroyClassifyResult(result);
  FD_C_DestroyMat(im);
}
// Run PaddleClas classification on a single image with the GPU backend
// (device 0) and print the result to stdout. model_dir must contain the
// exported inference.pdmodel / inference.pdiparams / inference_cls.yaml files.
void GpuInfer(const char* model_dir, const char* image_file) {
  char model_file[100];
  char params_file[100];
  char config_file[100];
  // Use sizeof(buffer) so the bound always matches the buffer; snprintf
  // reserves room for the terminating NUL itself.
  snprintf(model_file, sizeof(model_file), "%s%c%s", model_dir, sep,
           "inference.pdmodel");
  snprintf(params_file, sizeof(params_file), "%s%c%s", model_dir, sep,
           "inference.pdiparams");
  snprintf(config_file, sizeof(config_file), "%s%c%s", model_dir, sep,
           "inference_cls.yaml");
  FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper();
  FD_C_RuntimeOptionWrapperUseGpu(option, 0);
  FD_C_PaddleClasModelWrapper* model = FD_C_CreatePaddleClasModelWrapper(
      model_file, params_file, config_file, option, PADDLE);
  if (!FD_C_PaddleClasModelWrapperInitialized(model)) {
    // Release everything created so far before bailing out.
    printf("Failed to initialize.\n");
    FD_C_DestroyRuntimeOptionWrapper(option);
    FD_C_DestroyPaddleClasModelWrapper(model);
    return;
  }
  FD_C_Mat im = FD_C_Imread(image_file);
  FD_C_ClassifyResult* result =
      (FD_C_ClassifyResult*)malloc(sizeof(FD_C_ClassifyResult));
  if (!FD_C_PaddleClasModelWrapperPredict(model, im, result)) {
    printf("Failed to predict.\n");
    FD_C_DestroyRuntimeOptionWrapper(option);
    FD_C_DestroyPaddleClasModelWrapper(model);
    FD_C_DestroyMat(im);
    free(result);
    return;
  }
  // Print the result. You can access the fields of FD_C_ClassifyResult
  // directly (see the ClassifyResult API doc), or wrap it in a
  // FD_C_ClassifyResultWrapper, which contains the C++ structure
  // fastdeploy::vision::ClassifyResult, and use FD_C_ClassifyResultWrapperStr
  // to obtain the output of fastdeploy::vision::ClassifyResult::Str(). For
  // convenience, we choose the latter here.
  // NOTE(review): the string returned by FD_C_ClassifyResultWrapperStr looks
  // heap-allocated and is never released here — confirm ownership with the
  // C API and free it if the caller owns it.
  FD_C_ClassifyResultWrapper* result_wrapper =
      FD_C_CreateClassifyResultWrapperFromData(result);
  printf("%s", FD_C_ClassifyResultWrapperStr(result_wrapper));
  FD_C_DestroyRuntimeOptionWrapper(option);
  FD_C_DestroyPaddleClasModelWrapper(model);
  FD_C_DestroyClassifyResultWrapper(result_wrapper);
  FD_C_DestroyClassifyResult(result);
  FD_C_DestroyMat(im);
}
// Entry point: infer_demo <model_dir> <image_path> <run_option>
// run_option: 0 = run on CPU, 1 = run on GPU; any other value does nothing.
int main(int argc, char* argv[]) {
  if (argc < 4) {
    // Fixed the example in the usage text: this is the PaddleClas demo, not
    // PP-YOLOE, and the binary is built as infer_demo.
    printf(
        "Usage: infer_demo path/to/model_dir path/to/image run_option, "
        "e.g ./infer_demo ./ResNet50_vd_infer ./test.jpeg 0"
        "\n");
    printf(
        "The data type of run_option is int, 0: run with cpu; 1: run with gpu"
        "\n");
    return -1;
  }
  // Parse the device flag once instead of calling atoi twice.
  int run_option = atoi(argv[3]);
  if (run_option == 0) {
    CpuInfer(argv[1], argv[2]);
  } else if (run_option == 1) {
    GpuInfer(argv[1], argv[2]);
  }
  return 0;
}

View File

@@ -0,0 +1,22 @@
# CMake project for the PaddleClas C# inference demo (Visual Studio generator).
PROJECT(infer_demo CSharp)
CMAKE_MINIMUM_REQUIRED (VERSION 3.10)
# Set the C# language version (defaults to 3.0 if not set).
set(CMAKE_CSharp_FLAGS "/langversion:10")
set(CMAKE_DOTNET_TARGET_FRAMEWORK "net6.0")
set(CMAKE_DOTNET_SDK "Microsoft.NET.Sdk")
# Path of the downloaded and extracted FastDeploy SDK.
option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
include(${FASTDEPLOY_INSTALL_DIR}/FastDeployCSharp.cmake)
add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cs)
# Wire up the .NET assembly and NuGet package references exported by
# FastDeployCSharp.cmake.
set_property(TARGET infer_demo PROPERTY VS_DOTNET_REFERENCES
${FASTDEPLOY_DOTNET_REFERENCES}
)
set_property(TARGET infer_demo
PROPERTY VS_PACKAGE_REFERENCES ${FASTDEPLOY_PACKAGE_REFERENCES})

View File

@@ -0,0 +1,99 @@
English | [简体中文](README_CN.md)
# PaddleClas C# Deployment Example
This directory provides example `infer.cs` to quickly finish the deployment of PaddleClas models on CPU/GPU.
Before deployment, two steps require confirmation
- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md)
- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md)
Please follow below instructions to compile and test in Windows. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model.
## 1. Download C# package management tool nuget client
> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe
Add nuget program into system variable **PATH**
## 2. Download model and image for test
> https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz # (extract after downloading)
> https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
## 3. Compile example code
Open `x64 Native Tools Command Prompt for VS 2019` command tool on Windows, cd to the demo path of paddleclas and execute commands
```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\classification\paddleclas\csharp
mkdir build && cd build
cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2"
nuget restore
msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64
```
For more information about how to use FastDeploy SDK to compile a project with Visual Studio 2019. Please refer to
- [Using the FastDeploy C++ SDK on Windows Platform](../../../../../docs/en/faq/use_sdk_on_windows.md)
## 4. Execute compiled program
fastdeploy.dll and related dynamic libraries are required by the program. FastDeploy provide a script to copy all required dll to your program path.
```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x
fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\classification\paddleclas\csharp\build\Release
```
Then you can run your program and test the model with image
```shell
cd Release
# CPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# GPU inference
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
## PaddleClas C# Interface
### Model Class
```c#
fastdeploy.vision.classification.PaddleClasModel(
string model_file,
string params_file,
string config_file,
fastdeploy.RuntimeOption runtime_option = null,
fastdeploy.ModelFormat model_format = ModelFormat.PADDLE)
```
> PaddleClasModel initialization.
> **Params**
>> * **model_file**(str): Model file path
>> * **params_file**(str): Parameter file path
>> * **config_file**(str): Configuration file path, which is the deployment yaml file exported by PaddleClas
>> * **runtime_option**(RuntimeOption): Backend inference configuration. null by default, which is the default configuration
>> * **model_format**(ModelFormat): Model format. Paddle format by default
#### Predict Function
```c#
fastdeploy.ClassifyResult Predict(OpenCvSharp.Mat im)
```
> Model prediction interface. Input images and output results directly.
>
> **Params**
>
>> * **im**(Mat): Input images in HWC or BGR format
>
> **Return**
>
>> * **result**(ClassifyResult): The classification result, including label_id, and the corresponding confidence. Refer to [Visual Model Prediction Results](../../../../../docs/api/vision_results/) for the description of ClassifyResult
- [Model Description](../../)
- [Python Deployment](../python)
- [Vision Model prediction results](../../../../../docs/api/vision_results/)
- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md)

View File

@@ -0,0 +1,101 @@
[English](README.md) | 简体中文
# PaddleClas C#部署示例
本目录下提供`infer.cs`来调用C# API快速完成PaddleClas系列模型在CPU/GPU上部署的示例。
在部署前,需确认以下两个步骤
- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md)
- 2. 根据开发环境下载预编译部署库和samples代码参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md)
以Linux上ResNet50_vd推理为例在本目录执行如下命令即可完成编译测试支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4)
## 1. 下载C#包管理程序nuget客户端
> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe
下载完成后将该程序添加到环境变量**PATH**中
## 2. 下载模型文件和测试图片
> https://bj.bcebos.com/paddlehub/fastdeploy/ResNet50_vd_infer.tgz # (下载后解压缩)
> https://gitee.com/paddlepaddle/PaddleClas/raw/release/2.4/deploy/images/ImageNet/ILSVRC2012_val_00000010.jpeg
## 3. 编译示例代码
本文档编译的示例代码可在解压的库中找到编译工具依赖VS 2019的安装**Windows打开x64 Native Tools Command Prompt for VS 2019命令工具**,通过如下命令开始编译
```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\classification\paddleclas\csharp
mkdir build && cd build
cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2"
nuget restore
msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64
```
关于使用Visual Studio 2019创建sln工程或者CMake工程等方式编译的更详细信息可参考如下文档
- [在 Windows 使用 FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md)
- [FastDeploy C++库在Windows上的多种使用方式](../../../../../docs/cn/faq/use_sdk_on_windows_build.md)
## 4. 运行可执行程序
注意Windows上运行时需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录(可能生成的可执行文件在Release下还有一层目录这里假设生成的可执行文件在Release处)
```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x
fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\classification\paddleclas\csharp\build\Release
```
将dll拷贝到当前路径后准备好模型和图片使用如下命令运行可执行程序即可
```shell
cd Release
# CPU推理
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 0
# GPU推理
./infer_demo ResNet50_vd_infer ILSVRC2012_val_00000010.jpeg 1
```
## PaddleClas C#接口
### 模型
```c#
fastdeploy.vision.classification.PaddleClasModel(
string model_file,
string params_file,
string config_file,
fastdeploy.RuntimeOption runtime_option = null,
fastdeploy.ModelFormat model_format = ModelFormat.PADDLE)
```
> PaddleClasModel模型加载和初始化。
> **参数**
>> * **model_file**(str): 模型文件路径
>> * **params_file**(str): 参数文件路径
>> * **config_file**(str): 配置文件路径即PaddleClas导出的部署yaml文件
>> * **runtime_option**(RuntimeOption): 后端推理配置默认为null即采用默认配置
>> * **model_format**(ModelFormat): 模型格式默认为PADDLE格式
#### Predict函数
```c#
fastdeploy.ClassifyResult Predict(OpenCvSharp.Mat im)
```
> 模型预测接口,输入图像直接输出分类结果。
>
> **参数**
>
>> * **im**(Mat): 输入图像注意需为HWCBGR格式
>>
> **返回值**
>
>> * **result**: 分类结果包括label_id以及相应的置信度, ClassifyResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/)
- [模型介绍](../../)
- [Python部署](../python)
- [视觉模型预测结果](../../../../../docs/api/vision_results/)
- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md)

View File

@@ -0,0 +1,58 @@
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.IO;
using System.Runtime.InteropServices;
using OpenCvSharp;
using fastdeploy;
namespace Test
{
// Minimal PaddleClas classification demo.
// Usage: infer_demo <model_dir> <image_path> <run_option>
//   run_option: 0 = run on CPU, anything else = run on GPU.
public class TestPaddleClas
{
  public static void Main(string[] args)
  {
    if (args.Length < 3) {
      // Fixed the example in the usage text: this is the PaddleClas demo,
      // not PP-YOLOE, and the binary is built as infer_demo.
      Console.WriteLine(
          "Usage: infer_demo path/to/model_dir path/to/image run_option, " +
          "e.g ./infer_demo ./ResNet50_vd_infer ./test.jpeg 0"
      );
      Console.WriteLine( "The data type of run_option is int, 0: run with cpu; 1: run with gpu");
      return;
    }
    string model_dir = args[0];
    string image_path = args[1];
    // PaddleClas exported inference files live directly under model_dir.
    string model_file = model_dir + "\\" + "inference.pdmodel";
    string params_file = model_dir + "\\" + "inference.pdiparams";
    string config_file = model_dir + "\\" + "inference_cls.yaml";
    RuntimeOption runtimeoption = new RuntimeOption();
    int device_option = Int32.Parse(args[2]);
    if(device_option==0){
      runtimeoption.UseCpu();
    }else{
      runtimeoption.UseGpu();
    }
    fastdeploy.vision.classification.PaddleClasModel model = new fastdeploy.vision.classification.PaddleClasModel(model_file, params_file, config_file, runtimeoption, ModelFormat.PADDLE);
    if(!model.Initialized()){
      Console.WriteLine("Failed to initialize.\n");
      // BUGFIX: bail out instead of predicting with an uninitialized model.
      return;
    }
    Mat image = Cv2.ImRead(image_path);
    fastdeploy.vision.ClassifyResult res = model.Predict(image);
    Console.WriteLine(res.ToString());
  }
}
}

View File

@@ -78,7 +78,7 @@ void FD_C_RuntimeOptionWrapperUseGpu(
```c ```c
FD_C_PPYOLOEWrapper* FD_C_CreatesPPYOLOEWrapper( FD_C_PPYOLOEWrapper* FD_C_CreatePPYOLOEWrapper(
const char* model_file, const char* params_file, const char* config_file, const char* model_file, const char* params_file, const char* config_file,
FD_C_RuntimeOptionWrapper* runtime_option, FD_C_RuntimeOptionWrapper* runtime_option,
const FD_C_ModelFormat model_format) const FD_C_ModelFormat model_format)
@@ -137,7 +137,7 @@ FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img);
```c ```c
FD_C_Bool FD_C_PPYOLOEWrapperPredict( FD_C_Bool FD_C_PPYOLOEWrapperPredict(
__fd_take FD_C_PPYOLOEWrapper* fd_c_ppyoloe_wrapper, FD_C_Mat img, __fd_take FD_C_PPYOLOEWrapper* fd_c_ppyoloe_wrapper, FD_C_Mat img,
FD_C_DetectionResultWrapper* fd_c_detection_result_wrapper) FD_C_DetectionResult* fd_c_detection_result)
``` ```
> >
> Predict an image, and generate detection result. > Predict an image, and generate detection result.
@@ -145,37 +145,11 @@ FD_C_Bool FD_C_PPYOLOEWrapperPredict(
> **Params** > **Params**
> * **fd_c_ppyoloe_wrapper**(FD_C_PPYOLOEWrapper*): pointer to manipulate PPYOLOE object > * **fd_c_ppyoloe_wrapper**(FD_C_PPYOLOEWrapper*): pointer to manipulate PPYOLOE object
> * **img**FD_C_Mat: pointer to cv::Mat object, which can be obained by FD_C_Imread interface > * **img**FD_C_Mat: pointer to cv::Mat object, which can be obained by FD_C_Imread interface
> * **result**FD_C_DetectionResultWrapper*): Detection result, including detection box and confidence of each box. Refer to [Vision Model Prediction Result](../../../../../docs/api/vision_results/) for DetectionResult > * **fd_c_detection_result**FD_C_DetectionResult*): Detection result, including detection box and confidence of each box. Refer to [Vision Model Prediction Result](../../../../../docs/api/vision_results/) for DetectionResult
#### Result #### Result
```c
FD_C_DetectionResultWrapper* FD_C_CreateDetectionResultWrapper();
```
>
> Create a DetectionResult object to keep the detection resultreturn a pointer to manipulate it.
>
> **Return**
> * **fd_c_detection_result_wrapper**(FD_C_DetectionResultWrapper*): pointer to manipulate DetectionResult object
```c
FD_C_DetectionResult* FD_C_DetectionResultWrapperGetData(
FD_C_DetectionResultWrapper* fd_c_detection_result_wrapper)
```
>
> Get the C DetectionResult structure from FD_C_DetectionResultWrapper, which can access the fileds directly.
>
> **Params**
> * **fd_c_detection_result_wrapper**(FD_C_DetectionResultWrapper*): pointer to manipulate DetectionResult object
>
> **Return**
> * **fd_c_detection_result**(FD_C_DetectionResult*): pointer to C DetectionResult structure
```c ```c
FD_C_Mat FD_C_VisDetection(FD_C_Mat im, FD_C_DetectionResult* fd_detection_result, FD_C_Mat FD_C_VisDetection(FD_C_Mat im, FD_C_DetectionResult* fd_detection_result,
float score_threshold, int line_size, float font_size); float score_threshold, int line_size, float font_size);

View File

@@ -82,7 +82,7 @@ void FD_C_RuntimeOptionWrapperUseGpu(
```c ```c
FD_C_PPYOLOEWrapper* FD_C_CreatesPPYOLOEWrapper( FD_C_PPYOLOEWrapper* FD_C_CreatePPYOLOEWrapper(
const char* model_file, const char* params_file, const char* config_file, const char* model_file, const char* params_file, const char* config_file,
FD_C_RuntimeOptionWrapper* runtime_option, FD_C_RuntimeOptionWrapper* runtime_option,
const FD_C_ModelFormat model_format) const FD_C_ModelFormat model_format)
@@ -141,7 +141,7 @@ FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img);
```c ```c
FD_C_Bool FD_C_PPYOLOEWrapperPredict( FD_C_Bool FD_C_PPYOLOEWrapperPredict(
__fd_take FD_C_PPYOLOEWrapper* fd_c_ppyoloe_wrapper, FD_C_Mat img, __fd_take FD_C_PPYOLOEWrapper* fd_c_ppyoloe_wrapper, FD_C_Mat img,
FD_C_DetectionResultWrapper* fd_c_detection_result_wrapper) FD_C_DetectionResult* fd_c_detection_result)
``` ```
> >
> 模型预测接口,输入图像直接并生成检测结果。 > 模型预测接口,输入图像直接并生成检测结果。
@@ -149,37 +149,11 @@ FD_C_Bool FD_C_PPYOLOEWrapperPredict(
> **参数** > **参数**
> * **fd_c_ppyoloe_wrapper**(FD_C_PPYOLOEWrapper*): 指向PPYOLOE模型的指针 > * **fd_c_ppyoloe_wrapper**(FD_C_PPYOLOEWrapper*): 指向PPYOLOE模型的指针
> * **img**FD_C_Mat: 输入图像的指针指向cv::Mat对象可以调用FD_C_Imread读取图像获取 > * **img**FD_C_Mat: 输入图像的指针指向cv::Mat对象可以调用FD_C_Imread读取图像获取
> * **result**FD_C_DetectionResultWrapper*): 指向检测结果的指针,检测结果包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > * **fd_c_detection_result**FD_C_DetectionResult*): 指向检测结果的指针,检测结果包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/)
#### Predict结果 #### Predict结果
```c
FD_C_DetectionResultWrapper* FD_C_CreateDetectionResultWrapper();
```
>
> 创建一个DetectionResult对象用来保存推理的结果并返回所创建的DetectionResult对象的指针。
>
> **返回**
> * **fd_c_detection_result_wrapper**(FD_C_DetectionResultWrapper*): 指向DetectionResult对象的指针
```c
FD_C_DetectionResult* FD_C_DetectionResultWrapperGetData(
FD_C_DetectionResultWrapper* fd_c_detection_result_wrapper)
```
>
> 从DetectionResult对象中提取纯C结构的DetectionResult结果并返回结构指针通过该指针可直接返回结构中的字段。
>
> **参数**
> * **fd_c_detection_result_wrapper**(FD_C_DetectionResultWrapper*): 指向DetectionResult对象的指针
>
> **返回**
> * **fd_c_detection_result**(FD_C_DetectionResult*): 指向纯C结构的DetectionResult的指针
```c ```c
FD_C_Mat FD_C_VisDetection(FD_C_Mat im, FD_C_DetectionResult* fd_detection_result, FD_C_Mat FD_C_VisDetection(FD_C_Mat im, FD_C_DetectionResult* fd_detection_result,
float score_threshold, int line_size, float font_size); float score_threshold, int line_size, float font_size);
@@ -189,7 +163,7 @@ FD_C_Mat FD_C_VisDetection(FD_C_Mat im, FD_C_DetectionResult* fd_detection_resul
> >
> **参数** > **参数**
> * **im**(FD_C_Mat): 指向输入图像的指针 > * **im**(FD_C_Mat): 指向输入图像的指针
> * **fd_detection_result**(FD_C_DetectionResult*): 指向纯C结构DetectionResult的指针 > * **fd_detection_result**(FD_C_DetectionResult*): 指向FD_C_DetectionResult结构的指针
> * **score_threshold**(float): 检测阈值 > * **score_threshold**(float): 检测阈值
> * **line_size**(int): 检测框线大小 > * **line_size**(int): 检测框线大小
> * **font_size**(float): 检测框字体大小 > * **font_size**(float): 检测框字体大小

View File

@@ -35,21 +35,30 @@ void CpuInfer(const char* model_dir, const char* image_file) {
FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper(); FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper();
FD_C_RuntimeOptionWrapperUseCpu(option); FD_C_RuntimeOptionWrapperUseCpu(option);
FD_C_PPYOLOEWrapper* model = FD_C_CreatesPPYOLOEWrapper( FD_C_PPYOLOEWrapper* model = FD_C_CreatePPYOLOEWrapper(
model_file, params_file, config_file, option, PADDLE); model_file, params_file, config_file, option, PADDLE);
FD_C_Mat im = FD_C_Imread(image_file); if (!FD_C_PPYOLOEWrapperInitialized(model)) {
printf("Failed to initialize.\n");
FD_C_DetectionResultWrapper* result_wrapper = FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_CreateDetectionResultWrapper(); FD_C_DestroyPaddleClasModelWrapper(model);
if (!FD_C_PPYOLOEWrapperPredict(model, im, result_wrapper)) {
printf("Failed to predict.\n");
return; return;
} }
FD_C_Mat im = FD_C_Imread(image_file);
FD_C_DetectionResult* result = FD_C_DetectionResult* result =
FD_C_DetectionResultWrapperGetData(result_wrapper); (FD_C_DetectionResult*)malloc(sizeof(FD_C_DetectionResult));
if (!FD_C_PPYOLOEWrapperPredict(model, im, result)) {
printf("Failed to predict.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyMat(im);
free(result);
return;
}
FD_C_Mat vis_im = FD_C_VisDetection(im, result, 0.5, 1, 0.5); FD_C_Mat vis_im = FD_C_VisDetection(im, result, 0.5, 1, 0.5);
FD_C_Imwrite("vis_result.jpg", vis_im); FD_C_Imwrite("vis_result.jpg", vis_im);
@@ -57,7 +66,6 @@ void CpuInfer(const char* model_dir, const char* image_file) {
FD_C_DestroyRuntimeOptionWrapper(option); FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPPYOLOEWrapper(model); FD_C_DestroyPPYOLOEWrapper(model);
FD_C_DestroyDetectionResultWrapper(result_wrapper);
FD_C_DestroyDetectionResult(result); FD_C_DestroyDetectionResult(result);
FD_C_DestroyMat(im); FD_C_DestroyMat(im);
FD_C_DestroyMat(vis_im); FD_C_DestroyMat(vis_im);
@@ -75,21 +83,30 @@ void GpuInfer(const char* model_dir, const char* image_file) {
FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper(); FD_C_RuntimeOptionWrapper* option = FD_C_CreateRuntimeOptionWrapper();
FD_C_RuntimeOptionWrapperUseGpu(option, 0); FD_C_RuntimeOptionWrapperUseGpu(option, 0);
FD_C_PPYOLOEWrapper* model = FD_C_CreatesPPYOLOEWrapper( FD_C_PPYOLOEWrapper* model = FD_C_CreatePPYOLOEWrapper(
model_file, params_file, config_file, option, PADDLE); model_file, params_file, config_file, option, PADDLE);
FD_C_Mat im = FD_C_Imread(image_file); if (!FD_C_PPYOLOEWrapperInitialized(model)) {
printf("Failed to initialize.\n");
FD_C_DetectionResultWrapper* result_wrapper = FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_CreateDetectionResultWrapper(); FD_C_DestroyPaddleClasModelWrapper(model);
if (!FD_C_PPYOLOEWrapperPredict(model, im, result_wrapper)) {
printf("Failed to predict.\n");
return; return;
} }
FD_C_Mat im = FD_C_Imread(image_file);
FD_C_DetectionResult* result = FD_C_DetectionResult* result =
FD_C_DetectionResultWrapperGetData(result_wrapper); (FD_C_DetectionResult*)malloc(sizeof(FD_C_DetectionResult));
if (!FD_C_PPYOLOEWrapperPredict(model, im, result)) {
printf("Failed to predict.\n");
FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPaddleClasModelWrapper(model);
FD_C_DestroyMat(im);
free(result);
return;
}
FD_C_Mat vis_im = FD_C_VisDetection(im, result, 0.5, 1, 0.5); FD_C_Mat vis_im = FD_C_VisDetection(im, result, 0.5, 1, 0.5);
FD_C_Imwrite("vis_result.jpg", vis_im); FD_C_Imwrite("vis_result.jpg", vis_im);
@@ -97,7 +114,6 @@ void GpuInfer(const char* model_dir, const char* image_file) {
FD_C_DestroyRuntimeOptionWrapper(option); FD_C_DestroyRuntimeOptionWrapper(option);
FD_C_DestroyPPYOLOEWrapper(model); FD_C_DestroyPPYOLOEWrapper(model);
FD_C_DestroyDetectionResultWrapper(result_wrapper);
FD_C_DestroyDetectionResult(result); FD_C_DestroyDetectionResult(result);
FD_C_DestroyMat(im); FD_C_DestroyMat(im);
FD_C_DestroyMat(vis_im); FD_C_DestroyMat(vis_im);

View File

@@ -39,7 +39,7 @@ msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64
## 4. 运行可执行程序 ## 4. 运行可执行程序
注意Windows上运行时需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录 注意Windows上运行时需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录(可能生成的可执行文件在Release下还有一层目录这里假设生成的可执行文件在Release处)
```shell ```shell
cd D:\Download\fastdeploy-win-x64-gpu-x.x.x cd D:\Download\fastdeploy-win-x64-gpu-x.x.x

View File

@@ -44,13 +44,16 @@ namespace Test
}else{ }else{
runtimeoption.UseGpu(); runtimeoption.UseGpu();
} }
vision.detection.PPYOLOE model = new vision.detection.PPYOLOE(model_file, params_file, config_file, runtimeoption, ModelFormat.PADDLE); fastdeploy.vision.detection.PPYOLOE model = new fastdeploy.vision.detection.PPYOLOE(model_file, params_file, config_file, runtimeoption, ModelFormat.PADDLE);
if(!model.Initialized()){
Console.WriteLine("Failed to initialize.\n");
}
Mat image = Cv2.ImRead(image_path); Mat image = Cv2.ImRead(image_path);
vision.DetectionResult res = model.Predict(image); fastdeploy.vision.DetectionResult res = model.Predict(image);
Mat res_img = vision.Visualize.VisDetection(image, res, 0, 1, 0.5f); Console.WriteLine(res.ToString());
Mat res_img = fastdeploy.vision.Visualize.VisDetection(image, res, 0, 1, 0.5f);
Cv2.ImShow("result.png", res_img); Cv2.ImShow("result.png", res_img);
Cv2.WaitKey(0); Cv2.WaitKey(0);
} }
} }