Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-10-17 06:00:59 +08:00)
[Other] Update PP-OCRv2/v3 example. (#838)
* Fix links in readme
* Fix links in readme
* Update PPOCRv2/v3 examples
This commit is contained in:
@@ -33,13 +33,19 @@ void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model
|
||||
auto cls_option = option;
|
||||
auto rec_option = option;
|
||||
|
||||
// The cls and rec model can inference a batch of images now.
|
||||
// User could initialize the inference batch size and set them after create PPOCR model.
|
||||
int cls_batch_size = 1;
|
||||
int rec_batch_size = 6;
|
||||
|
||||
// If use TRT backend, the dynamic shape will be set as follow.
|
||||
// We recommend that users set the length and height of the detection model to a multiple of 32.
|
||||
// We also recommend that users set the Trt input shape as follow.
|
||||
det_option.SetTrtInputShape("x", {1, 3, 64,64}, {1, 3, 640, 640},
|
||||
{1, 3, 960, 960});
|
||||
cls_option.SetTrtInputShape("x", {1, 3, 48, 10}, {10, 3, 48, 320}, {64, 3, 48, 1024});
|
||||
rec_option.SetTrtInputShape("x", {1, 3, 48, 10}, {10, 3, 48, 320},
|
||||
{64, 3, 48, 2304});
|
||||
cls_option.SetTrtInputShape("x", {1, 3, 48, 10}, {cls_batch_size, 3, 48, 320}, {cls_batch_size, 3, 48, 1024});
|
||||
rec_option.SetTrtInputShape("x", {1, 3, 48, 10}, {rec_batch_size, 3, 48, 320},
|
||||
{rec_batch_size, 3, 48, 2304});
|
||||
|
||||
// Users could save TRT cache file to disk as follow.
|
||||
// det_option.SetTrtCacheFile(det_model_dir + sep + "det_trt_cache.trt");
|
||||
@@ -57,6 +63,12 @@ void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model
|
||||
// The classification model is optional, so the PP-OCR can also be connected in series as follows
|
||||
// auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model);
|
||||
auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model);
|
||||
|
||||
// Set inference batch size for cls model and rec model, the value could be -1 and 1 to positive infinity.
|
||||
// When inference batch size is set to -1, it means that the inference batch size
|
||||
// of the cls and rec models will be the same as the number of boxes detected by the det model.
|
||||
ppocr_v3.SetClsBatchSize(cls_batch_size);
|
||||
ppocr_v3.SetRecBatchSize(rec_batch_size);
|
||||
|
||||
if(!ppocr_v3.Initialized()){
|
||||
std::cerr << "Failed to initialize PP-OCR." << std::endl;
|
||||
|
Reference in New Issue
Block a user