[Android] Support segmentation and facedet in Android (#567)

* [FlyCV] Add global SetProcLibCpuNumThreads method

* [Android] Support segmentation and facedet in Android

* [Android] add JNI instance check to j_runtime_option_obj

* [Model] fixed ppseg flycv resize error

* [FlyCV] fix FlyCV resize flags

* [cmake] remove unneeded Lite compile options

* [Android] add PaddleSegModel JNI and fix some bugs

* [Android] bind PaddleSegModel via JNI

* [Android] bind VisSegmentation via JNI

* [Android] bind YOLOv5Face and SCRFD via JNI

* [Android] fix NewJavaFaceDetectionResultFromCxx error
Author: DefTruth
Date: 2022-11-13 17:47:50 +08:00
Committed by: GitHub
Parent: 98cab48f8a
Commit: 6a368f3448
67 changed files with 3578 additions and 1733 deletions


@@ -63,14 +63,11 @@ option(ENABLE_LITE_BACKEND "Whether to enable paddle lite backend." OFF)
 option(ENABLE_VISION "Whether to enable vision models usage." OFF)
 option(ENABLE_TEXT "Whether to enable text models usage." OFF)
 option(ENABLE_FLYCV "Whether to enable flycv to boost image preprocess." OFF)
+option(ENABLE_TIMVX "Whether to compile for TIMVX deploy." OFF)
 option(WITH_TESTING "Whether to compile with unittest." OFF)
 ############################# Options for Android cross compiling #########################
 option(WITH_OPENCV_STATIC "Use OpenCV static lib for Android." OFF)
 option(WITH_LITE_STATIC "Use Paddle Lite static lib for Android." OFF)
-option(WITH_LITE_FULL_API "Use Paddle Lite full API lib for Android." ON)
-option(WITH_LITE_FP16 "Use Paddle Lite lib with fp16 enabled for Android." OFF)
-option(ENABLE_TIMVX "Whether to compile for TIMVX deploy." OFF)
 # Please don't open this flag now, some bugs exists.
 # Only support Linux Now


@@ -40,9 +40,6 @@ if(ANDROID)
   if(WITH_LITE_STATIC)
     message(FATAL_ERROR "Doesn't support WTIH_LITE_STATIC=ON for Paddle Lite now.")
   endif()
-  if(NOT WITH_LITE_FULL_API)
-    message(FATAL_ERROR "Doesn't support WITH_LITE_FULL_API=OFF for Paddle Lite now.")
-  endif()
   # check ABI, toolchain
   if((NOT ANDROID_ABI MATCHES "armeabi-v7a") AND (NOT ANDROID_ABI MATCHES "arm64-v8a"))
     message(FATAL_ERROR "FastDeploy with Paddle Lite only support armeabi-v7a, arm64-v8a now.")
@@ -56,12 +53,8 @@
   message(FATAL_ERROR "Doesn't support windows/mac/ios platform with backend Paddle Lite now.")
 elseif(ANDROID)
   set(PADDLELITE_URL "${PADDLELITE_URL_PREFIX}/lite-android-${ANDROID_ABI}-latest.tgz")
-  if(WITH_LITE_FP16)
   if(ANDROID_ABI MATCHES "arm64-v8a")
     set(PADDLELITE_URL "${PADDLELITE_URL_PREFIX}/lite-android-${ANDROID_ABI}-fp16-latest.tgz")
-  else()
-    message(FATAL_ERROR "Doesn't support fp16 for ${ANDROID_ABI} now !")
-  endif()
   endif()
 else() # Linux
   if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64")
@@ -76,21 +69,11 @@ endif()
 if(WIN32 OR APPLE OR IOS)
   message(FATAL_ERROR "Doesn't support windows/mac/ios platform with backend Paddle Lite now.")
 elseif(ANDROID AND WITH_LITE_STATIC)
-  if(WITH_LITE_FULL_API)
   set(PADDLELITE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_api_full_bundled.a")
   set(PADDLELITE_REMOVE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_api_light_bundled.a")
-  else()
-    set(PADDLELITE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_api_light_bundled.a")
-    set(PADDLELITE_REMOVE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_api_full_bundled.a")
-  endif()
 else()
-  if(WITH_LITE_FULL_API)
   set(PADDLELITE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_full_api_shared.so")
   set(PADDLELITE_REMOVE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_light_api_shared.so")
-  else()
-    set(PADDLELITE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_light_api_shared.so")
-    set(PADDLELITE_REMOVE_LIB "${PADDLELITE_LIB_DIR}/libpaddle_full_api_shared.so")
-  endif()
 endif()
 include_directories(${PADDLELITE_INC_DIR})


@@ -58,8 +58,10 @@ bool LimitByStride::ImplByFlyCV(Mat* mat) {
     interp_method = fcv::InterpolationType::INTER_LINEAR;
   } else if (interp_ == 2) {
     interp_method = fcv::InterpolationType::INTER_CUBIC;
+  } else if (interp_ == 3) {
+    interp_method = fcv::InterpolationType::INTER_AREA;
   } else {
-    FDERROR << "LimitByStride: Only support interp_ be 0/1/2 with FlyCV, but "
+    FDERROR << "LimitByStride: Only support interp_ be 0/1/2/3 with FlyCV, but "
               "now it's "
            << interp_ << "." << std::endl;
     return false;
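This interp_-to-FlyCV mapping recurs in the LimitShort, Resize and ResizeByShort hunks below. A shared helper along these lines could express the mapping once (a sketch only, not part of this commit; the value 0 is assumed to map to nearest-neighbor):

static bool InterpFlagToFlyCV(int interp, fcv::InterpolationType* type) {
  // Flags 0/1/2/3 mirror the OpenCV-style codes used by the processors.
  switch (interp) {
    case 0: *type = fcv::InterpolationType::INTER_NEAREST; return true;  // assumed
    case 1: *type = fcv::InterpolationType::INTER_LINEAR; return true;
    case 2: *type = fcv::InterpolationType::INTER_CUBIC; return true;
    case 3: *type = fcv::InterpolationType::INTER_AREA; return true;
    default: return false;  // caller reports FDERROR and bails out
  }
}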


@@ -64,8 +64,10 @@ bool LimitShort::ImplByFlyCV(Mat* mat) {
     interp_method = fcv::InterpolationType::INTER_LINEAR;
   } else if (interp_ == 2) {
     interp_method = fcv::InterpolationType::INTER_CUBIC;
+  } else if (interp_ == 3) {
+    interp_method = fcv::InterpolationType::INTER_AREA;
   } else {
-    FDERROR << "LimitByShort: Only support interp_ be 0/1/2 with FlyCV, but "
+    FDERROR << "LimitByShort: Only support interp_ be 0/1/2/3 with FlyCV, but "
              "now it's "
            << interp_ << "." << std::endl;
     return false;


@@ -78,8 +78,10 @@ bool Resize::ImplByFlyCV(Mat* mat) {
     interp_method = fcv::InterpolationType::INTER_LINEAR;
   } else if (interp_ == 2) {
     interp_method = fcv::InterpolationType::INTER_CUBIC;
+  } else if (interp_ == 3) {
+    interp_method = fcv::InterpolationType::INTER_AREA;
   } else {
-    FDERROR << "Resize: Only support interp_ be 0/1/2 with FlyCV, but "
+    FDERROR << "Resize: Only support interp_ be 0/1/2/3 with FlyCV, but "
              "now it's "
            << interp_ << "." << std::endl;
     return false;


@@ -50,8 +50,10 @@ bool ResizeByShort::ImplByFlyCV(Mat* mat) {
     interp_method = fcv::InterpolationType::INTER_LINEAR;
   } else if (interp_ == 2) {
     interp_method = fcv::InterpolationType::INTER_CUBIC;
+  } else if (interp_ == 3) {
+    interp_method = fcv::InterpolationType::INTER_AREA;
   } else {
-    FDERROR << "LimitByShort: Only support interp_ be 0/1/2 with FlyCV, but "
+    FDERROR << "LimitByShort: Only support interp_ be 0/1/2/3 with FlyCV, but "
              "now it's "
            << interp_ << "." << std::endl;
     return false;


@@ -114,7 +114,7 @@ bool ScaledYOLOv4::Preprocess(
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;
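The preprocess hunks that follow (YOLOR, YOLOv5, YOLOv5Lite, YOLOv6, YOLOv7 and the End2End variants) make the same change: the exact comparison ratio != 1.0 is replaced with an epsilon test, so the extra resize only runs when the computed scale factor differs from 1 by more than 1e-6. The predicate as a standalone sketch (illustrative helper, not part of the commit):

#include <cmath>

// Rescale only when the ratio is meaningfully different from 1.0f;
// exact floating-point equality is fragile after the division above.
inline bool NeedRescale(float ratio, float eps = 1e-6f) {
  return std::fabs(ratio - 1.0f) > eps;
}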


@@ -112,7 +112,7 @@ bool YOLOR::Preprocess(Mat* mat, FDTensor* output,
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -145,7 +145,7 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output,
   // process after image load
   double ratio = (size[0] * 1.0) / std::max(static_cast<float>(mat->Height()),
                                             static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -164,7 +164,7 @@ bool YOLOv5Lite::Preprocess(
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -136,7 +136,7 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output,
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -134,7 +134,7 @@ bool YOLOv7::Preprocess(Mat* mat, FDTensor* output,
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -115,7 +115,7 @@ bool YOLOv7End2EndORT::Preprocess(
     std::map<std::string, std::array<float, 2>>* im_info) {
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -146,7 +146,7 @@ bool YOLOv7End2EndTRT::Preprocess(
     std::map<std::string, std::array<float, 2>>* im_info) {
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;


@@ -66,7 +66,7 @@ SCRFD::SCRFD(const std::string& model_file, const std::string& params_file,
     valid_cpu_backends = {Backend::ORT};
     valid_gpu_backends = {Backend::ORT, Backend::TRT};
   } else {
-    valid_cpu_backends = {Backend::PDINFER, Backend::ORT};
+    valid_cpu_backends = {Backend::PDINFER, Backend::ORT, Backend::LITE};
     valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT};
     valid_rknpu_backends = {Backend::RKNPU2};
   }
@@ -119,7 +119,11 @@ bool SCRFD::Preprocess(Mat* mat, FDTensor* output,
     std::map<std::string, std::array<float, 2>>* im_info) {
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {
+#ifndef __ANDROID__
+  // Because of the low CPU performance on the Android device,
+  // we decided to hide this extra resize. It won't make much
+  // difference to the final result.
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;
@@ -128,6 +132,7 @@ bool SCRFD::Preprocess(Mat* mat, FDTensor* output,
     int resize_w = int(mat->Width() * ratio);
     Resize::Run(mat, resize_w, resize_h, -1, -1, interp);
   }
+#endif
   // scrfd's preprocess steps
   // 1. letterbox
   // 2. BGR->RGB
@@ -153,7 +158,7 @@ bool SCRFD::Preprocess(Mat* mat, FDTensor* output,
   (*im_info)["output_shape"] = {static_cast<float>(mat->Height()),
                                 static_cast<float>(mat->Width())};
   mat->ShareWithTensor(output);
-  output->shape.insert(output->shape.begin(), 1);  // reshape to n, h, w, c
+  output->shape.insert(output->shape.begin(), 1);  // reshape to n, c, h, w
   return true;
 }
@@ -226,7 +231,13 @@ bool SCRFD::Postprocess(
     pad_w = static_cast<float>(static_cast<int>(pad_w) % stride);
   }
   // must be setup landmarks_per_face before reserve
+  if (use_kps) {
     result->landmarks_per_face = landmarks_per_face;
+  } else {
+    // force landmarks_per_face = 0, if use_kps has been set as 'false'.
+    result->landmarks_per_face = 0;
+  }
   result->Reserve(total_num_boxes);
   unsigned int count = 0;
   // loop each stride
@@ -310,12 +321,14 @@ bool SCRFD::Postprocess(
     result->boxes[i][3] = std::min(result->boxes[i][3], ipt_h - 1.0f);
   }
   // scale and clip landmarks
+  if (use_kps) {
     for (size_t i = 0; i < result->landmarks.size(); ++i) {
       result->landmarks[i][0] = std::max(result->landmarks[i][0], 0.0f);
       result->landmarks[i][1] = std::max(result->landmarks[i][1], 0.0f);
       result->landmarks[i][0] = std::min(result->landmarks[i][0], ipt_w - 1.0f);
       result->landmarks[i][1] = std::min(result->landmarks[i][1], ipt_h - 1.0f);
     }
+  }
   return true;
 }
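With the use_kps guard above, FaceDetectionResult only carries landmarks when the SCRFD model was exported with keypoints, and landmarks_per_face is forced to 0 otherwise, so callers can gate on that field. A minimal consumer sketch (assumes the usual FastDeploy layout in which each face stores landmarks_per_face consecutive points):

#include <cstdio>
#include "fastdeploy/vision.h"

void DumpFaces(const fastdeploy::vision::FaceDetectionResult& res) {
  for (size_t i = 0; i < res.boxes.size(); ++i) {
    std::printf("face %zu, score %.3f\n", i, res.scores[i]);
    if (res.landmarks_per_face > 0) {
      // Landmarks for face i are assumed to be stored back to back.
      for (int k = 0; k < res.landmarks_per_face; ++k) {
        const auto& p = res.landmarks[i * res.landmarks_per_face + k];
        std::printf("  landmark %d: (%.1f, %.1f)\n", k, p[0], p[1]);
      }
    }
  }
}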


@@ -67,7 +67,7 @@ YOLOv5Face::YOLOv5Face(const std::string& model_file,
     valid_cpu_backends = {Backend::ORT};
     valid_gpu_backends = {Backend::ORT, Backend::TRT};
   } else {
-    valid_cpu_backends = {Backend::PDINFER, Backend::ORT};
+    valid_cpu_backends = {Backend::PDINFER, Backend::ORT, Backend::LITE};
     valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT};
   }
   runtime_option = custom_option;
@@ -115,7 +115,11 @@ bool YOLOv5Face::Preprocess(
   // process after image load
   float ratio = std::min(size[1] * 1.0f / static_cast<float>(mat->Height()),
                          size[0] * 1.0f / static_cast<float>(mat->Width()));
-  if (ratio != 1.0) {  // always true
+#ifndef __ANDROID__
+  // Because of the low CPU performance on the Android device,
+  // we decided to hide this extra resize. It won't make much
+  // difference to the final result.
+  if (std::fabs(ratio - 1.0f) > 1e-06) {
     int interp = cv::INTER_AREA;
     if (ratio > 1.0) {
       interp = cv::INTER_LINEAR;
@@ -124,6 +128,7 @@ bool YOLOv5Face::Preprocess(
     int resize_w = int(round(static_cast<float>(mat->Width()) * ratio));
     Resize::Run(mat, resize_w, resize_h, -1, -1, interp);
   }
+#endif
   // yolov5face's preprocess steps
   // 1. letterbox
   // 2. BGR->RGB


@@ -263,8 +263,8 @@ bool PaddleSegModel::Postprocess(
         infer_result->shape, FDDataType::FP32,
         static_cast<void*>(fp32_result_buffer->data()));
   }
-  mat = new Mat(Mat::Create(*infer_result));
-  Resize::Run(mat, ipt_w, ipt_h, -1.0f, -1.0f, 1);
+  mat = new Mat(Mat::Create(*infer_result, ProcLib::OPENCV));
+  Resize::Run(mat, ipt_w, ipt_h, -1.0f, -1.0f, 1, false, ProcLib::OPENCV);
   mat->ShareWithTensor(&new_infer_result);
   result->shape = new_infer_result.shape;
 } else {
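This appears to be the "fixed ppseg flycv resize error" item from the commit message: the postprocess resize of the segmentation result is pinned to OpenCV via ProcLib::OPENCV even when FlyCV is compiled in, so this one step no longer goes through FlyCV. The pattern in isolation (argument order follows the two lines above; the extra false and ProcLib::OPENCV arguments are passed explicitly here):

// Resize the inference result back to the input size using OpenCV explicitly,
// bypassing FlyCV for this single step.
Mat seg_mat = Mat::Create(*infer_result, ProcLib::OPENCV);
Resize::Run(&seg_mat, ipt_w, ipt_h, -1.0f, -1.0f, 1, false, ProcLib::OPENCV);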


@@ -11,7 +11,7 @@
     <application
         android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
-        android:label="@string/ocr_app_name"
+        android:label="@string/detection_app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">


@@ -205,15 +205,13 @@ public class OcrMainActivity extends Activity implements View.OnClickListener, C
     }
     public void checkAndUpdateSettings() {
-        if (OcrSettingsActivity.checkAndUpdateSettings(this)) {
-            String realModelDir = getCacheDir() + "/" + OcrSettingsActivity.modelDir;
-            // String detModelName = "ch_PP-OCRv2_det_infer";
-            String detModelName = "ch_PP-OCRv3_det_infer";
+        if (SettingsActivity.checkAndUpdateSettings(this)) {
+            String realModelDir = getCacheDir() + "/" + SettingsActivity.modelDir;
+            String detModelName = "ch_PP-OCRv2_det_infer";
             // String detModelName = "ch_ppocr_mobile_v2.0_det_infer";
             String clsModelName = "ch_ppocr_mobile_v2.0_cls_infer";
             // String recModelName = "ch_ppocr_mobile_v2.0_rec_infer";
-            String recModelName = "ch_PP-OCRv3_rec_infer";
-            // String recModelName = "ch_PP-OCRv2_rec_infer";
+            String recModelName = "ch_PP-OCRv2_rec_infer";
             String realDetModelDir = realModelDir + "/" + detModelName;
             String realClsModelDir = realModelDir + "/" + clsModelName;
             String realRecModelDir = realModelDir + "/" + recModelName;
@@ -236,16 +234,13 @@
         RuntimeOption detOption = new RuntimeOption();
         RuntimeOption clsOption = new RuntimeOption();
         RuntimeOption recOption = new RuntimeOption();
-        detOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum);
-        clsOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum);
-        recOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum);
-        detOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode);
-        clsOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode);
-        recOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode);
-        detOption.enableRecordTimeOfRuntime();
-        clsOption.enableRecordTimeOfRuntime();
-        recOption.enableRecordTimeOfRuntime();
-        if (Boolean.parseBoolean(OcrSettingsActivity.enableLiteFp16)) {
+        detOption.setCpuThreadNum(SettingsActivity.cpuThreadNum);
+        clsOption.setCpuThreadNum(SettingsActivity.cpuThreadNum);
+        recOption.setCpuThreadNum(SettingsActivity.cpuThreadNum);
+        detOption.setLitePowerMode(SettingsActivity.cpuPowerMode);
+        clsOption.setLitePowerMode(SettingsActivity.cpuPowerMode);
+        recOption.setLitePowerMode(SettingsActivity.cpuPowerMode);
+        if (Boolean.parseBoolean(SettingsActivity.enableLiteFp16)) {
             detOption.enableLiteFp16();
             clsOption.enableLiteFp16();
             recOption.enableLiteFp16();


@@ -52,7 +52,6 @@
             android:text="@string/action_bar_realtime"
             android:textAlignment="center" />
     </LinearLayout>
-</com.baidu.paddle.fastdeploy.app.ui.layout.ActionBarLayout>
     <!-- 实时-->
     <com.baidu.paddle.fastdeploy.app.ui.view.CameraSurfaceView


@@ -26,7 +26,7 @@
     <string name="OCR_REC_LABEL_DEFAULT">labels/ppocr_keys_v1.txt</string>
     <!-- Other resources values-->
     <string name="action_bar_take_photo">拍照识别</string>
-    <string name="action_bar_realtime">FD 实时识别</string>
+    <string name="action_bar_realtime">EasyEdge 实时识别</string>
     <string name="action_bar_back">&lt;</string>
     <string name="action_bar_model_name">模型名称</string>
     <string name="result_label">识别结果</string>


@@ -16,7 +16,8 @@ set(FastDeploy_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../../libs/fastdeploy-android
 find_package(FastDeploy REQUIRED)
-include_directories(.)
+# include_directories(.)
+include_directories(${CMAKE_CURRENT_SOURCE_DIR})
 include_directories(${FastDeploy_INCLUDE_DIRS})
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ffast-math -Ofast -Os -DNDEBUG -fomit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables")
@@ -26,13 +27,23 @@ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--gc-sections -W
 add_library(
   fastdeploy_jni
   SHARED
-  utils_jni.cc
-  bitmap_jni.cc
-  pipeline/ppocr_jni.cc
-  vision/results_jni.cc
-  vision/visualize_jni.cc
-  vision/detection/picodet_jni.cc
-  vision/classification/paddleclas_model_jni.cc)
+  fastdeploy_jni/bitmap_jni.cc
+  fastdeploy_jni/assets_loader_jni.cc
+  fastdeploy_jni/runtime_option_jni.cc
+  fastdeploy_jni/vision/results_jni.cc
+  fastdeploy_jni/vision/visualize_jni.cc
+  fastdeploy_jni/pipeline/ppocr_jni.cc
+  fastdeploy_jni/pipeline/pipeline_utils_jni.cc
+  fastdeploy_jni/vision/detection/picodet_jni.cc
+  fastdeploy_jni/vision/detection/detection_utils_jni.cc
+  fastdeploy_jni/vision/classification/paddleclas_model_jni.cc
+  fastdeploy_jni/vision/classification/classification_utils_jni.cc
+  fastdeploy_jni/vision/segmentation/paddleseg_model_jni.cc
+  fastdeploy_jni/vision/segmentation/segmentation_utils_jni.cc
+  fastdeploy_jni/vision/facedet/scrfd_jni.cc
+  fastdeploy_jni/vision/facedet/yolov5face_jni.cc
+  fastdeploy_jni/vision/facedet/facedet_utils_jni.cc
+)
 # Searches for a specified prebuilt library and stores the path as a
 # variable. Because CMake includes system libraries in the search path by


@@ -1,7 +1,3 @@
-//
-// Created by qiuyanjun on 2022/10/19.
-//
 // Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,34 +12,34 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "utils_jni.h"
+#include "fastdeploy_jni/assets_loader_jni.h"
 namespace fastdeploy {
 namespace jni {
-// Assets Loader Utils.
-bool AssetsLoaderUtils::detection_labels_loaded_ = false;
-bool AssetsLoaderUtils::classification_labels_loaded_ = false;
-std::vector<std::string> AssetsLoaderUtils::detection_labels_ = {};
-std::vector<std::string> AssetsLoaderUtils::classification_labels_ = {};
-bool AssetsLoaderUtils::IsDetectionLabelsLoaded() {
+/// Assets loader
+bool AssetsLoader::detection_labels_loaded_ = false;
+bool AssetsLoader::classification_labels_loaded_ = false;
+std::vector<std::string> AssetsLoader::detection_labels_ = {};
+std::vector<std::string> AssetsLoader::classification_labels_ = {};
+bool AssetsLoader::IsDetectionLabelsLoaded() {
   return detection_labels_loaded_;
 }
-bool AssetsLoaderUtils::IsClassificationLabelsLoaded() {
+bool AssetsLoader::IsClassificationLabelsLoaded() {
   return classification_labels_loaded_;
 }
-const std::vector<std::string>& AssetsLoaderUtils::GetDetectionLabels() {
+const std::vector<std::string>& AssetsLoader::GetDetectionLabels() {
   return detection_labels_;
 }
-const std::vector<std::string>& AssetsLoaderUtils::GetClassificationLabels() {
+const std::vector<std::string>& AssetsLoader::GetClassificationLabels() {
   return classification_labels_;
 }
-void AssetsLoaderUtils::LoadClassificationLabels(const std::string& path,
+void AssetsLoader::LoadClassificationLabels(const std::string& path,
                                             bool force_reload) {
   if (force_reload || (!classification_labels_loaded_)) {
     classification_labels_loaded_ =
@@ -51,14 +47,14 @@ void AssetsLoaderUtils::LoadClassificationLabels(const std::string& path,
   }
 }
-void AssetsLoaderUtils::LoadDetectionLabels(const std::string& path,
+void AssetsLoader::LoadDetectionLabels(const std::string& path,
                                        bool force_reload) {
   if (force_reload || (!detection_labels_loaded_)) {
     detection_labels_loaded_ = LoadLabelsFromTxt(path, &detection_labels_);
   }
 }
-bool AssetsLoaderUtils::LoadLabelsFromTxt(const std::string& txt_path,
+bool AssetsLoader::LoadLabelsFromTxt(const std::string& txt_path,
                                      std::vector<std::string>* labels) {
   labels->clear();
   std::ifstream file;
@@ -75,7 +71,7 @@ bool AssetsLoaderUtils::LoadLabelsFromTxt(const std::string& txt_path,
   }
   file.clear();
   file.close();
-  return labels->size() > 0;
+  return !labels->empty();
 }
 } // namespace jni


@@ -13,48 +13,15 @@
 // limitations under the License.
 #pragma once
-#ifdef __ANDROID__
-#include <android/log.h>  // NOLINT
-#endif
 #include <fstream>  // NOLINT
 #include <string>   // NOLINT
 #include <vector>   // NOLINT
-#define TAG "[FastDeploy][JNI]"
-#ifdef __ANDROID__
-#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
-#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
-#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
-#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
-#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, TAG, __VA_ARGS__)
-#else
-#define LOGD(...) \
-  {}
-#define LOGI(...) \
-  {}
-#define LOGW(...) \
-  {}
-#define LOGE(...) \
-  {}
-#define LOGF(...) \
-  {}
-#endif
 namespace fastdeploy {
 namespace jni {
-inline int64_t GetCurrentTime() {
-  struct timeval time;
-  gettimeofday(&time, NULL);
-  return 1000000LL * (int64_t)time.tv_sec + (int64_t)time.tv_usec;
-}
-inline double GetElapsedTime(int64_t time) {
-  return (GetCurrentTime() - time) / 1000.0f;
-}
-class AssetsLoaderUtils {
+/// Assets loader
+class AssetsLoader {
  public:
  static bool detection_labels_loaded_;
  static bool classification_labels_loaded_;


@@ -12,11 +12,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "bitmap_jni.h"  // NOLINT
 #include <android/bitmap.h>  // NOLINT
-#include "utils_jni.h"  // NOLINT
+#include "fastdeploy_jni/bitmap_jni.h"  // NOLINT
+#include "fastdeploy_jni/perf_jni.h"  // NOLINT
 namespace fastdeploy {
 namespace jni {


@@ -28,10 +28,13 @@ namespace jni {
 // to get the more details about Bitmap.Config.ARGB8888
 jboolean ARGB888Bitmap2RGBA(JNIEnv *env, jobject j_argb8888_bitmap,
                             cv::Mat *c_rgba);
 jboolean RGBA2ARGB888Bitmap(JNIEnv *env, jobject j_argb8888_bitmap,
                             const cv::Mat &c_rgba);
 jboolean ARGB888Bitmap2BGR(JNIEnv *env, jobject j_argb8888_bitmap,
                            cv::Mat *c_bgr);
 jboolean BGR2ARGB888Bitmap(JNIEnv *env, jobject j_argb8888_bitmap,
                            const cv::Mat &c_bgr);


@@ -41,8 +41,8 @@ inline std::string ConvertTo(JNIEnv *env, jstring jstr) {
   const jclass jstring_clazz = env->GetObjectClass(jstr);
   const jmethodID getBytesID =
       env->GetMethodID(jstring_clazz, "getBytes", "(Ljava/lang/String;)[B");
-  const jbyteArray jstring_bytes = (jbyteArray)env->CallObjectMethod(
-      jstr, getBytesID, env->NewStringUTF("UTF-8"));
+  const jbyteArray jstring_bytes = reinterpret_cast<jbyteArray>(
+      env->CallObjectMethod(jstr, getBytesID, env->NewStringUTF("UTF-8")));
   size_t length = static_cast<size_t>(env->GetArrayLength(jstring_bytes));
   jbyte *jstring_bytes_ptr = env->GetByteArrayElements(jstring_bytes, NULL);
@@ -56,6 +56,28 @@ inline std::string ConvertTo(JNIEnv *env, jstring jstr) {
   return res;
 }
+/// jstring s-> std::vector<std::string>
+template <>
+inline std::vector<std::string> ConvertTo(JNIEnv *env, jobjectArray jstrs) {
+  // In java, a unicode char will be encoded using 2 bytes (utf16).
+  // so jstring will contain characters utf16. std::string in c++ is
+  // essentially a string of bytes, not characters, so if we want to
+  // pass jstring from JNI to c++, we have convert utf16 to bytes.
+  if (!jstrs) {
+    return {};
+  }
+  std::vector<std::string> res;
+  const int len = env->GetArrayLength(jstrs);
+  if (len > 0) {
+    for (int i = 0; i < len; ++i) {
+      auto j_str =
+          reinterpret_cast<jstring>(env->GetObjectArrayElement(jstrs, i));
+      res.push_back(fastdeploy::jni::ConvertTo<std::string>(env, j_str));
+    }
+  }
+  return res;
+}
 /// std::string -> jstring
 template <>
 inline jstring ConvertTo(JNIEnv *env, std::string str) {
@@ -69,8 +91,8 @@ inline jstring ConvertTo(JNIEnv *env, std::string str) {
       reinterpret_cast<const jbyte *>(cstr_data_ptr));
   jstring jstring_encoding = env->NewStringUTF("UTF-8");
-  jstring res = (jstring)(env->NewObject(jstring_clazz, initID, jstring_bytes,
-                                         jstring_encoding));
+  jstring res = reinterpret_cast<jstring>(
+      env->NewObject(jstring_clazz, initID, jstring_bytes, jstring_encoding));
   env->DeleteLocalRef(jstring_clazz);
   env->DeleteLocalRef(jstring_bytes);
@@ -89,6 +111,16 @@ inline std::vector<int64_t> ConvertTo(JNIEnv *env, jlongArray jdata) {
   return res;
 }
+/// jintArray -> std::vector<int>
+template <>
+inline std::vector<int> ConvertTo(JNIEnv *env, jintArray jdata) {
+  int jdata_size = env->GetArrayLength(jdata);
+  jint *jdata_ptr = env->GetIntArrayElements(jdata, nullptr);
+  std::vector<int> res(jdata_ptr, jdata_ptr + jdata_size);
+  env->ReleaseIntArrayElements(jdata, jdata_ptr, 0);
+  return res;
+}
 /// jfloatArray -> std::vector<float>
 template <>
 inline std::vector<float> ConvertTo(JNIEnv *env, jfloatArray jdata) {
@@ -105,7 +137,7 @@ inline jlongArray ConvertTo(JNIEnv *env, const std::vector<int64_t> &cvec) {
   jlongArray res = env->NewLongArray(cvec.size());
   jlong *jbuf = new jlong[cvec.size()];
   for (size_t i = 0; i < cvec.size(); ++i) {
-    jbuf[i] = (jlong)cvec[i];
+    jbuf[i] = static_cast<jlong>(cvec[i]);
   }
   env->SetLongArrayRegion(res, 0, cvec.size(), jbuf);
   delete[] jbuf;
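Together with the existing jstring and jlongArray helpers, the new specializations let a JNI entry point unpack its Java arguments in one call each. A hypothetical entry point (the Java class and method names are illustrative only, not part of this commit):

extern "C" JNIEXPORT void JNICALL
Java_com_example_Demo_configure(JNIEnv *env, jobject /* thiz */,
                                jstring j_model_dir, jintArray j_input_shape,
                                jobjectArray j_labels) {
  // Each ConvertTo specialization copies the Java data into a C++ container.
  auto model_dir = fastdeploy::jni::ConvertTo<std::string>(env, j_model_dir);
  auto input_shape =
      fastdeploy::jni::ConvertTo<std::vector<int>>(env, j_input_shape);
  auto labels =
      fastdeploy::jni::ConvertTo<std::vector<std::string>>(env, j_labels);
  // ... hand model_dir / input_shape / labels to the native model ...
}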


@@ -0,0 +1,89 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h>
#ifdef __ANDROID__
#include <android/log.h> // NOLINT
#endif
#include <fstream> // NOLINT
#include <string> // NOLINT
#include <vector> // NOLINT
#include "fastdeploy/vision.h"
#define TAG "[FastDeploy][JNI]"
#ifdef __ANDROID__
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, TAG, __VA_ARGS__)
#else
#define LOGD(...) {}
#define LOGI(...) {}
#define LOGW(...) {}
#define LOGE(...) {}
#define LOGF(...) {}
#endif
#define ENABLE_RUNTIME_PERF
namespace fastdeploy {
namespace jni {
/// Time counter
inline int64_t GetCurrentTime() {
struct timeval time;
gettimeofday(&time, NULL);
return 1000000LL * static_cast<int64_t>(time.tv_sec) +
static_cast<int64_t>(time.tv_usec);
}
inline double GetElapsedTime(int64_t time) {
return (GetCurrentTime() - time) / 1000.0f;
}
/// Show the performance of Runtime
inline void PerfTimeOfRuntime(
FastDeployModel *c_model_ptr, int64_t start = -1) {
#ifdef ENABLE_RUNTIME_PERF
if (c_model_ptr == nullptr) {
return;
}
if (start > 0) {
auto tc = GetElapsedTime(start);
LOGD("Predict from native costs %f ms", tc);
}
if (c_model_ptr->EnabledRecordTimeOfRuntime()) {
auto info_of_runtime = c_model_ptr->PrintStatisInfoOfRuntime();
const float avg_time = info_of_runtime["avg_time"] * 1000.0f;
LOGD("Avg runtime costs %f ms", avg_time);
}
#endif
}
#define INITIALIZED_OR_RETURN(c_model_ptr) \
if (!(c_model_ptr)->Initialized()) { \
LOGE("Failed to initialize!"); \
delete (c_model_ptr); \
return 0; \
}
#define PERF_TIME_OF_RUNTIME(c_model_ptr, start) \
fastdeploy::jni::PerfTimeOfRuntime((c_model_ptr), (start));
} // namespace jni
} // namespace fastdeploy


@@ -0,0 +1,160 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/pipeline/pipeline_utils_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Handle the native PP-OCR pipeline resources.
PPOCRHandler::PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Classifier *cls_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv2 *ppocr_v2)
: detector_(det_model),
classifier_(cls_model),
recognizer_(rec_model),
ppocr_v2_(ppocr_v2) {
if (detector_ != nullptr && classifier_ != nullptr &&
recognizer_ != nullptr && ppocr_v2_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler::PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv2 *ppocr_v2)
: detector_(det_model), recognizer_(rec_model), ppocr_v2_(ppocr_v2) {
if (detector_ != nullptr && recognizer_ != nullptr && ppocr_v2_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler::PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Classifier *cls_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv3 *ppocr_v3)
: detector_(det_model),
classifier_(cls_model),
recognizer_(rec_model),
ppocr_v3_(ppocr_v3) {
if (detector_ != nullptr && classifier_ != nullptr &&
recognizer_ != nullptr && ppocr_v3_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler::PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv3 *ppocr_v3)
: detector_(det_model), recognizer_(rec_model), ppocr_v3_(ppocr_v3) {
if (detector_ != nullptr && recognizer_ != nullptr && ppocr_v3_ != nullptr) {
initialized_ = true;
}
}
void PPOCRHandler::SetPPOCRVersion(PPOCRVersion version_tag) {
ppocr_version_tag_ = version_tag;
}
bool PPOCRHandler::Predict(cv::Mat *img, vision::OCRResult *result) {
if (ppocr_version_tag_ == PPOCRVersion::OCR_V2) {
if (ppocr_v2_ != nullptr) {
return ppocr_v2_->Predict(img, result);
}
return false;
} else if (ppocr_version_tag_ == PPOCRVersion::OCR_V3) {
if (ppocr_v3_ != nullptr) {
return ppocr_v3_->Predict(img, result);
}
return false;
}
return false;
}
bool PPOCRHandler::Initialized() {
if (!initialized_) {
return false;
}
if (ppocr_version_tag_ == PPOCRVersion::OCR_V2) {
if (ppocr_v2_ != nullptr) {
return ppocr_v2_->Initialized();
}
return false;
} else if (ppocr_version_tag_ == PPOCRVersion::OCR_V3) {
if (ppocr_v3_ != nullptr) {
return ppocr_v3_->Initialized();
}
return false;
}
return false;
}
bool PPOCRHandler::ReleaseAllocatedOCRMemories() {
if (!Initialized()) {
return false;
}
if (detector_ != nullptr) {
delete detector_;
detector_ = nullptr;
LOGD("[End] Release DBDetector in native !");
}
if (classifier_ != nullptr) {
delete classifier_;
classifier_ = nullptr;
LOGD("[End] Release Classifier in native !");
}
if (recognizer_ != nullptr) {
delete recognizer_;
recognizer_ = nullptr;
LOGD("[End] Release Recognizer in native !");
}
if (ppocr_v2_ != nullptr) {
delete ppocr_v2_;
ppocr_v2_ = nullptr;
LOGD("[End] Release PP-OCRv2 in native !");
}
if (ppocr_v3_ != nullptr) {
delete ppocr_v3_;
ppocr_v3_ = nullptr;
LOGD("[End] Release PP-OCRv3 in native !");
}
initialized_ = false;
return true;
}
/// Rendering OCRResult to ARGB888Bitmap
void RenderingOCR(JNIEnv *env, const cv::Mat &c_bgr,
const vision::OCRResult &c_result, jobject argb8888_bitmap,
bool save_image, jstring saved_path) {
if (!c_result.boxes.empty()) {
auto t = GetCurrentTime();
cv::Mat c_vis_im;
c_vis_im = vision::VisOcr(c_bgr, c_result);
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, saved_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_bgr);
}
}
}
} // namespace jni
} // namespace fastdeploy


@@ -0,0 +1,78 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
enum PPOCRVersion { OCR_V1 = 0, OCR_V2 = 1, OCR_V3 = 2 };
/// Handle the native PP-OCR pipeline resources.
class PPOCRHandler {
public:
PPOCRHandler() = default;
PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Classifier *cls_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv2 *ppocr_v2);
PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv2 *ppocr_v2);
PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Classifier *cls_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv3 *ppocr_v3);
PPOCRHandler(vision::ocr::DBDetector *det_model,
vision::ocr::Recognizer *rec_model,
pipeline::PPOCRv3 *ppocr_v3);
void SetPPOCRVersion(PPOCRVersion version_tag);
bool Predict(cv::Mat *img, vision::OCRResult *result);
bool Initialized();
// Call init manually if you want to release the allocated
// PP-OCRv2/v3's memory by 'new' operator via 'delete'.
bool ReleaseAllocatedOCRMemories();
public:
vision::ocr::DBDetector *detector_ = nullptr;
vision::ocr::Classifier *classifier_ = nullptr;
vision::ocr::Recognizer *recognizer_ = nullptr;
pipeline::PPOCRv2 *ppocr_v2_ = nullptr;
pipeline::PPOCRv3 *ppocr_v3_ = nullptr;
private:
bool initialized_ = false;
PPOCRVersion ppocr_version_tag_ = PPOCRVersion::OCR_V2;
};
void RenderingOCR(JNIEnv *env, const cv::Mat &c_bgr,
const vision::OCRResult &c_result,
jobject argb8888_bitmap, bool save_image,
jstring saved_path);
} // namespace jni
} // namespace fastdeploy


@@ -0,0 +1,185 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/pipeline/pipeline_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace ocr = fastdeploy::vision::ocr;
namespace pipeline = fastdeploy::pipeline;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_bindNative(
JNIEnv *env, jobject thiz, jint ppocr_version_tag, jstring det_model_file,
jstring det_params_file, jstring cls_model_file, jstring cls_params_file,
jstring rec_model_file, jstring rec_params_file, jstring rec_label_path,
jobject det_runtime_option, jobject cls_runtime_option,
jobject rec_runtime_option, jboolean have_cls_model) {
auto c_ocr_version_tag = static_cast<fni::PPOCRVersion>(ppocr_version_tag);
if (c_ocr_version_tag == fni::PPOCRVersion::OCR_V1) {
LOGE("Not support for PPOCRVersion::OCR_V1 now!");
return 0;
}
// TODO(qiuyanjun): Allows users to set model parameters, such as
// det_db_box_thresh, det_db_thresh, use_dilation, etc. These
// parameters should be passed in via JNI.
auto c_det_model_file = fni::ConvertTo<std::string>(env, det_model_file);
auto c_det_params_file = fni::ConvertTo<std::string>(env, det_params_file);
auto c_cls_model_file = fni::ConvertTo<std::string>(env, cls_model_file);
auto c_cls_params_file = fni::ConvertTo<std::string>(env, cls_params_file);
auto c_rec_model_file = fni::ConvertTo<std::string>(env, rec_model_file);
auto c_rec_params_file = fni::ConvertTo<std::string>(env, rec_params_file);
auto c_rec_label_path = fni::ConvertTo<std::string>(env, rec_label_path);
auto c_det_runtime_option = fni::NewCxxRuntimeOption(env, det_runtime_option);
auto c_cls_runtime_option = fni::NewCxxRuntimeOption(env, cls_runtime_option);
auto c_rec_runtime_option = fni::NewCxxRuntimeOption(env, rec_runtime_option);
auto c_have_cls_model = static_cast<bool>(have_cls_model);
// Init PP-OCR pipeline
auto c_det_model_ptr = new ocr::DBDetector(
c_det_model_file, c_det_params_file, c_det_runtime_option);
INITIALIZED_OR_RETURN(c_det_model_ptr)
auto c_rec_model_ptr = new ocr::Recognizer(
c_rec_model_file, c_rec_params_file, c_rec_label_path, c_rec_runtime_option);
INITIALIZED_OR_RETURN(c_rec_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_det_model_ptr->EnableRecordTimeOfRuntime();
c_rec_model_ptr->EnableRecordTimeOfRuntime();
#endif
// PP-OCRv2
if (c_ocr_version_tag == fni::PPOCRVersion::OCR_V2) {
if (c_have_cls_model) {
auto c_cls_model_ptr = new ocr::Classifier(
c_cls_model_file, c_cls_params_file, c_cls_runtime_option);
INITIALIZED_OR_RETURN(c_cls_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_cls_model_ptr->EnableRecordTimeOfRuntime();
#endif
auto c_ppocr_pipeline_ptr = new pipeline::PPOCRv2(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr);
// PP-OCRv2 handler with cls model
auto c_ppocr_handler_ptr =
new fni::PPOCRHandler(c_det_model_ptr, c_cls_model_ptr,
c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(
c_ppocr_handler_ptr); // native handler context
} else {
auto c_ppocr_pipeline_ptr =
new pipeline::PPOCRv2(c_det_model_ptr, c_rec_model_ptr);
// PP-OCRv2 handler without cls model
auto c_ppocr_handler_ptr = new fni::PPOCRHandler(
c_det_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(
c_ppocr_handler_ptr); // native handler context
}
} // PP-OCRv3
else if (c_ocr_version_tag == fni::PPOCRVersion::OCR_V3) {
if (c_have_cls_model) {
auto c_cls_model_ptr = new ocr::Classifier(
c_cls_model_file, c_cls_params_file, c_cls_runtime_option);
INITIALIZED_OR_RETURN(c_cls_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_cls_model_ptr->EnableRecordTimeOfRuntime();
#endif
auto c_ppocr_pipeline_ptr = new pipeline::PPOCRv3(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr);
// PP-OCRv3 handler with cls model
auto c_ppocr_handler_ptr =
new fni::PPOCRHandler(c_det_model_ptr, c_cls_model_ptr,
c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(
c_ppocr_handler_ptr); // native handler context
} else {
auto c_ppocr_pipeline_ptr =
new pipeline::PPOCRv3(c_det_model_ptr, c_rec_model_ptr);
// PP-OCRv3 handler without cls model
auto c_ppocr_handler_ptr = new fni::PPOCRHandler(
c_det_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(
c_ppocr_handler_ptr); // native handler context
}
}
return 0;
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context,
jobject argb8888_bitmap, jboolean save_image,
jstring save_path, jboolean rendering) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_ppocr_handler_ptr = reinterpret_cast<fni::PPOCRHandler *>(cxx_context);
vision::OCRResult c_result;
c_ppocr_handler_ptr->Predict(&c_bgr, &c_result);
LOGD("OCR Result: %s", c_result.Str().c_str());
PERF_TIME_OF_RUNTIME(c_ppocr_handler_ptr->detector_, -1)
PERF_TIME_OF_RUNTIME(c_ppocr_handler_ptr->classifier_, -1)
PERF_TIME_OF_RUNTIME(c_ppocr_handler_ptr->recognizer_, -1)
if (rendering) {
fni::RenderingOCR(env, c_bgr, c_result, argb8888_bitmap,
save_image, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::OCR);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_ppocr_handler_ptr = reinterpret_cast<fni::PPOCRHandler *>(cxx_context);
if (!c_ppocr_handler_ptr->ReleaseAllocatedOCRMemories()) {
delete c_ppocr_handler_ptr;
return JNI_FALSE;
}
delete c_ppocr_handler_ptr;
LOGD("[End] Release PPOCRHandler in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif


@@ -0,0 +1,91 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
fastdeploy::RuntimeOption NewCxxRuntimeOption(
JNIEnv *env, jobject j_runtime_option_obj) {
// WARN: Please make sure 'j_runtime_option_obj' param is a
// ref of Java RuntimeOption.
// Field signatures of Java RuntimeOption.
// (1) mCpuThreadNum int: I
// (2) mEnableLiteFp16 boolean: Z
// (3) mLitePowerMode LitePowerMode: com/baidu/paddle/fastdeploy/LitePowerMode
// (4) mLiteOptimizedModelDir String: java/lang/String
const jclass j_runtime_option_clazz = env->FindClass(
"com/baidu/paddle/fastdeploy/RuntimeOption");
const jfieldID j_cpu_num_thread_id = env->GetFieldID(
j_runtime_option_clazz, "mCpuThreadNum", "I");
const jfieldID j_enable_lite_fp16_id = env->GetFieldID(
j_runtime_option_clazz, "mEnableLiteFp16", "Z");
const jfieldID j_lite_power_mode_id = env->GetFieldID(
j_runtime_option_clazz, "mLitePowerMode",
"Lcom/baidu/paddle/fastdeploy/LitePowerMode;");
const jfieldID j_lite_optimized_model_dir_id = env->GetFieldID(
j_runtime_option_clazz, "mLiteOptimizedModelDir", "Ljava/lang/String;");
// mLitePowerMode is Java Enum.
const jclass j_lite_power_mode_clazz = env->FindClass(
"com/baidu/paddle/fastdeploy/LitePowerMode");
const jmethodID j_lite_power_mode_ordinal_id = env->GetMethodID(
j_lite_power_mode_clazz, "ordinal", "()I");
fastdeploy::RuntimeOption c_runtime_option;
c_runtime_option.UseCpu();
c_runtime_option.UseLiteBackend();
if (!env->IsInstanceOf(j_runtime_option_obj, j_runtime_option_clazz)) {
return c_runtime_option;
}
// Get values from Java RuntimeOption.
jint j_cpu_num_thread = env->GetIntField(
j_runtime_option_obj, j_cpu_num_thread_id);
jboolean j_enable_lite_fp16 = env->GetBooleanField(
j_runtime_option_obj, j_enable_lite_fp16_id);
jstring j_lite_optimized_model_dir = static_cast<jstring>(
env->GetObjectField(j_runtime_option_obj, j_lite_optimized_model_dir_id));
jobject j_lite_power_mode_obj = env->GetObjectField(
j_runtime_option_obj, j_lite_power_mode_id);
jint j_lite_power_mode = env->CallIntMethod(
j_lite_power_mode_obj, j_lite_power_mode_ordinal_id);
int c_cpu_num_thread = static_cast<int>(j_cpu_num_thread);
bool c_enable_lite_fp16 = static_cast<bool>(j_enable_lite_fp16);
fastdeploy::LitePowerMode c_lite_power_mode =
static_cast<fastdeploy::LitePowerMode>(j_lite_power_mode);
std::string c_lite_optimized_model_dir =
ConvertTo<std::string>(env, j_lite_optimized_model_dir);
// Setup Cxx RuntimeOption
c_runtime_option.SetCpuThreadNum(c_cpu_num_thread);
c_runtime_option.SetLitePowerMode(c_lite_power_mode);
c_runtime_option.SetLiteOptimizedModelDir(c_lite_optimized_model_dir);
if (c_enable_lite_fp16) {
c_runtime_option.EnableLiteFP16();
}
env->DeleteLocalRef(j_runtime_option_clazz);
env->DeleteLocalRef(j_lite_power_mode_clazz);
return c_runtime_option;
}
} // namespace jni
} // namespace fastdeploy
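NewCxxRuntimeOption is what the model bindings call to translate the Java-side RuntimeOption into its C++ counterpart before constructing a model. A sketch of how the PaddleSegModel binding added by this commit (its source file is listed in the JNI CMakeLists above, but its hunk is not shown here) might use it; the JNI symbol and Java package are assumptions:

extern "C" JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_bindNative(
    JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
    jstring config_file, jobject runtime_option) {
  auto option = fastdeploy::jni::NewCxxRuntimeOption(env, runtime_option);
  auto model = new fastdeploy::vision::segmentation::PaddleSegModel(
      fastdeploy::jni::ConvertTo<std::string>(env, model_file),
      fastdeploy::jni::ConvertTo<std::string>(env, params_file),
      fastdeploy::jni::ConvertTo<std::string>(env, config_file), option);
  INITIALIZED_OR_RETURN(model)
  return reinterpret_cast<jlong>(model);  // released later via a releaseNative call
}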


@@ -13,6 +13,20 @@
 // limitations under the License.
 #pragma once
-#include "bitmap_jni.h"   // NOLINT
-#include "convert_jni.h"  // NOLINT
-#include "utils_jni.h"    // NOLINT
+#include <jni.h>     // NOLINT
+#include <string>    // NOLINT
+#include <vector>    // NOLINT
+#include "fastdeploy/vision.h"
+namespace fastdeploy {
+namespace jni {
+/// Create a C++ RuntimeOption from Java RuntimeOption.
+fastdeploy::RuntimeOption NewCxxRuntimeOption(
+    JNIEnv *env, jobject j_runtime_option_obj);
+} // namespace jni
+} // namespace fastdeploy

View File

@@ -0,0 +1,51 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/vision/classification/classification_utils_jni.h"
namespace fastdeploy {
namespace jni {
/// Rendering ClassifyResult to ARGB888Bitmap
void RenderingClassify(JNIEnv *env, const cv::Mat &c_bgr,
const vision::ClassifyResult &c_result,
jobject argb8888_bitmap, bool save_image,
float score_threshold, jstring save_path) {
if (!c_result.scores.empty()) {
auto t = GetCurrentTime();
cv::Mat c_vis_im;
if (AssetsLoader::IsClassificationLabelsLoaded()) {
c_vis_im = vision::VisClassification(
c_bgr, c_result, AssetsLoader::GetClassificationLabels(), 5,
score_threshold, 1.0f);
} else {
c_vis_im =
vision::VisClassification(c_bgr, c_result, 5, score_threshold, 1.0f);
}
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, save_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_bgr);
}
}
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,31 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
void RenderingClassify(JNIEnv *env, const cv::Mat &c_bgr,
const vision::ClassifyResult &c_result,
jobject argb8888_bitmap, bool save_image,
float score_threshold, jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,95 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/classification/classification_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace classification = fastdeploy::vision::classification;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jobject runtime_option, jstring label_file) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_config_file = fni::ConvertTo<std::string>(env, config_file);
auto c_label_file = fni::ConvertTo<std::string>(env, label_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new classification::PaddleClasModel(
c_model_file, c_params_file, c_config_file, c_runtime_option);
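  // Bail out early if the native model failed to initialize.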
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
if (!c_label_file.empty()) {
fni::AssetsLoader::LoadClassificationLabels(c_label_file);
}
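  // Enable FlyCV to speed up image preprocessing.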
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr); // native model context
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context, jobject argb8888_bitmap,
jboolean save_image, jstring save_path, jboolean rendering,
jfloat score_threshold) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr =
reinterpret_cast<classification::PaddleClasModel *>(cxx_context);
vision::ClassifyResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingClassify(env, c_bgr, c_result, argb8888_bitmap, save_image,
score_threshold, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::CLASSIFY);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
  if (cxx_context == 0) {
    return JNI_FALSE;
  }
  auto c_model_ptr =
      reinterpret_cast<classification::PaddleClasModel *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release PaddleClasModel in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
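
By JNI naming convention, the symbols above bind to methods of com.baidu.paddle.fastdeploy.vision.classification.PaddleClasModel. A hedged sketch of what the Java-side declarations might look like, with parameter types mapped from the JNI signatures; the result and option class import paths are inferred, not taken from this diff.

// Hypothetical Java-side declarations inferred from the JNI symbol names and signatures above.
package com.baidu.paddle.fastdeploy.vision.classification;

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.ClassifyResult;

public class PaddleClasModel {
    // Returns a native model context (a C++ pointer packed into a long), or 0 on failure.
    private native long bindNative(String modelFile, String paramsFile, String configFile,
                                   RuntimeOption runtimeOption, String labelFile);

    // Runs prediction on an ARGB_8888 bitmap; when rendering is true the bitmap is
    // redrawn in place. The returned object is built by NewJavaResultFromCxx.
    private native ClassifyResult predictNative(long cxxContext, Bitmap argb8888Bitmap,
                                                boolean saveImage, String savePath,
                                                boolean rendering, float scoreThreshold);

    // Deletes the native model created by bindNative.
    private native boolean releaseNative(long cxxContext);
}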

View File

@@ -0,0 +1,51 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/vision/detection/detection_utils_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Rendering DetectionResult to ARGB888Bitmap
void RenderingDetection(JNIEnv *env, const cv::Mat &c_bgr,
const vision::DetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
float score_threshold, jstring save_path) {
if (!c_result.boxes.empty()) {
auto t = GetCurrentTime();
cv::Mat c_vis_im;
if (AssetsLoader::IsDetectionLabelsLoaded()) {
c_vis_im = vision::VisDetection(c_bgr, c_result,
AssetsLoader::GetDetectionLabels(),
score_threshold, 2, 1.0f);
} else {
c_vis_im =
vision::VisDetection(c_bgr, c_result, score_threshold, 2, 1.0f);
}
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, save_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_vis_im);
}
}
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,31 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
void RenderingDetection(JNIEnv *env, const cv::Mat &c_bgr,
const vision::DetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
float score_threshold, jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,96 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/detection/detection_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace detection = fastdeploy::vision::detection;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jobject runtime_option, jstring label_file) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_config_file = fni::ConvertTo<std::string>(env, config_file);
auto c_label_file = fni::ConvertTo<std::string>(env, label_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new detection::PicoDet(
c_model_file, c_params_file, c_config_file, c_runtime_option);
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
if (!c_label_file.empty()) {
fni::AssetsLoader::LoadDetectionLabels(c_label_file);
}
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr);
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context, jobject argb8888_bitmap,
jboolean save_image, jstring save_path, jboolean rendering,
jfloat score_threshold) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<detection::PicoDet *>(cxx_context);
vision::DetectionResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingDetection(env, c_bgr, c_result, argb8888_bitmap, save_image,
score_threshold, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::DETECTION);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<detection::PicoDet *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release PicoDet in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
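
The PicoDet binding mirrors PaddleClasModel, and the three natives give the bind, predict, release lifecycle that every model in this PR follows. A hypothetical Java wrapper illustrating that flow; everything except the native symbol names and signatures is illustrative.

// Hypothetical PicoDet wrapper showing the bind -> predict -> release lifecycle.
package com.baidu.paddle.fastdeploy.vision.detection;

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.DetectionResult;

public class PicoDet {
    private native long bindNative(String modelFile, String paramsFile, String configFile,
                                   RuntimeOption runtimeOption, String labelFile);
    private native DetectionResult predictNative(long cxxContext, Bitmap argb8888Bitmap,
                                                 boolean saveImage, String savePath,
                                                 boolean rendering, float scoreThreshold);
    private native boolean releaseNative(long cxxContext);

    // Illustrative one-shot helper: bind, run a single frame, then free the native model.
    public DetectionResult runOnce(String modelFile, String paramsFile, String configFile,
                                   String labelFile, RuntimeOption option, Bitmap bitmap) {
        long ctx = bindNative(modelFile, paramsFile, configFile, option, labelFile);
        if (ctx == 0) {
            return null;  // native side failed to initialize (INITIALIZED_OR_RETURN)
        }
        try {
            // rendering=true redraws the detections into the bitmap in place
            return predictNative(ctx, bitmap, false, "", true, 0.45f);
        } finally {
            releaseNative(ctx);  // deletes the C++ detection::PicoDet created by bindNative
        }
    }
}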

View File

@@ -0,0 +1,43 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/vision/facedet/facedet_utils_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Rendering FaceDetectionResult to ARGB888Bitmap
void RenderingFaceDetection(JNIEnv *env, const cv::Mat &c_bgr,
const vision::FaceDetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
jstring save_path) {
if (!c_result.boxes.empty()) {
auto t = GetCurrentTime();
auto c_vis_im = vision::VisFaceDetection(c_bgr, c_result, 2, 0.5f);
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, save_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_vis_im);
}
}
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,31 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
void RenderingFaceDetection(JNIEnv *env, const cv::Mat &c_bgr,
const vision::FaceDetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,98 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/facedet/facedet_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace facedet = fastdeploy::vision::facedet;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_SCRFD_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jobject runtime_option) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new facedet::SCRFD(
c_model_file, c_params_file, c_runtime_option, fastdeploy::ModelFormat::PADDLE);
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
  // Set up the input size, e.g. (320, 320), in H x W order
const jclass j_scrfd_clazz = env->GetObjectClass(thiz);
const jfieldID j_scrfd_size_id = env->GetFieldID(
j_scrfd_clazz, "mSize", "[I");
jintArray j_scrfd_size = reinterpret_cast<jintArray>(
env->GetObjectField(thiz, j_scrfd_size_id));
const auto c_size = fni::ConvertTo<std::vector<int>>(env, j_scrfd_size);
  c_model_ptr->size = c_size;  // e.g. (320, 320)
  env->DeleteLocalRef(j_scrfd_clazz);  // release local refs
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr);
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_SCRFD_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context,
jobject argb8888_bitmap, jboolean save_image,
jstring save_path, jboolean rendering) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<facedet::SCRFD *>(cxx_context);
vision::FaceDetectionResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingFaceDetection(env, c_bgr, c_result, argb8888_bitmap,
save_image, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::FACE_DETECTION);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_SCRFD_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<facedet::SCRFD *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release SCRFD in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
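
SCRFD (like YOLOv5Face below) takes no config file; instead, bindNative reads the input size back from the Java object's mSize int[] field. A hedged sketch of the inferred Java side; import paths and the default size are assumptions.

// Hypothetical Java side of SCRFD: bindNative reads the int[] mSize field ("[I")
// and copies it into the C++ model's `size` member before inference.
package com.baidu.paddle.fastdeploy.vision.facedet;

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.FaceDetectionResult;

public class SCRFD {
    protected int[] mSize = {320, 320};  // H x W, consumed once at bind time

    private native long bindNative(String modelFile, String paramsFile,
                                   RuntimeOption runtimeOption);

    private native FaceDetectionResult predictNative(long cxxContext, Bitmap argb8888Bitmap,
                                                     boolean saveImage, String savePath,
                                                     boolean rendering);

    private native boolean releaseNative(long cxxContext);
}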

View File

@@ -0,0 +1,98 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/facedet/facedet_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace facedet = fastdeploy::vision::facedet;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_YOLOv5Face_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jobject runtime_option) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new facedet::YOLOv5Face(
c_model_file, c_params_file, c_runtime_option, fastdeploy::ModelFormat::PADDLE);
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
  // Set up the input size, e.g. (320, 320), in H x W order
const jclass j_yolov5face_clazz = env->GetObjectClass(thiz);
const jfieldID j_yolov5face_size_id = env->GetFieldID(
j_yolov5face_clazz, "mSize", "[I");
jintArray j_yolov5face_size = reinterpret_cast<jintArray>(
env->GetObjectField(thiz, j_yolov5face_size_id));
const auto c_size = fni::ConvertTo<std::vector<int>>(env, j_yolov5face_size);
  c_model_ptr->size = c_size;  // e.g. (320, 320)
  env->DeleteLocalRef(j_yolov5face_clazz);  // release local refs
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr);
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_YOLOv5Face_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context,
jobject argb8888_bitmap, jboolean save_image,
jstring save_path, jboolean rendering) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<facedet::YOLOv5Face *>(cxx_context);
vision::FaceDetectionResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingFaceDetection(env, c_bgr, c_result, argb8888_bitmap,
save_image, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::FACE_DETECTION);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_facedet_YOLOv5Face_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<facedet::YOLOv5Face *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release YOLOv5Face in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

File diff suppressed because it is too large

View File

@@ -0,0 +1,35 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
namespace fastdeploy {
namespace jni {
bool AllocateJavaResultFromCxx(
JNIEnv *env, jobject j_result_obj, void *cxx_result,
vision::ResultType type);
bool AllocateCxxResultFromJava(
JNIEnv *env, jobject j_result_obj, void *cxx_result,
vision::ResultType type);
jobject NewJavaResultFromCxx(
JNIEnv *env, void *cxx_result, vision::ResultType type);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,90 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/segmentation/segmentation_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace segmentation = fastdeploy::vision::segmentation;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jobject runtime_option) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_config_file = fni::ConvertTo<std::string>(env, config_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new segmentation::PaddleSegModel(
c_model_file, c_params_file, c_config_file, c_runtime_option);
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr);
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context, jobject argb8888_bitmap,
jboolean save_image, jstring save_path, jboolean rendering, jfloat weight) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<segmentation::PaddleSegModel *>(cxx_context);
vision::SegmentationResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingSegmentation(env, c_bgr, c_result, argb8888_bitmap,
save_image, weight, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::SEGMENTATION);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<segmentation::PaddleSegModel *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release PaddleSegModel in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
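
The segmentation binding mirrors the others except that predictNative forwards a blend weight (rather than a score threshold) to RenderingSegmentation/VisSegmentation. A hedged sketch of the inferred Java declarations; import paths are assumptions.

// Hypothetical Java-side declarations for PaddleSegModel, inferred from the JNI symbols above;
// `weight` is the overlay blend factor forwarded to VisSegmentation.
package com.baidu.paddle.fastdeploy.vision.segmentation;

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.SegmentationResult;

public class PaddleSegModel {
    private native long bindNative(String modelFile, String paramsFile,
                                   String configFile, RuntimeOption runtimeOption);

    private native SegmentationResult predictNative(long cxxContext, Bitmap argb8888Bitmap,
                                                    boolean saveImage, String savePath,
                                                    boolean rendering, float weight);

    private native boolean releaseNative(long cxxContext);
}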

View File

@@ -0,0 +1,42 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/vision/segmentation/segmentation_utils_jni.h"
namespace fastdeploy {
namespace jni {
/// Rendering SegmentationResult to ARGB888Bitmap
void RenderingSegmentation(JNIEnv *env, const cv::Mat &c_bgr,
const vision::SegmentationResult &c_result,
jobject argb8888_bitmap, bool save_image,
float weight, jstring save_path) {
if (!c_result.label_map.empty()) {
auto t = GetCurrentTime();
auto c_vis_im = vision::VisSegmentation(c_bgr, c_result, weight);
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, save_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_vis_im);
}
}
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,33 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Rendering SegmentationResult to ARGB888Bitmap
void RenderingSegmentation(JNIEnv *env, const cv::Mat &c_bgr,
const vision::SegmentationResult &c_result,
jobject argb8888_bitmap, bool save_image,
float weight, jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,164 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
#ifdef __cplusplus
extern "C" {
#endif
/// VisClassification
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visClassificationNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jfloat score_threshold, jfloat font_size,
jobjectArray labels) {
vision::ClassifyResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::CLASSIFY)) {
return JNI_FALSE;
}
// Get labels from Java [n]
auto c_labels = fni::ConvertTo<std::vector<std::string>>(env, labels);
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
cv::Mat c_vis_im;
if (!c_labels.empty()) {
c_vis_im = vision::VisClassification(c_bgr, c_result, c_labels, 5,
score_threshold, font_size);
} else {
c_vis_im = vision::VisClassification(c_bgr, c_result, 5, score_threshold,
font_size);
}
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
/// VisDetection
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visDetectionNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jfloat score_threshold, jint line_size,
jfloat font_size, jobjectArray labels) {
vision::DetectionResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::DETECTION)) {
return JNI_FALSE;
}
// Get labels from Java [n]
auto c_labels = fni::ConvertTo<std::vector<std::string>>(env, labels);
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
cv::Mat c_vis_im;
if (!c_labels.empty()) {
c_vis_im = vision::VisDetection(c_bgr, c_result, c_labels, score_threshold,
line_size, font_size);
} else {
c_vis_im = vision::VisDetection(c_bgr, c_result, score_threshold, line_size,
font_size);
}
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
/// VisOcr
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visOcrNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result) {
vision::OCRResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::OCR)) {
return JNI_FALSE;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
auto c_vis_im = vision::VisOcr(c_bgr, c_result);
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visSegmentationNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jfloat weight) {
vision::SegmentationResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::SEGMENTATION)) {
return JNI_FALSE;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
auto c_vis_im = vision::VisSegmentation(c_bgr, c_result, weight);
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visFaceDetectionNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jint line_size, jfloat font_size) {
vision::FaceDetectionResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::FACE_DETECTION)) {
return JNI_FALSE;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
auto c_vis_im = vision::VisFaceDetection(c_bgr, c_result, line_size, font_size);
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
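
Because the second JNI argument here is jclass rather than jobject, these entry points bind to static native methods of a Java Visualize class that redraw the ARGB_8888 bitmap in place. A hedged sketch of those declarations; the result class names are inferred from the ResultType passed to AllocateCxxResultFromJava and are assumptions.

// Hypothetical Java-side Visualize class inferred from the jclass parameter and the
// JNI symbol names above; all methods return false if result conversion or bitmap access fails.
package com.baidu.paddle.fastdeploy.vision;

import android.graphics.Bitmap;

public class Visualize {
    public static native boolean visClassificationNative(
        Bitmap argb8888Bitmap, ClassifyResult result,
        float scoreThreshold, float fontSize, String[] labels);

    public static native boolean visDetectionNative(
        Bitmap argb8888Bitmap, DetectionResult result,
        float scoreThreshold, int lineSize, float fontSize, String[] labels);

    public static native boolean visOcrNative(Bitmap argb8888Bitmap, OCRResult result);

    public static native boolean visSegmentationNative(
        Bitmap argb8888Bitmap, SegmentationResult result, float weight);

    public static native boolean visFaceDetectionNative(
        Bitmap argb8888Bitmap, FaceDetectionResult result, int lineSize, float fontSize);
}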

View File

@@ -1,418 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
namespace pipeline {
enum PPOCRVersion {
OCR_V1 = 0,
OCR_V2 = 1,
OCR_V3 = 2
};
/// Handle the native PP-OCR pipeline resources.
class PPOCRHandler {
public:
PPOCRHandler() = default;
PPOCRHandler(fastdeploy::vision::ocr::DBDetector *det_model,
fastdeploy::vision::ocr::Classifier *cls_model,
fastdeploy::vision::ocr::Recognizer *rec_model,
fastdeploy::pipeline::PPOCRv2 *ppocr_v2) :
detector_(det_model), classifier_(cls_model),
recognizer_(rec_model), ppocr_v2_(ppocr_v2) {
if (detector_ != nullptr && classifier_ != nullptr
&& recognizer_ != nullptr && ppocr_v2_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler(fastdeploy::vision::ocr::DBDetector *det_model,
fastdeploy::vision::ocr::Recognizer *rec_model,
fastdeploy::pipeline::PPOCRv2 *ppocr_v2) :
detector_(det_model), recognizer_(rec_model),
ppocr_v2_(ppocr_v2) {
if (detector_ != nullptr && recognizer_ != nullptr
&& ppocr_v2_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler(fastdeploy::vision::ocr::DBDetector *det_model,
fastdeploy::vision::ocr::Classifier *cls_model,
fastdeploy::vision::ocr::Recognizer *rec_model,
fastdeploy::pipeline::PPOCRv3 *ppocr_v3) :
detector_(det_model), classifier_(cls_model),
recognizer_(rec_model), ppocr_v3_(ppocr_v3) {
if (detector_ != nullptr && classifier_ != nullptr
&& recognizer_ != nullptr && ppocr_v3_ != nullptr) {
initialized_ = true;
}
}
PPOCRHandler(fastdeploy::vision::ocr::DBDetector *det_model,
fastdeploy::vision::ocr::Recognizer *rec_model,
fastdeploy::pipeline::PPOCRv3 *ppocr_v3) :
detector_(det_model), recognizer_(rec_model),
ppocr_v3_(ppocr_v3) {
if (detector_ != nullptr && recognizer_ != nullptr
&& ppocr_v3_ != nullptr) {
initialized_ = true;
}
}
void SetPPOCRVersion(PPOCRVersion version_tag) {
ppocr_version_tag_ = version_tag;
}
bool Predict(cv::Mat* img, fastdeploy::vision::OCRResult* result) {
if (ppocr_version_tag_ == PPOCRVersion::OCR_V2) {
if (ppocr_v2_ != nullptr) {
return ppocr_v2_->Predict(img, result);
}
return false;
} else if (ppocr_version_tag_ == PPOCRVersion::OCR_V3) {
if (ppocr_v3_ != nullptr) {
return ppocr_v3_->Predict(img, result);
}
return false;
}
return false;
}
bool Initialized() {
if (!initialized_) {
return false;
}
if (ppocr_version_tag_ == PPOCRVersion::OCR_V2) {
if (ppocr_v2_ != nullptr) {
return ppocr_v2_->Initialized();
}
return false;
} else if (ppocr_version_tag_ == PPOCRVersion::OCR_V3) {
if (ppocr_v3_ != nullptr) {
return ppocr_v3_->Initialized();
}
return false;
}
return false;
}
// Call init manually if you want to release the allocated
// PP-OCRv2/v3's memory by 'new' operator via 'delete'.
bool ReleaseAllocatedOCRMemories() {
if (!Initialized()) {
return false;
}
if (detector_ != nullptr) {
delete detector_;
detector_ = nullptr;
LOGD("[End] Release DBDetector in native !");
}
if (classifier_ != nullptr) {
delete classifier_;
classifier_ = nullptr;
LOGD("[End] Release Classifier in native !");
}
if (recognizer_ != nullptr) {
delete recognizer_;
recognizer_ = nullptr;
LOGD("[End] Release Recognizer in native !");
}
if (ppocr_v2_ != nullptr) {
delete ppocr_v2_;
ppocr_v2_ = nullptr;
LOGD("[End] Release PP-OCRv2 in native !");
}
if (ppocr_v3_ != nullptr) {
delete ppocr_v3_;
ppocr_v3_ = nullptr;
LOGD("[End] Release PP-OCRv3 in native !");
}
initialized_ = false;
return true;
}
void PrintPPOCRHandlerTimeOfRuntime() const {
if ((detector_ != nullptr) && (detector_->EnabledRecordTimeOfRuntime())) {
auto det_info_of_runtime = detector_->PrintStatisInfoOfRuntime();
LOGD("[Det] Avg runtime costs %f ms", det_info_of_runtime["avg_time"] * 1000.0f);
}
if ((classifier_ != nullptr) && (classifier_->EnabledRecordTimeOfRuntime())) {
auto cls_info_of_runtime = classifier_->PrintStatisInfoOfRuntime();
LOGD("[Cls] Avg runtime costs %f ms", cls_info_of_runtime["avg_time"] * 1000.0f);
}
if ((recognizer_ != nullptr) && (recognizer_->EnabledRecordTimeOfRuntime())) {
auto rec_info_of_runtime = recognizer_->PrintStatisInfoOfRuntime();
LOGD("[Rec] Avg runtime costs %f ms", rec_info_of_runtime["avg_time"] * 1000.0f);
}
}
public:
fastdeploy::vision::ocr::DBDetector *detector_ = nullptr;
fastdeploy::vision::ocr::Classifier *classifier_ = nullptr;
fastdeploy::vision::ocr::Recognizer *recognizer_ = nullptr;
fastdeploy::pipeline::PPOCRv2 *ppocr_v2_ = nullptr;
fastdeploy::pipeline::PPOCRv3 *ppocr_v3_ = nullptr;
private:
bool initialized_ = false;
PPOCRVersion ppocr_version_tag_ = PPOCRVersion::OCR_V2;
};
} // namespace pipeline
} // namespace jni
} // namespace fastdeploy
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_bindNative(
JNIEnv *env,
jobject thiz,
jint ocr_version_tag,
jstring det_model_file,
jstring det_params_file,
jstring cls_model_file,
jstring cls_params_file,
jstring rec_model_file,
jstring rec_params_file,
jstring rec_label_path,
jint det_cpu_num_thread,
jint cls_cpu_num_thread,
jint rec_cpu_num_thread,
jboolean det_enable_lite_fp16,
jboolean cls_enable_lite_fp16,
jboolean rec_enable_lite_fp16,
jint det_lite_power_mode,
jint cls_lite_power_mode,
jint rec_lite_power_mode,
jstring det_lite_optimized_model_dir,
jstring cls_lite_optimized_model_dir,
jstring rec_lite_optimized_model_dir,
jboolean det_enable_record_time_of_runtime,
jboolean cls_enable_record_time_of_runtime,
jboolean rec_enable_record_time_of_runtime,
jboolean have_cls_model) {
auto c_ocr_version_tag = static_cast<
fastdeploy::jni::pipeline::PPOCRVersion>(ocr_version_tag);
if (c_ocr_version_tag == fastdeploy::jni::pipeline::PPOCRVersion::OCR_V1) {
LOGE("Not support for PPOCRVersion::OCR_V1 now!");
return 0;
}
// TODO(qiuyanjun): Allows users to set model parameters, such as det_db_box_thresh,
// det_db_thresh, use_dilation, etc. These parameters should be passed in via JNI.
std::string c_det_model_file = fastdeploy::jni::ConvertTo<std::string>(env, det_model_file);
std::string c_det_params_file = fastdeploy::jni::ConvertTo<std::string>(env, det_params_file);
std::string c_cls_model_file = fastdeploy::jni::ConvertTo<std::string>(env, cls_model_file);
std::string c_cls_params_file = fastdeploy::jni::ConvertTo<std::string>(env, cls_params_file);
std::string c_rec_model_file = fastdeploy::jni::ConvertTo<std::string>(env, rec_model_file);
std::string c_rec_params_file = fastdeploy::jni::ConvertTo<std::string>(env, rec_params_file);
std::string c_rec_label_path = fastdeploy::jni::ConvertTo<std::string>(env, rec_label_path);
auto c_det_cpu_num_thread = static_cast<int>(det_cpu_num_thread);
auto c_cls_cpu_num_thread = static_cast<int>(cls_cpu_num_thread);
auto c_rec_cpu_num_thread = static_cast<int>(rec_cpu_num_thread);
auto c_det_enable_lite_fp16 = static_cast<bool>(det_enable_lite_fp16);
auto c_cls_enable_lite_fp16 = static_cast<bool>(cls_enable_lite_fp16);
auto c_rec_enable_lite_fp16 = static_cast<bool>(rec_enable_lite_fp16);
auto c_det_lite_power_mode = static_cast<fastdeploy::LitePowerMode>(det_lite_power_mode);
auto c_cls_lite_power_mode = static_cast<fastdeploy::LitePowerMode>(cls_lite_power_mode);
auto c_rec_lite_power_mode = static_cast<fastdeploy::LitePowerMode>(rec_lite_power_mode);
std::string c_det_lite_optimized_model_dir = fastdeploy::jni::ConvertTo<std::string>(
env, det_lite_optimized_model_dir);
std::string c_cls_lite_optimized_model_dir = fastdeploy::jni::ConvertTo<std::string>(
env, cls_lite_optimized_model_dir);
std::string c_rec_lite_optimized_model_dir = fastdeploy::jni::ConvertTo<std::string>(
env, rec_lite_optimized_model_dir);
auto c_det_enable_record_time_of_runtime = static_cast<bool>(det_enable_record_time_of_runtime);
auto c_cls_enable_record_time_of_runtime = static_cast<bool>(cls_enable_record_time_of_runtime);
auto c_rec_enable_record_time_of_runtime = static_cast<bool>(rec_enable_record_time_of_runtime);
auto c_have_cls_model = static_cast<bool>(have_cls_model);
// RuntimeOptions in native
fastdeploy::RuntimeOption c_det_option;
c_det_option.UseCpu();
c_det_option.UseLiteBackend();
c_det_option.SetCpuThreadNum(c_det_cpu_num_thread);
c_det_option.SetLitePowerMode(c_det_lite_power_mode);
c_det_option.SetLiteOptimizedModelDir(c_det_lite_optimized_model_dir);
if (c_det_enable_lite_fp16) {
c_det_option.EnableLiteFP16();
}
fastdeploy::RuntimeOption c_cls_option;
c_cls_option.UseCpu();
c_cls_option.UseLiteBackend();
c_cls_option.SetCpuThreadNum(c_cls_cpu_num_thread);
c_cls_option.SetLitePowerMode(c_cls_lite_power_mode);
c_cls_option.SetLiteOptimizedModelDir(c_cls_lite_optimized_model_dir);
if (c_cls_enable_lite_fp16) {
c_cls_option.EnableLiteFP16();
}
fastdeploy::RuntimeOption c_rec_option;
c_rec_option.UseCpu();
c_rec_option.UseLiteBackend();
c_rec_option.SetCpuThreadNum(c_rec_cpu_num_thread);
c_rec_option.SetLitePowerMode(c_rec_lite_power_mode);
c_rec_option.SetLiteOptimizedModelDir(c_rec_lite_optimized_model_dir);
if (c_rec_enable_lite_fp16) {
c_rec_option.EnableLiteFP16();
}
// Init PP-OCR pipeline
auto c_det_model_ptr = new fastdeploy::vision::ocr::DBDetector(
c_det_model_file, c_det_params_file, c_det_option);
auto c_rec_model_ptr = new fastdeploy::vision::ocr::Recognizer(
c_rec_model_file, c_rec_params_file, c_rec_label_path, c_rec_option);
// Enable record Runtime time costs.
if (c_det_enable_record_time_of_runtime) {
c_det_model_ptr->EnableRecordTimeOfRuntime();
}
if (c_rec_enable_record_time_of_runtime) {
c_rec_model_ptr->EnableRecordTimeOfRuntime();
}
// PP-OCRv2
if (c_ocr_version_tag == fastdeploy::jni::pipeline::PPOCRVersion::OCR_V2) {
if (c_have_cls_model) {
auto c_cls_model_ptr = new fastdeploy::vision::ocr::Classifier(
c_cls_model_file, c_cls_params_file, c_cls_option);
if (c_cls_enable_record_time_of_runtime) {
c_cls_model_ptr->EnableRecordTimeOfRuntime();
}
auto c_ppocr_pipeline_ptr = new fastdeploy::pipeline::PPOCRv2(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr);
// PP-OCRv2 handler with cls model
auto c_ppocr_handler_ptr = new fastdeploy::jni::pipeline::PPOCRHandler(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_ppocr_handler_ptr); // native handler context
} else {
auto c_ppocr_pipeline_ptr = new fastdeploy::pipeline::PPOCRv2(
c_det_model_ptr, c_rec_model_ptr);
// PP-OCRv2 handler without cls model
auto c_ppocr_handler_ptr = new fastdeploy::jni::pipeline::PPOCRHandler(
c_det_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_ppocr_handler_ptr); // native handler context
}
} // PP-OCRv3
else if (c_ocr_version_tag == fastdeploy::jni::pipeline::PPOCRVersion::OCR_V3) {
if (c_have_cls_model) {
auto c_cls_model_ptr = new fastdeploy::vision::ocr::Classifier(
c_cls_model_file, c_cls_params_file, c_cls_option);
if (c_cls_enable_record_time_of_runtime) {
c_cls_model_ptr->EnableRecordTimeOfRuntime();
}
auto c_ppocr_pipeline_ptr = new fastdeploy::pipeline::PPOCRv3(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr);
// PP-OCRv3 handler with cls model
auto c_ppocr_handler_ptr = new fastdeploy::jni::pipeline::PPOCRHandler(
c_det_model_ptr, c_cls_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_ppocr_handler_ptr); // native handler context
} else {
auto c_ppocr_pipeline_ptr = new fastdeploy::pipeline::PPOCRv3(
c_det_model_ptr, c_rec_model_ptr);
// PP-OCRv3 handler without cls model
auto c_ppocr_handler_ptr = new fastdeploy::jni::pipeline::PPOCRHandler(
c_det_model_ptr, c_rec_model_ptr, c_ppocr_pipeline_ptr);
c_ppocr_handler_ptr->SetPPOCRVersion(c_ocr_version_tag);
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_ppocr_handler_ptr); // native handler context
}
}
return 0;
}
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_predictNative(
JNIEnv *env, jobject thiz, jlong native_handler_context,
jobject argb8888_bitmap, jboolean saved, jstring saved_image_path,
jboolean rendering) {
if (native_handler_context == 0) {
return 0;
}
cv::Mat c_bgr;
auto t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return 0;
}
LOGD("Read from bitmap costs %f ms", fastdeploy::jni::GetElapsedTime(t));
auto c_ppocr_handler_ptr = reinterpret_cast<fastdeploy::jni::pipeline::PPOCRHandler*>(
native_handler_context);
auto c_result_ptr = new fastdeploy::vision::OCRResult();
t = fastdeploy::jni::GetCurrentTime();
if (!c_ppocr_handler_ptr->Predict(&c_bgr, c_result_ptr)) {
delete c_result_ptr;
return 0;
}
LOGD("Predict from native costs %f ms", fastdeploy::jni::GetElapsedTime(t));
// DEBUG: show result
LOGD("Result: %s", c_result_ptr->Str().c_str());
c_ppocr_handler_ptr->PrintPPOCRHandlerTimeOfRuntime();
if (!c_result_ptr->boxes.empty() && rendering) {
t = fastdeploy::jni::GetCurrentTime();
auto c_vis_im = fastdeploy::vision::VisOcr(c_bgr, *(c_result_ptr));
LOGD("Visualize from native costs %f ms", fastdeploy::jni::GetElapsedTime(t));
// Rendering to bitmap
t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
delete c_result_ptr;
return 0;
}
LOGD("Write to bitmap from native costs %f ms",
fastdeploy::jni::GetElapsedTime(t));
std::string c_saved_image_path =
fastdeploy::jni::ConvertTo<std::string>(env, saved_image_path);
if (!c_saved_image_path.empty() && saved) {
t = fastdeploy::jni::GetCurrentTime();
cv::imwrite(c_saved_image_path, c_vis_im);
LOGD("Save image from native costs %f ms, path: %s",
fastdeploy::jni::GetElapsedTime(t), c_saved_image_path.c_str());
}
}
// WARN: need to release it manually in Java !
return reinterpret_cast<jlong>(c_result_ptr); // native result context
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_pipeline_PPOCRBase_releaseNative(
JNIEnv *env, jobject thiz, jlong native_handler_context) {
if (native_handler_context == 0) {
return JNI_FALSE;
}
auto c_ppocr_handler_ptr = reinterpret_cast<fastdeploy::jni::pipeline::PPOCRHandler*>(
native_handler_context);
if (!c_ppocr_handler_ptr->ReleaseAllocatedOCRMemories()) {
delete c_ppocr_handler_ptr;
return JNI_FALSE;
}
delete c_ppocr_handler_ptr;
LOGD("[End] Release PPOCRHandler in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

View File

@@ -1,148 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni.h" // NOLINT
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jint cpu_num_thread, jboolean enable_lite_fp16,
jint lite_power_mode, jstring lite_optimized_model_dir,
jboolean enable_record_time_of_runtime, jstring label_file) {
std::string c_model_file =
fastdeploy::jni::ConvertTo<std::string>(env, model_file);
std::string c_params_file =
fastdeploy::jni::ConvertTo<std::string>(env, params_file);
std::string c_config_file =
fastdeploy::jni::ConvertTo<std::string>(env, config_file);
std::string c_label_file =
fastdeploy::jni::ConvertTo<std::string>(env, label_file);
std::string c_lite_optimized_model_dir =
fastdeploy::jni::ConvertTo<std::string>(env, lite_optimized_model_dir);
auto c_cpu_num_thread = static_cast<int>(cpu_num_thread);
auto c_enable_lite_fp16 = static_cast<bool>(enable_lite_fp16);
auto c_lite_power_mode =
static_cast<fastdeploy::LitePowerMode>(lite_power_mode);
fastdeploy::RuntimeOption c_option;
c_option.UseCpu();
c_option.UseLiteBackend();
c_option.SetCpuThreadNum(c_cpu_num_thread);
c_option.SetLitePowerMode(c_lite_power_mode);
c_option.SetLiteOptimizedModelDir(c_lite_optimized_model_dir);
if (c_enable_lite_fp16) {
c_option.EnableLiteFP16();
}
auto c_model_ptr = new fastdeploy::vision::classification::PaddleClasModel(
c_model_file, c_params_file, c_config_file, c_option);
// Enable record Runtime time costs.
if (enable_record_time_of_runtime) {
c_model_ptr->EnableRecordTimeOfRuntime();
}
// Load classification labels if label path is not empty.
if ((!fastdeploy::jni::AssetsLoaderUtils::IsClassificationLabelsLoaded()) &&
(!c_label_file.empty())) {
fastdeploy::jni::AssetsLoaderUtils::LoadClassificationLabels(c_label_file);
}
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_model_ptr); // native model context
}
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_predictNative(
JNIEnv *env, jobject thiz, jlong native_model_context,
jobject argb8888_bitmap, jboolean saved, jstring saved_image_path,
jfloat score_threshold, jboolean rendering) {
if (native_model_context == 0) {
return 0;
}
cv::Mat c_bgr;
auto t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return 0;
}
LOGD("Read from bitmap costs %f ms", fastdeploy::jni::GetElapsedTime(t));
auto c_model_ptr =
reinterpret_cast<fastdeploy::vision::classification::PaddleClasModel *>(
native_model_context);
auto c_result_ptr = new fastdeploy::vision::ClassifyResult();
t = fastdeploy::jni::GetCurrentTime();
if (!c_model_ptr->Predict(&c_bgr, c_result_ptr, 100)) {
delete c_result_ptr;
return 0;
}
LOGD("Predict from native costs %f ms", fastdeploy::jni::GetElapsedTime(t));
if (c_model_ptr->EnabledRecordTimeOfRuntime()) {
auto info_of_runtime = c_model_ptr->PrintStatisInfoOfRuntime();
LOGD("Avg runtime costs %f ms", info_of_runtime["avg_time"] * 1000.0f);
}
if (!c_result_ptr->scores.empty() && rendering) {
t = fastdeploy::jni::GetCurrentTime();
cv::Mat c_vis_im;
if (fastdeploy::jni::AssetsLoaderUtils::IsClassificationLabelsLoaded()) {
c_vis_im = fastdeploy::vision::VisClassification(
c_bgr, *(c_result_ptr),
fastdeploy::jni::AssetsLoaderUtils::GetClassificationLabels(), 5,
score_threshold, 1.0f);
} else {
c_vis_im = fastdeploy::vision::VisClassification(
c_bgr, *(c_result_ptr), 5, score_threshold, 1.0f);
}
LOGD("Visualize from native costs %f ms",
fastdeploy::jni::GetElapsedTime(t));
// Rendering to bitmap
t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
delete c_result_ptr;
return 0;
}
LOGD("Write to bitmap from native costs %f ms",
fastdeploy::jni::GetElapsedTime(t));
std::string c_saved_image_path =
fastdeploy::jni::ConvertTo<std::string>(env, saved_image_path);
if (!c_saved_image_path.empty() && saved) {
t = fastdeploy::jni::GetCurrentTime();
cv::imwrite(c_saved_image_path, c_bgr);
LOGD("Save image from native costs %f ms, path: %s",
fastdeploy::jni::GetElapsedTime(t), c_saved_image_path.c_str());
}
}
// WARN: need to release it manually in Java !
return reinterpret_cast<jlong>(c_result_ptr); // native result context
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_classification_PaddleClasModel_releaseNative(
JNIEnv *env, jobject thiz, jlong native_model_context) {
auto c_model_ptr =
reinterpret_cast<fastdeploy::vision::classification::PaddleClasModel *>(
native_model_context);
if (c_model_ptr->EnabledRecordTimeOfRuntime()) {
auto info_of_runtime = c_model_ptr->PrintStatisInfoOfRuntime();
LOGD("[End] Avg runtime costs %f ms",
info_of_runtime["avg_time"] * 1000.0f);
}
delete c_model_ptr;
LOGD("[End] Release PaddleClasModel in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

View File

@@ -1,149 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni.h" // NOLINT
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jint cpu_num_thread, jboolean enable_lite_fp16,
jint lite_power_mode, jstring lite_optimized_model_dir,
jboolean enable_record_time_of_runtime, jstring label_file) {
std::string c_model_file =
fastdeploy::jni::ConvertTo<std::string>(env, model_file);
std::string c_params_file =
fastdeploy::jni::ConvertTo<std::string>(env, params_file);
std::string c_config_file =
fastdeploy::jni::ConvertTo<std::string>(env, config_file);
std::string c_label_file =
fastdeploy::jni::ConvertTo<std::string>(env, label_file);
std::string c_lite_optimized_model_dir =
fastdeploy::jni::ConvertTo<std::string>(env, lite_optimized_model_dir);
auto c_cpu_num_thread = static_cast<int>(cpu_num_thread);
auto c_enable_lite_fp16 = static_cast<bool>(enable_lite_fp16);
auto c_lite_power_mode =
static_cast<fastdeploy::LitePowerMode>(lite_power_mode);
fastdeploy::RuntimeOption c_option;
c_option.UseCpu();
c_option.UseLiteBackend();
c_option.SetCpuThreadNum(c_cpu_num_thread);
c_option.SetLitePowerMode(c_lite_power_mode);
c_option.SetLiteOptimizedModelDir(c_lite_optimized_model_dir);
if (c_enable_lite_fp16) {
c_option.EnableLiteFP16();
}
auto c_model_ptr = new fastdeploy::vision::detection::PicoDet(
c_model_file, c_params_file, c_config_file, c_option);
// Enable record Runtime time costs.
if (enable_record_time_of_runtime) {
c_model_ptr->EnableRecordTimeOfRuntime();
}
// Load detection labels if label path is not empty.
if ((!fastdeploy::jni::AssetsLoaderUtils::IsDetectionLabelsLoaded()) &&
(!c_label_file.empty())) {
fastdeploy::jni::AssetsLoaderUtils::LoadDetectionLabels(c_label_file);
}
// WARN: need to release manually in Java !
return reinterpret_cast<jlong>(c_model_ptr); // native model context
}
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_predictNative(
JNIEnv *env, jobject thiz, jlong native_model_context,
jobject argb8888_bitmap, jboolean saved, jstring saved_image_path,
jfloat score_threshold, jboolean rendering) {
if (native_model_context == 0) {
return 0;
}
cv::Mat c_bgr;
auto t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return 0;
}
LOGD("Read from bitmap costs %f ms", fastdeploy::jni::GetElapsedTime(t));
auto c_model_ptr = reinterpret_cast<fastdeploy::vision::detection::PicoDet *>(
native_model_context);
auto c_result_ptr = new fastdeploy::vision::DetectionResult();
t = fastdeploy::jni::GetCurrentTime();
if (!c_model_ptr->Predict(&c_bgr, c_result_ptr)) {
delete c_result_ptr;
return 0;
}
LOGD("Predict from native costs %f ms", fastdeploy::jni::GetElapsedTime(t));
if (c_model_ptr->EnabledRecordTimeOfRuntime()) {
auto info_of_runtime = c_model_ptr->PrintStatisInfoOfRuntime();
LOGD("Avg runtime costs %f ms", info_of_runtime["avg_time"] * 1000.0f);
}
if (!c_result_ptr->boxes.empty() && rendering) {
t = fastdeploy::jni::GetCurrentTime();
cv::Mat c_vis_im;
if (fastdeploy::jni::AssetsLoaderUtils::IsDetectionLabelsLoaded()) {
c_vis_im = fastdeploy::vision::VisDetection(
c_bgr, *(c_result_ptr),
fastdeploy::jni::AssetsLoaderUtils::GetDetectionLabels(),
score_threshold, 2, 1.0f);
} else {
c_vis_im = fastdeploy::vision::VisDetection(c_bgr, *(c_result_ptr),
score_threshold, 2, 1.0f);
}
LOGD("Visualize from native costs %f ms",
fastdeploy::jni::GetElapsedTime(t));
// Rendering to bitmap
t = fastdeploy::jni::GetCurrentTime();
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
delete c_result_ptr;
return 0;
}
LOGD("Write to bitmap from native costs %f ms",
fastdeploy::jni::GetElapsedTime(t));
std::string c_saved_image_path =
fastdeploy::jni::ConvertTo<std::string>(env, saved_image_path);
if (!c_saved_image_path.empty() && saved) {
t = fastdeploy::jni::GetCurrentTime();
cv::imwrite(c_saved_image_path, c_vis_im);
LOGD("Save image from native costs %f ms, path: %s",
fastdeploy::jni::GetElapsedTime(t), c_saved_image_path.c_str());
}
}
// WARN: need to release it manually in Java !
return reinterpret_cast<jlong>(c_result_ptr); // native result context
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_detection_PicoDet_releaseNative(
JNIEnv *env, jobject thiz, jlong native_model_context) {
if (native_model_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<fastdeploy::vision::detection::PicoDet *>(
native_model_context);
if (c_model_ptr->EnabledRecordTimeOfRuntime()) {
auto info_of_runtime = c_model_ptr->PrintStatisInfoOfRuntime();
LOGD("[End] Avg runtime costs %f ms",
info_of_runtime["avg_time"] * 1000.0f);
}
delete c_model_ptr;
LOGD("[End] Release PicoDet in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

@@ -1,267 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <android/bitmap.h> // NOLINT
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni.h" // NOLINT
#ifdef __cplusplus
extern "C" {
#endif
/// Native DetectionResult for vision::DetectionResult.
JNIEXPORT jint JNICALL
Java_com_baidu_paddle_fastdeploy_vision_DetectionResult_copyBoxesNumFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return 0;
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::DetectionResult *>(
native_result_context);
return static_cast<jint>(c_result_ptr->boxes.size());
}
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_DetectionResult_copyBoxesFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::DetectionResult *>(
native_result_context);
if (c_result_ptr->boxes.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->boxes.size());
float buffer[len * 4];
const auto &boxes = c_result_ptr->boxes;
for (int64_t i = 0; i < len; ++i) {
std::memcpy((buffer + i * 4), (boxes.at(i).data()), 4 * sizeof(float));
}
return fastdeploy::jni::ConvertTo<jfloatArray>(env, buffer, len * 4);
}
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_DetectionResult_copyScoresFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::DetectionResult *>(
native_result_context);
if (c_result_ptr->scores.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->scores.size());
const float *buffer = static_cast<float *>(c_result_ptr->scores.data());
return fastdeploy::jni::ConvertTo<jfloatArray>(env, buffer, len);
}
JNIEXPORT jintArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_DetectionResult_copyLabelIdsFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::DetectionResult *>(
native_result_context);
if (c_result_ptr->label_ids.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->label_ids.size());
const int *buffer = static_cast<int *>(c_result_ptr->label_ids.data());
return fastdeploy::jni::ConvertTo<jintArray>(env, buffer, len);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_DetectionResult_releaseNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return JNI_FALSE;
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::DetectionResult *>(
native_result_context);
delete c_result_ptr;
LOGD("Release DetectionResult in native !");
return JNI_TRUE;
}
/// Native ClassifyResult for vision::ClassifyResult.
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_ClassifyResult_copyScoresFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::ClassifyResult *>(
native_result_context);
if (c_result_ptr->scores.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->scores.size());
const float *buffer = static_cast<float *>(c_result_ptr->scores.data());
return fastdeploy::jni::ConvertTo<jfloatArray>(env, buffer, len);
}
JNIEXPORT jintArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_ClassifyResult_copyLabelIdsFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::ClassifyResult *>(
native_result_context);
if (c_result_ptr->label_ids.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->label_ids.size());
const int *buffer = static_cast<int *>(c_result_ptr->label_ids.data());
return fastdeploy::jni::ConvertTo<jintArray>(env, buffer, len);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_ClassifyResult_releaseNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return JNI_FALSE;
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::ClassifyResult *>(
native_result_context);
delete c_result_ptr;
LOGD("Release ClassifyResult in native !");
return JNI_TRUE;
}
/// Native OCRResult for vision::OCRResult.
JNIEXPORT jint JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyBoxesNumFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return 0;
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
return static_cast<jint>(c_result_ptr->boxes.size());
}
JNIEXPORT jintArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyBoxesFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
if (c_result_ptr->boxes.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->boxes.size());
int buffer[len * 8];
const auto &boxes = c_result_ptr->boxes;
for (int64_t i = 0; i < len; ++i) {
std::memcpy((buffer + i * 8), (boxes.at(i).data()), 8 * sizeof(int));
}
  return fastdeploy::jni::ConvertTo<jintArray>(env, buffer, len * 8);  // 8 ints per box
}
JNIEXPORT jobjectArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyTextFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
if (c_result_ptr->text.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->text.size());
jclass jstr_clazz = env->FindClass("java/lang/String");
jobjectArray jstr_array = env->NewObjectArray(
      static_cast<jsize>(len), jstr_clazz, env->NewStringUTF(""));
for (int64_t i = 0; i < len; ++i) {
env->SetObjectArrayElement(jstr_array, static_cast<jsize>(i),
fastdeploy::jni::ConvertTo<jstring>(
env, c_result_ptr->text.at(i)));
}
return jstr_array;
}
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyRecScoresFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
if (c_result_ptr->rec_scores.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->rec_scores.size());
const float *buffer = static_cast<float *>(c_result_ptr->rec_scores.data());
return fastdeploy::jni::ConvertTo<jfloatArray>(env, buffer, len);
}
JNIEXPORT jfloatArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyClsScoresFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
if (c_result_ptr->cls_scores.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->cls_scores.size());
const float *buffer = static_cast<float *>(c_result_ptr->cls_scores.data());
return fastdeploy::jni::ConvertTo<jfloatArray>(env, buffer, len);
}
JNIEXPORT jintArray JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_copyClsLabelsFromNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return {};
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
if (c_result_ptr->cls_labels.empty()) {
return {};
}
const auto len = static_cast<int64_t>(c_result_ptr->cls_labels.size());
const int *buffer = static_cast<int *>(c_result_ptr->cls_labels.data());
return fastdeploy::jni::ConvertTo<jintArray>(env, buffer, len);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_OCRResult_releaseNative(
JNIEnv *env, jobject thiz, jlong native_result_context) {
if (native_result_context == 0) {
return JNI_FALSE;
}
auto c_result_ptr = reinterpret_cast<fastdeploy::vision::OCRResult *>(
native_result_context);
delete c_result_ptr;
LOGD("Release OCRResult in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
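All of these copy*FromNative helpers return flat, row-major buffers, and the removed Java result classes reshaped them on arrival. A short sketch of that reshaping for detection boxes ([n*4] floats into [n][4] rows), mirroring the removed DetectionResult.setBoxes shown further down in this diff (it ran inside that class next to the native declarations; java.util.Arrays supplies copyOfRange). OCR boxes follow the same scheme with 8 ints per box.

// Reshape the flat [n*4] box buffer handed back from native into [n][4] rows.
float[] boxesBuffer = copyBoxesFromNative(nativeResultContext);
int boxesNum = boxesBuffer.length / 4;
float[][] boxes = new float[boxesNum][4];
for (int i = 0; i < boxesNum; ++i) {
    boxes[i] = Arrays.copyOfRange(boxesBuffer, i * 4, (i + 1) * 4);
}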

@@ -1,248 +0,0 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h>
#include "fastdeploy_jni.h"
#ifdef __cplusplus
extern "C" {
#endif
/// VisDetection
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visDetectionNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap, jobjectArray boxes,
jfloatArray scores, jintArray label_ids, jfloat score_threshold,
jint line_size, jfloat font_size, jobjectArray labels) {
// Draw DetectionResult to ARGB8888 Bitmap
int len = env->GetArrayLength(boxes);
if ((len == 0) || (len != env->GetArrayLength(scores)) ||
(len != env->GetArrayLength(label_ids))) {
return JNI_FALSE;
}
fastdeploy::vision::DetectionResult c_result;
c_result.Resize(len);
// boxes [n,4]
bool check_validation = true;
for (int i = 0; i < len; ++i) {
auto j_box =
reinterpret_cast<jfloatArray>(env->GetObjectArrayElement(boxes, i));
if (env->GetArrayLength(j_box) == 4) {
jfloat *j_box_ptr = env->GetFloatArrayElements(j_box, nullptr);
std::memcpy(c_result.boxes[i].data(), j_box_ptr, 4 * sizeof(float));
env->ReleaseFloatArrayElements(j_box, j_box_ptr, 0);
} else {
check_validation = false;
break;
}
}
if (!check_validation) {
return JNI_FALSE;
}
// scores [n]
jfloat *j_scores_ptr = env->GetFloatArrayElements(scores, nullptr);
std::memcpy(c_result.scores.data(), j_scores_ptr, len * sizeof(float));
env->ReleaseFloatArrayElements(scores, j_scores_ptr, 0);
// label_ids [n]
jint *j_label_ids_ptr = env->GetIntArrayElements(label_ids, nullptr);
std::memcpy(c_result.label_ids.data(), j_label_ids_ptr, len * sizeof(int));
env->ReleaseIntArrayElements(label_ids, j_label_ids_ptr, 0);
// Get labels from Java [n]
std::vector<std::string> c_labels;
int label_len = env->GetArrayLength(labels);
if (label_len > 0) {
for (int i = 0; i < label_len; ++i) {
auto j_str =
reinterpret_cast<jstring>(env->GetObjectArrayElement(labels, i));
c_labels.push_back(fastdeploy::jni::ConvertTo<std::string>(env, j_str));
}
}
cv::Mat c_bgr;
// From ARGB Bitmap to BGR
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
cv::Mat c_vis_im;
if (!c_labels.empty()) {
c_vis_im = fastdeploy::vision::VisDetection(
c_bgr, c_result, c_labels, score_threshold, line_size, font_size);
} else {
c_vis_im = fastdeploy::vision::VisDetection(
c_bgr, c_result, score_threshold, line_size, font_size);
}
// Rendering to bitmap
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
/// VisClassification
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visClassificationNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap, jfloatArray scores,
jintArray label_ids, jfloat score_threshold, jfloat font_size,
jobjectArray labels) {
// Draw ClassifyResult to ARGB8888 Bitmap
int len = env->GetArrayLength(scores);
if ((len == 0) || (len != env->GetArrayLength(label_ids))) {
return JNI_FALSE;
}
fastdeploy::vision::ClassifyResult c_result;
c_result.scores.resize(len);
c_result.label_ids.resize(len);
// scores [n]
jfloat *j_scores_ptr = env->GetFloatArrayElements(scores, nullptr);
std::memcpy(c_result.scores.data(), j_scores_ptr, len * sizeof(float));
env->ReleaseFloatArrayElements(scores, j_scores_ptr, 0);
// label_ids [n]
jint *j_label_ids_ptr = env->GetIntArrayElements(label_ids, nullptr);
std::memcpy(c_result.label_ids.data(), j_label_ids_ptr, len * sizeof(int));
env->ReleaseIntArrayElements(label_ids, j_label_ids_ptr, 0);
// Get labels from Java [n]
std::vector<std::string> c_labels;
int label_len = env->GetArrayLength(labels);
if (label_len > 0) {
for (int i = 0; i < label_len; ++i) {
auto j_str =
reinterpret_cast<jstring>(env->GetObjectArrayElement(labels, i));
c_labels.push_back(fastdeploy::jni::ConvertTo<std::string>(env, j_str));
}
}
cv::Mat c_bgr;
// From ARGB Bitmap to BGR
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
cv::Mat c_vis_im;
if (!c_labels.empty()) {
c_vis_im = fastdeploy::vision::VisClassification(
c_bgr, c_result, c_labels, 5, score_threshold, font_size);
} else {
c_vis_im = fastdeploy::vision::VisClassification(
c_bgr, c_result, 5, score_threshold, font_size);
}
// Rendering to bitmap
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
/// VisOcr
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visOcrNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobjectArray boxes, jobjectArray text, jfloatArray rec_scores,
jfloatArray cls_scores, jintArray cls_labels) {
// Draw OCRResult to ARGB8888 Bitmap
int len = env->GetArrayLength(boxes);
if ((len == 0) || (len != env->GetArrayLength(text)) ||
(len != env->GetArrayLength(rec_scores)) ||
(len != env->GetArrayLength(cls_scores)) ||
(len != env->GetArrayLength(cls_labels))) {
return JNI_FALSE;
}
fastdeploy::vision::OCRResult c_result;
c_result.boxes.resize(len);
c_result.rec_scores.resize(len);
c_result.cls_scores.resize(len);
c_result.cls_labels.resize(len);
// boxes [n,8]
bool check_validation = true;
for (int i = 0; i < len; ++i) {
auto j_box =
reinterpret_cast<jintArray>(env->GetObjectArrayElement(boxes, i));
if (env->GetArrayLength(j_box) == 8) {
jint *j_box_ptr = env->GetIntArrayElements(j_box, nullptr);
std::memcpy(c_result.boxes[i].data(), j_box_ptr, 8 * sizeof(int));
env->ReleaseIntArrayElements(j_box, j_box_ptr, 0);
} else {
check_validation = false;
break;
}
}
if (!check_validation) {
return JNI_FALSE;
}
// text [n]
int text_len = env->GetArrayLength(text);
if (text_len > 0) {
for (int i = 0; i < text_len; ++i) {
auto j_str =
reinterpret_cast<jstring>(env->GetObjectArrayElement(text, i));
c_result.text.push_back(fastdeploy::jni::ConvertTo<std::string>(env, j_str));
}
}
// rec_scores [n]
jfloat *j_rec_scores_ptr = env->GetFloatArrayElements(rec_scores, nullptr);
std::memcpy(c_result.rec_scores.data(), j_rec_scores_ptr, len * sizeof(float));
env->ReleaseFloatArrayElements(rec_scores, j_rec_scores_ptr, 0);
// cls_scores [n]
jfloat *j_cls_scores_ptr = env->GetFloatArrayElements(cls_scores, nullptr);
std::memcpy(c_result.cls_scores.data(), j_cls_scores_ptr, len * sizeof(float));
env->ReleaseFloatArrayElements(cls_scores, j_cls_scores_ptr, 0);
// cls_labels [n]
jint *j_cls_label_ptr = env->GetIntArrayElements(cls_labels, nullptr);
std::memcpy(c_result.cls_labels.data(), j_cls_label_ptr, len * sizeof(int));
env->ReleaseIntArrayElements(cls_labels, j_cls_label_ptr, 0);
cv::Mat c_bgr;
// From ARGB Bitmap to BGR
if (!fastdeploy::jni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
auto c_vis_im = fastdeploy::vision::VisOcr(c_bgr, c_result);
// Rendering to bitmap
if (!fastdeploy::jni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

@@ -3,14 +3,12 @@ package com.baidu.paddle.fastdeploy;
public class RuntimeOption { public class RuntimeOption {
public int mCpuThreadNum = 1; public int mCpuThreadNum = 1;
public boolean mEnableLiteFp16 = false; public boolean mEnableLiteFp16 = false;
public boolean mEnableRecordTimeOfRuntime = false;
public LitePowerMode mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND; public LitePowerMode mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
public String mLiteOptimizedModelDir = ""; public String mLiteOptimizedModelDir = "";
public RuntimeOption() { public RuntimeOption() {
mCpuThreadNum = 1; mCpuThreadNum = 1;
mEnableLiteFp16 = false; mEnableLiteFp16 = false;
mEnableRecordTimeOfRuntime = false;
mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND; mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
mLiteOptimizedModelDir = ""; mLiteOptimizedModelDir = "";
} }
@@ -39,10 +37,6 @@ public class RuntimeOption {
mLiteOptimizedModelDir = modelDir; mLiteOptimizedModelDir = modelDir;
} }
public void enableRecordTimeOfRuntime() {
mEnableRecordTimeOfRuntime = true;
}
// Helpers: parse lite power mode from string // Helpers: parse lite power mode from string
public static LitePowerMode parseLitePowerModeFromString(String modeStr) { public static LitePowerMode parseLitePowerModeFromString(String modeStr) {
if (modeStr.equalsIgnoreCase("LITE_POWER_HIGH")) { if (modeStr.equalsIgnoreCase("LITE_POWER_HIGH")) {

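After this change RuntimeOption carries only the Paddle Lite knobs shown above (thread count, FP16, power mode, optimized model dir) plus the power-mode parser; the per-model record-time flag is gone. A minimal configuration sketch using only the members visible in this diff; any setter methods beyond these public fields are not shown here and are not assumed:

RuntimeOption option = new RuntimeOption();
option.mCpuThreadNum = 4;          // public field per this diff
option.mEnableLiteFp16 = true;     // request Paddle Lite FP16 kernels where supported
option.mLitePowerMode =
    RuntimeOption.parseLitePowerModeFromString("LITE_POWER_HIGH");
option.mLiteOptimizedModelDir = ""; // empty string keeps the default behavior (assumption)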
@@ -7,9 +7,10 @@ import com.baidu.paddle.fastdeploy.vision.OCRResult;
import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector;
import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; import com.baidu.paddle.fastdeploy.vision.ocr.Classifier;
import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
public class PPOCRBase { public class PPOCRBase {
protected long mNativeHandlerContext = 0; // Context from native. protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false; protected boolean mInitialized = false;
public PPOCRBase() { public PPOCRBase() {
@@ -46,10 +47,10 @@ public class PPOCRBase {
public boolean release() { public boolean release() {
mInitialized = false; mInitialized = false;
if (mNativeHandlerContext == 0) { if (mCxxContext == 0) {
return false; return false;
} }
return releaseNative(mNativeHandlerContext); return releaseNative(mCxxContext);
} }
public boolean initialized() { public boolean initialized() {
@@ -58,26 +59,45 @@ public class PPOCRBase {
// Predict without image saving and bitmap rendering. // Predict without image saving and bitmap rendering.
public OCRResult predict(Bitmap ARGB8888Bitmap) { public OCRResult predict(Bitmap ARGB8888Bitmap) {
if (mNativeHandlerContext == 0) { if (mCxxContext == 0) {
return new OCRResult(); return new OCRResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new OCRResult(predictNative( OCRResult result = predictNative(mCxxContext, ARGB8888Bitmap,
mNativeHandlerContext, ARGB8888Bitmap, false, false, "", false);
"", false)); if (result == null) {
return new OCRResult();
}
return result;
}
public OCRResult predict(Bitmap ARGB8888Bitmap, boolean rendering) {
if (mCxxContext == 0) {
return new OCRResult();
}
// Only support ARGB8888 bitmap in native now.
OCRResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering);
if (result == null) {
return new OCRResult();
}
return result;
} }
    // Predict with image saving and bitmap rendering (will cost more time) // Predict with image saving and bitmap rendering (will cost more time)
public OCRResult predict(Bitmap ARGB8888Bitmap, public OCRResult predict(Bitmap ARGB8888Bitmap,
String savedImagePath) { String savedImagePath) {
// scoreThreshold is for visualizing only. // scoreThreshold is for visualizing only.
if (mNativeHandlerContext == 0) { if (mCxxContext == 0) {
return new OCRResult(); return new OCRResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new OCRResult(predictNative( OCRResult result = predictNative(mCxxContext, ARGB8888Bitmap,
mNativeHandlerContext, ARGB8888Bitmap, true, true, savedImagePath, true);
savedImagePath, true)); if (result == null) {
return new OCRResult();
}
return result;
} }
public boolean init_(DBDetector detModel, public boolean init_(DBDetector detModel,
@@ -85,7 +105,7 @@ public class PPOCRBase {
Recognizer recModel, Recognizer recModel,
PPOCRVersion OCRVersionTag) { PPOCRVersion OCRVersionTag) {
if (!mInitialized) { if (!mInitialized) {
mNativeHandlerContext = bindNative( mCxxContext = bindNative(
OCRVersionTag.ordinal(), OCRVersionTag.ordinal(),
detModel.mModelFile, detModel.mModelFile,
detModel.mParamsFile, detModel.mParamsFile,
@@ -94,30 +114,18 @@ public class PPOCRBase {
recModel.mModelFile, recModel.mModelFile,
recModel.mParamsFile, recModel.mParamsFile,
recModel.mLabelPath, recModel.mLabelPath,
detModel.mRuntimeOption.mCpuThreadNum, detModel.mRuntimeOption,
clsModel.mRuntimeOption.mCpuThreadNum, clsModel.mRuntimeOption,
recModel.mRuntimeOption.mCpuThreadNum, recModel.mRuntimeOption,
detModel.mRuntimeOption.mEnableLiteFp16,
clsModel.mRuntimeOption.mEnableLiteFp16,
recModel.mRuntimeOption.mEnableLiteFp16,
detModel.mRuntimeOption.mLitePowerMode.ordinal(),
clsModel.mRuntimeOption.mLitePowerMode.ordinal(),
recModel.mRuntimeOption.mLitePowerMode.ordinal(),
detModel.mRuntimeOption.mLiteOptimizedModelDir,
clsModel.mRuntimeOption.mLiteOptimizedModelDir,
recModel.mRuntimeOption.mLiteOptimizedModelDir,
detModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
clsModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
recModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
clsModel.initialized()); clsModel.initialized());
if (mNativeHandlerContext != 0) { if (mCxxContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
} else { } else {
// release current native context and bind a new one. // release current native context and bind a new one.
if (release()) { if (release()) {
mNativeHandlerContext = bindNative( mCxxContext = bindNative(
OCRVersionTag.ordinal(), OCRVersionTag.ordinal(),
detModel.mModelFile, detModel.mModelFile,
detModel.mParamsFile, detModel.mParamsFile,
@@ -126,23 +134,11 @@ public class PPOCRBase {
recModel.mModelFile, recModel.mModelFile,
recModel.mParamsFile, recModel.mParamsFile,
recModel.mLabelPath, recModel.mLabelPath,
detModel.mRuntimeOption.mCpuThreadNum, detModel.mRuntimeOption,
clsModel.mRuntimeOption.mCpuThreadNum, clsModel.mRuntimeOption,
recModel.mRuntimeOption.mCpuThreadNum, recModel.mRuntimeOption,
detModel.mRuntimeOption.mEnableLiteFp16,
clsModel.mRuntimeOption.mEnableLiteFp16,
recModel.mRuntimeOption.mEnableLiteFp16,
detModel.mRuntimeOption.mLitePowerMode.ordinal(),
clsModel.mRuntimeOption.mLitePowerMode.ordinal(),
recModel.mRuntimeOption.mLitePowerMode.ordinal(),
detModel.mRuntimeOption.mLiteOptimizedModelDir,
clsModel.mRuntimeOption.mLiteOptimizedModelDir,
recModel.mRuntimeOption.mLiteOptimizedModelDir,
detModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
clsModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
recModel.mRuntimeOption.mEnableRecordTimeOfRuntime,
clsModel.initialized()); clsModel.initialized());
if (mNativeHandlerContext != 0) { if (mCxxContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
@@ -160,32 +156,20 @@ public class PPOCRBase {
String recModelFile, String recModelFile,
String recParamsFile, String recParamsFile,
String recLabelPath, String recLabelPath,
int detCpuNumThread, RuntimeOption detRuntimeOption,
int clsCpuNumThread, RuntimeOption clsRuntimeOption,
int recCpuNumThread, RuntimeOption recRuntimeOption,
boolean detEnableLiteFp16,
boolean clsEnableLiteFp16,
boolean recEnableLiteFp16,
int detLitePowerMode,
int clsLitePowerMode,
int recLitePowerMode,
String detLiteOptimizedModelDir,
String clsLiteOptimizedModelDir,
String recLiteOptimizedModelDir,
boolean detEnableRecordTimeOfRuntime,
boolean clsEnableRecordTimeOfRuntime,
boolean recEnableRecordTimeOfRuntime,
boolean haveClsModel); boolean haveClsModel);
// Call prediction from native context. // Call prediction from native context with rendering.
private native long predictNative(long nativeHandlerContext, private native OCRResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap, Bitmap ARGB8888Bitmap,
boolean saved, boolean saveImage,
String savedImagePath, String savePath,
boolean rendering); boolean rendering);
// Release buffers allocated in native context. // Release buffers allocated in native context.
private native boolean releaseNative(long nativeHandlerContext); private native boolean releaseNative(long CxxContext);
// Initializes at the beginning. // Initializes at the beginning.
static { static {

@@ -11,41 +11,19 @@ public class ClassifyResult {
mInitialized = false; mInitialized = false;
} }
public ClassifyResult(long nativeResultContext) {
mInitialized = copyAllFromNativeContext(nativeResultContext);
}
public boolean initialized() { public boolean initialized() {
return mInitialized; return mInitialized;
} }
private void setScores(@NonNull float[] scoresBuffer) { public void setScores(@NonNull float[] scoresBuffer) {
if (scoresBuffer.length > 0) { if (scoresBuffer.length > 0) {
mScores = scoresBuffer.clone(); mScores = scoresBuffer.clone();
} }
} }
private void setLabelIds(@NonNull int[] labelIdsBuffer) { public void setLabelIds(@NonNull int[] labelIdsBuffer) {
if (labelIdsBuffer.length > 0) { if (labelIdsBuffer.length > 0) {
mLabelIds = labelIdsBuffer.clone(); mLabelIds = labelIdsBuffer.clone();
} }
} }
private boolean copyAllFromNativeContext(long nativeResultContext) {
if (nativeResultContext == 0) {
return false;
}
setScores(copyScoresFromNative(nativeResultContext));
setLabelIds(copyLabelIdsFromNative(nativeResultContext));
// WARN: must release ctx.
return releaseNative(nativeResultContext);
}
// Fetch native buffers from native context.
private native float[] copyScoresFromNative(long nativeResultContext);
private native int[] copyLabelIdsFromNative(long nativeResultContext);
private native boolean releaseNative(long nativeResultContext);
} }

@@ -2,12 +2,9 @@ package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull; import android.support.annotation.NonNull;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import java.util.Arrays; import java.util.Arrays;
public class DetectionResult { public class DetectionResult {
// Not support MaskRCNN now.
public float[][] mBoxes; // [n,4] public float[][] mBoxes; // [n,4]
public float[] mScores; // [n] public float[] mScores; // [n]
public int[] mLabelIds; // [n] public int[] mLabelIds; // [n]
@@ -17,29 +14,11 @@ public class DetectionResult {
mInitialized = false; mInitialized = false;
} }
public DetectionResult(long nativeResultContext) {
mInitialized = copyAllFromNativeContext(nativeResultContext);
}
public boolean initialized() { public boolean initialized() {
return mInitialized; return mInitialized;
} }
// Setup results from native buffers. public void setBoxes(@NonNull float[] boxesBuffer) {
private boolean copyAllFromNativeContext(long nativeResultContext) {
if (nativeResultContext == 0) {
return false;
}
if (copyBoxesNumFromNative(nativeResultContext) > 0) {
setBoxes(copyBoxesFromNative(nativeResultContext));
setScores(copyScoresFromNative(nativeResultContext));
setLabelIds(copyLabelIdsFromNative(nativeResultContext));
}
// WARN: must release ctx.
return releaseNative(nativeResultContext);
}
private void setBoxes(@NonNull float[] boxesBuffer) {
int boxesNum = boxesBuffer.length / 4; int boxesNum = boxesBuffer.length / 4;
if (boxesNum > 0) { if (boxesNum > 0) {
mBoxes = new float[boxesNum][4]; mBoxes = new float[boxesNum][4];
@@ -50,31 +29,15 @@ public class DetectionResult {
} }
} }
private void setScores(@NonNull float[] scoresBuffer) { public void setScores(@NonNull float[] scoresBuffer) {
if (scoresBuffer.length > 0) { if (scoresBuffer.length > 0) {
mScores = scoresBuffer.clone(); mScores = scoresBuffer.clone();
} }
} }
private void setLabelIds(@NonNull int[] labelIdsBuffer) { public void setLabelIds(@NonNull int[] labelIdsBuffer) {
if (labelIdsBuffer.length > 0) { if (labelIdsBuffer.length > 0) {
mLabelIds = labelIdsBuffer.clone(); mLabelIds = labelIdsBuffer.clone();
} }
} }
// Fetch native buffers from native context.
private native int copyBoxesNumFromNative(long nativeResultContext);
private native float[] copyBoxesFromNative(long nativeResultContext);
private native float[] copyScoresFromNative(long nativeResultContext);
private native int[] copyLabelIdsFromNative(long nativeResultContext);
private native boolean releaseNative(long nativeResultContext);
// Initializes at the beginning.
static {
FastDeployInitializer.init();
}
} }

@@ -0,0 +1,54 @@
package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull;
import java.util.Arrays;
public class FaceDetectionResult {
public float[][] mBoxes; // [n,4]
public float[] mScores; // [n]
public float[][] mLandmarks; // [n,2]
int mLandmarksPerFace = 0;
public boolean mInitialized = false;
public FaceDetectionResult() {
mInitialized = false;
mLandmarksPerFace = 0;
}
public boolean initialized() {
return mInitialized;
}
public void setBoxes(@NonNull float[] boxesBuffer) {
int boxesNum = boxesBuffer.length / 4;
if (boxesNum > 0) {
mBoxes = new float[boxesNum][4];
for (int i = 0; i < boxesNum; ++i) {
mBoxes[i] = Arrays.copyOfRange(
boxesBuffer, i * 4, (i + 1) * 4);
}
}
}
public void setScores(@NonNull float[] scoresBuffer) {
if (scoresBuffer.length > 0) {
mScores = scoresBuffer.clone();
}
}
public void setLandmarks(@NonNull float[] landmarksBuffer) {
int landmarksNum = landmarksBuffer.length / 2;
if (landmarksNum > 0) {
mLandmarks = new float[landmarksNum][2];
for (int i = 0; i < landmarksNum; ++i) {
mLandmarks[i] = Arrays.copyOfRange(
landmarksBuffer, i * 2, (i + 1) * 2);
}
}
}
public void setLandmarksPerFace(int landmarksPerFace) {
mLandmarksPerFace = landmarksPerFace;
}
}
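FaceDetectionResult stores the landmarks of all faces in one flat [m][2] array, with mLandmarksPerFace recording how many rows belong to each box. A hedged iteration sketch, assuming the native side packs landmarks face-by-face and that mLandmarksPerFace is reachable (it is package-private above, so same-package access or a getter is assumed); the [xmin, ymin, xmax, ymax] box layout is the usual FastDeploy convention rather than something stated in this file:

for (int i = 0; i < result.mBoxes.length; ++i) {
    float[] box = result.mBoxes[i];    // assumed [xmin, ymin, xmax, ymax]
    float score = result.mScores[i];
    // mLandmarksPerFace rows of [x, y] per face, packed consecutively (assumption).
    for (int j = 0; j < result.mLandmarksPerFace; ++j) {
        float[] landmark = result.mLandmarks[i * result.mLandmarksPerFace + j];
    }
}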

@@ -1,9 +1,6 @@
package com.baidu.paddle.fastdeploy.vision; package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull; import android.support.annotation.NonNull;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import java.util.Arrays; import java.util.Arrays;
public class OCRResult { public class OCRResult {
@@ -18,31 +15,11 @@ public class OCRResult {
mInitialized = false; mInitialized = false;
} }
public OCRResult(long nativeResultContext) {
mInitialized = copyAllFromNativeContext(nativeResultContext);
}
public boolean initialized() { public boolean initialized() {
return mInitialized; return mInitialized;
} }
// Setup results from native buffers. public void setBoxes(@NonNull int[] boxesBuffer) {
private boolean copyAllFromNativeContext(long nativeResultContext) {
if (nativeResultContext == 0) {
return false;
}
if (copyBoxesNumFromNative(nativeResultContext) > 0) {
setBoxes(copyBoxesFromNative(nativeResultContext));
setText(copyTextFromNative(nativeResultContext));
setRecScores(copyRecScoresFromNative(nativeResultContext));
setClsScores(copyClsScoresFromNative(nativeResultContext));
setClsLabels(copyClsLabelsFromNative(nativeResultContext));
}
// WARN: must release ctx.
return releaseNative(nativeResultContext);
}
private void setBoxes(@NonNull int[] boxesBuffer) {
int boxesNum = boxesBuffer.length / 8; int boxesNum = boxesBuffer.length / 8;
if (boxesNum > 0) { if (boxesNum > 0) {
mBoxes = new int[boxesNum][8]; mBoxes = new int[boxesNum][8];
@@ -53,47 +30,27 @@ public class OCRResult {
} }
} }
private void setText(@NonNull String[] textBuffer) { public void setText(@NonNull String[] textBuffer) {
if (textBuffer.length > 0) { if (textBuffer.length > 0) {
mText = textBuffer.clone(); mText = textBuffer.clone();
} }
} }
private void setRecScores(@NonNull float[] recScoresBuffer) { public void setRecScores(@NonNull float[] recScoresBuffer) {
if (recScoresBuffer.length > 0) { if (recScoresBuffer.length > 0) {
mRecScores = recScoresBuffer.clone(); mRecScores = recScoresBuffer.clone();
} }
} }
private void setClsScores(@NonNull float[] clsScoresBuffer) { public void setClsScores(@NonNull float[] clsScoresBuffer) {
if (clsScoresBuffer.length > 0) { if (clsScoresBuffer.length > 0) {
mClsScores = clsScoresBuffer.clone(); mClsScores = clsScoresBuffer.clone();
} }
} }
private void setClsLabels(@NonNull int[] clsLabelBuffer) { public void setClsLabels(@NonNull int[] clsLabelBuffer) {
if (clsLabelBuffer.length > 0) { if (clsLabelBuffer.length > 0) {
mClsLabels = clsLabelBuffer.clone(); mClsLabels = clsLabelBuffer.clone();
} }
} }
// Fetch native buffers from native context.
private native int copyBoxesNumFromNative(long nativeResultContext);
private native int[] copyBoxesFromNative(long nativeResultContext);
private native String[] copyTextFromNative(long nativeResultContext);
private native float[] copyRecScoresFromNative(long nativeResultContext);
private native float[] copyClsScoresFromNative(long nativeResultContext);
private native int[] copyClsLabelsFromNative(long nativeResultContext);
private native boolean releaseNative(long nativeResultContext);
// Initializes at the beginning.
static {
FastDeployInitializer.init();
}
} }

@@ -0,0 +1,42 @@
package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull;
public class SegmentationResult {
// Init from native
public int[] mLabelMap;
public float[] mScoreMap;
public long[] mShape;
public boolean mContainScoreMap = false;
public boolean mInitialized = false;
public SegmentationResult() {
mInitialized = false;
}
public boolean initialized() {
return mInitialized;
}
public void setLabelMap(@NonNull int[] labelMapBuffer) {
if (labelMapBuffer.length > 0) {
mLabelMap = labelMapBuffer.clone();
}
}
public void setScoreMap(@NonNull float[] scoreMapBuffer) {
if (scoreMapBuffer.length > 0) {
mScoreMap = scoreMapBuffer.clone();
}
}
public void setShape(@NonNull long[] shapeBuffer) {
if (shapeBuffer.length > 0) {
mShape = shapeBuffer.clone();
}
}
public void setContainScoreMap(boolean containScoreMap) {
mContainScoreMap = containScoreMap;
}
}
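SegmentationResult keeps the per-pixel outputs as flat arrays plus a shape. A hedged lookup sketch, assuming mShape stores {height, width} and that mLabelMap/mScoreMap are row-major with one entry per pixel (mScoreMap is only meaningful when mContainScoreMap is true):

int x = 0, y = 0;                          // pixel to query
long width = result.mShape[1];             // assumption: mShape = {H, W}
int idx = (int) (y * width + x);           // row-major index
int classId = result.mLabelMap[idx];
float confidence = result.mContainScoreMap ? result.mScoreMap[idx] : 1.0f;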

@@ -6,34 +6,39 @@ import com.baidu.paddle.fastdeploy.FastDeployInitializer;
public class Visualize { public class Visualize {
// TODO(qiuyanjun): // visClassification, visDetection, visSegmentation, visOcr, ...
// VisClassification, VisSegmentation, VisMatting, VisOcr, ...
// Visualize DetectionResult without labels // Visualize DetectionResult without labels
public static boolean visDetection(Bitmap ARGB8888Bitmap, public static boolean visDetection(Bitmap ARGB8888Bitmap,
DetectionResult result) { DetectionResult result) {
return visDetectionNative( return visDetectionNative(
ARGB8888Bitmap, ARGB8888Bitmap, result,
result.mBoxes, 0.f, 2, 0.5f,
result.mScores,
result.mLabelIds,
0.f, 1, 0.5f,
new String[]{}); new String[]{});
} }
public static boolean visDetection(Bitmap ARGB8888Bitmap, public static boolean visDetection(Bitmap ARGB8888Bitmap,
DetectionResult result, DetectionResult result,
float score_threshold, float scoreThreshold) {
int line_size,
float font_size) {
return visDetectionNative( return visDetectionNative(
ARGB8888Bitmap, ARGB8888Bitmap,
result.mBoxes, result,
result.mScores, scoreThreshold,
result.mLabelIds, 2,
score_threshold, 0.5f,
line_size, new String[]{});
font_size, }
public static boolean visDetection(Bitmap ARGB8888Bitmap,
DetectionResult result,
float scoreThreshold,
int lineSize,
float fontSize) {
return visDetectionNative(
ARGB8888Bitmap,
result,
scoreThreshold,
lineSize,
fontSize,
new String[]{}); new String[]{});
} }
@@ -43,27 +48,23 @@ public class Visualize {
String[] labels) { String[] labels) {
return visDetectionNative( return visDetectionNative(
ARGB8888Bitmap, ARGB8888Bitmap,
result.mBoxes, result,
result.mScores, 0.f, 2, 0.5f,
result.mLabelIds,
0.f, 1, 0.5f,
labels); labels);
} }
public static boolean visDetection(Bitmap ARGB8888Bitmap, public static boolean visDetection(Bitmap ARGB8888Bitmap,
DetectionResult result, DetectionResult result,
String[] labels, String[] labels,
float score_threshold, float scoreThreshold,
int line_size, int lineSize,
float font_size) { float fontSize) {
return visDetectionNative( return visDetectionNative(
ARGB8888Bitmap, ARGB8888Bitmap,
result.mBoxes, result,
result.mScores, scoreThreshold,
result.mLabelIds, lineSize,
score_threshold, fontSize,
line_size,
font_size,
labels); labels);
} }
@@ -71,9 +72,7 @@ public class Visualize {
public static boolean visClassification(Bitmap ARGB8888Bitmap, public static boolean visClassification(Bitmap ARGB8888Bitmap,
ClassifyResult result) { ClassifyResult result) {
return visClassificationNative( return visClassificationNative(
ARGB8888Bitmap, ARGB8888Bitmap, result,
result.mScores,
result.mLabelIds,
0.f, 1, 0.f, 1,
new String[]{}); new String[]{});
@@ -81,14 +80,12 @@ public class Visualize {
public static boolean visClassification(Bitmap ARGB8888Bitmap, public static boolean visClassification(Bitmap ARGB8888Bitmap,
ClassifyResult result, ClassifyResult result,
float score_threshold, float scoreThreshold,
float font_size) { float fontSize) {
return visClassificationNative( return visClassificationNative(
ARGB8888Bitmap, ARGB8888Bitmap, result,
result.mScores, scoreThreshold,
result.mLabelIds, fontSize,
score_threshold,
font_size,
new String[]{}); new String[]{});
} }
@@ -98,9 +95,7 @@ public class Visualize {
ClassifyResult result, ClassifyResult result,
String[] labels) { String[] labels) {
return visClassificationNative( return visClassificationNative(
ARGB8888Bitmap, ARGB8888Bitmap, result,
result.mScores,
result.mLabelIds,
0.f, 1, 0.f, 1,
labels); labels);
@@ -109,14 +104,13 @@ public class Visualize {
public static boolean visClassification(Bitmap ARGB8888Bitmap, public static boolean visClassification(Bitmap ARGB8888Bitmap,
ClassifyResult result, ClassifyResult result,
String[] labels, String[] labels,
float score_threshold, float scoreThreshold,
float font_size) { float fontSize) {
return visClassificationNative( return visClassificationNative(
ARGB8888Bitmap, ARGB8888Bitmap,
result.mScores, result,
result.mLabelIds, scoreThreshold,
score_threshold, fontSize,
font_size,
labels); labels);
} }
@@ -126,39 +120,76 @@ public class Visualize {
OCRResult result) { OCRResult result) {
return visOcrNative( return visOcrNative(
ARGB8888Bitmap, ARGB8888Bitmap,
result.mBoxes, result);
result.mText, }
result.mRecScores,
result.mClsScores, // Visualize SegmentationResult
result.mClsLabels); public static boolean visSegmentation(Bitmap ARGB8888Bitmap,
SegmentationResult result) {
return visSegmentationNative(
ARGB8888Bitmap,
result,
0.5f);
}
public static boolean visSegmentation(Bitmap ARGB8888Bitmap,
SegmentationResult result,
float weight) {
return visSegmentationNative(
ARGB8888Bitmap,
result,
weight);
}
// Visualize FaceDetectionResult
public static boolean visFaceDetection(Bitmap ARGB8888Bitmap,
FaceDetectionResult result) {
return visFaceDetectionNative(
ARGB8888Bitmap,
result,
2, 0.5f);
}
public static boolean visFaceDetection(Bitmap ARGB8888Bitmap,
FaceDetectionResult result,
int lineSize,
float fontSize) {
return visFaceDetectionNative(
ARGB8888Bitmap,
result,
lineSize,
fontSize);
} }
// VisDetection in native // VisDetection in native
private static native boolean visDetectionNative(Bitmap ARGB8888Bitmap, private static native boolean visDetectionNative(Bitmap ARGB8888Bitmap,
float[][] boxes, DetectionResult result,
float[] scores, float scoreThreshold,
int[] labelIds, int lineSize,
float score_threshold, float fontSize,
int line_size,
float font_size,
String[] labels); String[] labels);
// VisClassification in native // VisClassification in native
private static native boolean visClassificationNative(Bitmap ARGB8888Bitmap, private static native boolean visClassificationNative(Bitmap ARGB8888Bitmap,
float[] scores, ClassifyResult result,
int[] labelIds, float scoreThreshold,
float score_threshold, float fontSize,
float font_size,
String[] labels); String[] labels);
// VisOCRResult in native // VisOcr in native
private static native boolean visOcrNative(Bitmap ARGB8888Bitmap, private static native boolean visOcrNative(Bitmap ARGB8888Bitmap,
int[][] boxes, OCRResult result);
String[] text,
float[] recScores,
float[] clsScores,
int[] clsLabels);
// VisSegmentation in native
private static native boolean visSegmentationNative(Bitmap ARGB8888Bitmap,
SegmentationResult result,
float weight);
// VisFaceDetection in native
private static native boolean visFaceDetectionNative(Bitmap ARGB8888Bitmap,
FaceDetectionResult result,
int lineSize,
float fontSize);
/* Initializes at the beginning */ /* Initializes at the beginning */
static { static {

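The reworked Visualize entry points above take result objects directly instead of unpacked arrays and draw onto the passed ARGB8888 bitmap in place. A usage sketch against the overloads shown in this diff (the bitmap and result variables are placeholders):

// Detection overlay with a 0.5 score threshold (default line/font sizes).
Visualize.visDetection(bitmap, detectionResult, 0.5f);
// Segmentation mask blended into the bitmap with the given weight.
Visualize.visSegmentation(bitmap, segmentationResult, 0.5f);
// Face boxes and landmarks with explicit line size and font size.
Visualize.visFaceDetection(bitmap, faceDetectionResult, 2, 0.5f);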
@@ -7,14 +7,14 @@ import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.ClassifyResult; import com.baidu.paddle.fastdeploy.vision.ClassifyResult;
public class PaddleClasModel { public class PaddleClasModel {
protected long mNativeModelContext = 0; // Context from native. protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false; protected boolean mInitialized = false;
public PaddleClasModel() { public PaddleClasModel() {
mInitialized = false; mInitialized = false;
} }
// Constructor with default runtime option // Constructor with default runtimeOption
public PaddleClasModel(String modelFile, public PaddleClasModel(String modelFile,
String paramsFile, String paramsFile,
String configFile) { String configFile) {
@@ -32,8 +32,8 @@ public class PaddleClasModel {
public PaddleClasModel(String modelFile, public PaddleClasModel(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, "", option); init_(modelFile, paramsFile, configFile, "", runtimeOption);
} }
// Constructor with label file // Constructor with label file
@@ -41,16 +41,16 @@ public class PaddleClasModel {
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, labelFile, option); init_(modelFile, paramsFile, configFile, labelFile, runtimeOption);
} }
// Call init manually without label file // Call init manually without label file
public boolean init(String modelFile, public boolean init(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, "", option); return init_(modelFile, paramsFile, configFile, "", runtimeOption);
} }
// Call init manually with label file // Call init manually with label file
@@ -58,17 +58,16 @@ public class PaddleClasModel {
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, labelFile, option); return init_(modelFile, paramsFile, configFile, labelFile, runtimeOption);
} }
public boolean release() { public boolean release() {
mInitialized = false; mInitialized = false;
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return false; return false;
} }
return releaseNative(mNativeModelContext); return releaseNative(mCxxContext);
} }
public boolean initialized() { public boolean initialized() {
@@ -77,13 +76,31 @@ public class PaddleClasModel {
// Predict without image saving and bitmap rendering. // Predict without image saving and bitmap rendering.
public ClassifyResult predict(Bitmap ARGB8888Bitmap) { public ClassifyResult predict(Bitmap ARGB8888Bitmap) {
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return new ClassifyResult(); return new ClassifyResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new ClassifyResult(predictNative( ClassifyResult result = predictNative(mCxxContext, ARGB8888Bitmap,
mNativeModelContext, ARGB8888Bitmap, false, false, "", false, 0.f);
"", 0.f, false)); if (result == null) {
return new ClassifyResult();
}
return result;
}
public ClassifyResult predict(Bitmap ARGB8888Bitmap,
boolean rendering,
float scoreThreshold) {
if (mCxxContext == 0) {
return new ClassifyResult();
}
// Only support ARGB8888 bitmap in native now.
ClassifyResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering, scoreThreshold);
if (result == null) {
return new ClassifyResult();
}
return result;
} }
    // Predict with image saving and bitmap rendering (will cost more time) // Predict with image saving and bitmap rendering (will cost more time)
@@ -91,13 +108,18 @@ public class PaddleClasModel {
String savedImagePath, String savedImagePath,
float scoreThreshold) { float scoreThreshold) {
// scoreThreshold is for visualizing only. // scoreThreshold is for visualizing only.
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return new ClassifyResult(); return new ClassifyResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new ClassifyResult(predictNative( ClassifyResult result = predictNative(
mNativeModelContext, ARGB8888Bitmap, true, mCxxContext, ARGB8888Bitmap,
savedImagePath, scoreThreshold, true)); true, savedImagePath, true,
scoreThreshold);
if (result == null) {
return new ClassifyResult();
}
return result;
} }
// Internal init_ method // Internal init_ method
@@ -105,34 +127,28 @@ public class PaddleClasModel {
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
if (!mInitialized) { if (!mInitialized) {
mNativeModelContext = bindNative( mCxxContext = bindNative(
modelFile, modelFile,
paramsFile, paramsFile,
configFile, configFile,
option.mCpuThreadNum, runtimeOption,
option.mEnableLiteFp16, labelFile);
option.mLitePowerMode.ordinal(), if (mCxxContext != 0) {
option.mLiteOptimizedModelDir,
option.mEnableRecordTimeOfRuntime, labelFile);
if (mNativeModelContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
} else { } else {
// release current native context and bind a new one. // release current native context and bind a new one.
if (release()) { if (release()) {
mNativeModelContext = bindNative( mCxxContext = bindNative(
modelFile, modelFile,
paramsFile, paramsFile,
configFile, configFile,
option.mCpuThreadNum, runtimeOption,
option.mEnableLiteFp16, labelFile);
option.mLitePowerMode.ordinal(), if (mCxxContext != 0) {
option.mLiteOptimizedModelDir,
option.mEnableRecordTimeOfRuntime, labelFile);
if (mNativeModelContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
@@ -146,23 +162,19 @@ public class PaddleClasModel {
private native long bindNative(String modelFile, private native long bindNative(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
int cpuNumThread, RuntimeOption runtimeOption,
boolean enableLiteFp16,
int litePowerMode,
String liteOptimizedModelDir,
boolean enableRecordTimeOfRuntime,
String labelFile); String labelFile);
// Call prediction from native context. // Call prediction from native context with rendering.
private native long predictNative(long nativeModelContext, private native ClassifyResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap, Bitmap ARGB8888Bitmap,
boolean saved, boolean saveImage,
String savedImagePath, String savePath,
float scoreThreshold, boolean rendering,
boolean rendering); float scoreThreshold);
// Release buffers allocated in native context. // Release buffers allocated in native context.
private native boolean releaseNative(long nativeModelContext); private native boolean releaseNative(long CxxContext);
// Initializes at the beginning. // Initializes at the beginning.
static { static {

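Putting the pieces together, a hedged end-to-end sketch of the new classification API; the file paths are placeholders and would normally point at files unpacked from the app's assets:

RuntimeOption option = new RuntimeOption();
option.mCpuThreadNum = 2;

PaddleClasModel model = new PaddleClasModel(
    "<model_dir>/inference.pdmodel",      // placeholder paths
    "<model_dir>/inference.pdiparams",
    "<model_dir>/inference_cls.yaml",
    "<labels_dir>/label_list.txt",
    option);

ClassifyResult result = model.predict(bitmap);   // no rendering, no image saving
// or: model.predict(bitmap, true, 0.65f)        // render top labels onto the bitmap
model.release();                                 // frees the native context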
@@ -7,7 +7,7 @@ import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.DetectionResult; import com.baidu.paddle.fastdeploy.vision.DetectionResult;
public class PicoDet { public class PicoDet {
protected long mNativeModelContext = 0; // Context from native. protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false; protected boolean mInitialized = false;
public PicoDet() { public PicoDet() {
@@ -32,8 +32,8 @@ public class PicoDet {
public PicoDet(String modelFile, public PicoDet(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, "", option); init_(modelFile, paramsFile, configFile, "", runtimeOption);
} }
// Constructor with label file // Constructor with label file
@@ -41,16 +41,16 @@ public class PicoDet {
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, labelFile, option); init_(modelFile, paramsFile, configFile, labelFile, runtimeOption);
} }
// Call init manually without label file // Call init manually without label file
public boolean init(String modelFile, public boolean init(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, "", option); return init_(modelFile, paramsFile, configFile, "", runtimeOption);
} }
// Call init manually with label file // Call init manually with label file
@@ -58,16 +58,16 @@ public class PicoDet {
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, labelFile, option); return init_(modelFile, paramsFile, configFile, labelFile, runtimeOption);
} }
public boolean release() { public boolean release() {
mInitialized = false; mInitialized = false;
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return false; return false;
} }
return releaseNative(mNativeModelContext); return releaseNative(mCxxContext);
} }
public boolean initialized() { public boolean initialized() {
@@ -76,13 +76,31 @@ public class PicoDet {
// Predict without image saving and bitmap rendering. // Predict without image saving and bitmap rendering.
public DetectionResult predict(Bitmap ARGB8888Bitmap) { public DetectionResult predict(Bitmap ARGB8888Bitmap) {
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return new DetectionResult(); return new DetectionResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new DetectionResult(predictNative( DetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
mNativeModelContext, ARGB8888Bitmap, false, false, "", false, 0.f);
"", 0.f, false)); if (result == null) {
return new DetectionResult();
}
return result;
}
public DetectionResult predict(Bitmap ARGB8888Bitmap,
boolean rendering,
float scoreThreshold) {
if (mCxxContext == 0) {
return new DetectionResult();
}
// Only support ARGB8888 bitmap in native now.
DetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering, scoreThreshold);
if (result == null) {
return new DetectionResult();
}
return result;
} }
    // Predict with image saving and bitmap rendering (will cost more time) // Predict with image saving and bitmap rendering (will cost more time)
@@ -90,48 +108,45 @@ public class PicoDet {
String savedImagePath, String savedImagePath,
float scoreThreshold) { float scoreThreshold) {
// scoreThreshold is for visualizing only. // scoreThreshold is for visualizing only.
if (mNativeModelContext == 0) { if (mCxxContext == 0) {
return new DetectionResult(); return new DetectionResult();
} }
// Only support ARGB8888 bitmap in native now. // Only support ARGB8888 bitmap in native now.
return new DetectionResult(predictNative( DetectionResult result = predictNative(
mNativeModelContext, ARGB8888Bitmap, true, mCxxContext, ARGB8888Bitmap, true,
savedImagePath, scoreThreshold, true)); savedImagePath, true, scoreThreshold);
if (result == null) {
return new DetectionResult();
}
return result;
} }
private boolean init_(String modelFile, private boolean init_(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
String labelFile, String labelFile,
RuntimeOption option) { RuntimeOption runtimeOption) {
if (!mInitialized) { if (!mInitialized) {
mNativeModelContext = bindNative( mCxxContext = bindNative(
modelFile, modelFile,
paramsFile, paramsFile,
configFile, configFile,
option.mCpuThreadNum, runtimeOption,
option.mEnableLiteFp16, labelFile);
option.mLitePowerMode.ordinal(), if (mCxxContext != 0) {
option.mLiteOptimizedModelDir,
option.mEnableRecordTimeOfRuntime, labelFile);
if (mNativeModelContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
} else { } else {
// release current native context and bind a new one. // release current native context and bind a new one.
if (release()) { if (release()) {
mNativeModelContext = bindNative( mCxxContext = bindNative(
modelFile, modelFile,
paramsFile, paramsFile,
configFile, configFile,
option.mCpuThreadNum, runtimeOption,
option.mEnableLiteFp16, labelFile);
option.mLitePowerMode.ordinal(), if (mCxxContext != 0) {
option.mLiteOptimizedModelDir,
option.mEnableRecordTimeOfRuntime, labelFile);
if (mNativeModelContext != 0) {
mInitialized = true; mInitialized = true;
} }
return mInitialized; return mInitialized;
@@ -144,23 +159,19 @@ public class PicoDet {
private native long bindNative(String modelFile, private native long bindNative(String modelFile,
String paramsFile, String paramsFile,
String configFile, String configFile,
int cpuNumThread, RuntimeOption runtimeOption,
boolean enableLiteFp16,
int litePowerMode,
String liteOptimizedModelDir,
boolean enableRecordTimeOfRuntime,
String labelFile); String labelFile);
// Call prediction from native context. // Call prediction from native context with rendering.
private native long predictNative(long nativeModelContext, private native DetectionResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap, Bitmap ARGB8888Bitmap,
boolean saved, boolean saveImage,
String savedImagePath, String savePath,
float scoreThreshold, boolean rendering,
boolean rendering); float scoreThreshold);
// Release buffers allocated in native context. // Release buffers allocated in native context.
private native boolean releaseNative(long nativeModelContext); private native boolean releaseNative(long CxxContext);
// Initializes at the beginning. // Initializes at the beginning.
static { static {
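A minimal usage sketch of the refactored PicoDet Java API above, not taken from this commit: the detection package path, the DetectionResult import path, and the model file paths are assumptions, and the four-argument constructor is assumed to mirror the PaddleSegModel binding added later in this commit.

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.DetectionResult;
import com.baidu.paddle.fastdeploy.vision.detection.PicoDet;

public class PicoDetUsageSketch {
    // Run detection on an ARGB_8888 bitmap without saving or rendering.
    public static DetectionResult detect(Bitmap argb8888Bitmap) {
        // Placeholder paths; replace with model files that actually exist on the device.
        String modelFile = "/sdcard/models/picodet/model.pdmodel";
        String paramsFile = "/sdcard/models/picodet/model.pdiparams";
        String configFile = "/sdcard/models/picodet/infer_cfg.yml";
        PicoDet detector = new PicoDet(modelFile, paramsFile, configFile, new RuntimeOption());
        if (!detector.initialized()) {
            return new DetectionResult();
        }
        // The 0.5f threshold only affects visualization; rendering is disabled here.
        DetectionResult result = detector.predict(argb8888Bitmap, false, 0.5f);
        detector.release();
        return result;
    }
}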


@@ -0,0 +1,142 @@
package com.baidu.paddle.fastdeploy.vision.facedet;
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.FaceDetectionResult;
public class SCRFD {
// TODO(qiuyanjun): support more options, e.g. 'use_kps', 'landmarks_per_face', etc.
public int[] mSize = {320, 320}; // H x W
protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false;
public SCRFD() {
mInitialized = false;
}
// Constructor with default runtime option
public SCRFD(String modelFile,
String paramsFile) {
init_(modelFile, paramsFile, new RuntimeOption());
}
// Constructor with custom runtime option
public SCRFD(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, runtimeOption);
}
public boolean init(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, runtimeOption);
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {
return false;
}
return releaseNative(mCxxContext);
}
public boolean initialized() {
return mInitialized;
}
// Predict without image saving and bitmap rendering.
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap) {
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", false);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap,
boolean rendering) {
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
// Predict with image saving and bitmap rendering (will cost more time)
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap,
String savedImagePath) {
// This overload saves the rendered image to savedImagePath.
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(
mCxxContext, ARGB8888Bitmap, true,
savedImagePath, true);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
private boolean init_(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
if (!mInitialized) {
mCxxContext = bindNative(
modelFile,
paramsFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
} else {
// release current native context and bind a new one.
if (release()) {
mCxxContext = bindNative(
modelFile,
paramsFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
}
return false;
}
}
// Bind predictor from native context.
private native long bindNative(String modelFile,
String paramsFile,
RuntimeOption runtimeOption);
// Call prediction from native context with rendering.
private native FaceDetectionResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap,
boolean saveImage,
String savePath,
boolean rendering);
// Release buffers allocated in native context.
private native boolean releaseNative(long CxxContext);
// Initialize FastDeploy when this class is first loaded.
static {
FastDeployInitializer.init();
}
}
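A minimal usage sketch for the SCRFD binding above, not part of the commit; the model and params paths are placeholders and device/threading context is omitted.

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.FaceDetectionResult;
import com.baidu.paddle.fastdeploy.vision.facedet.SCRFD;

public class ScrfdUsageSketch {
    // Run face detection on an ARGB_8888 bitmap without saving or rendering.
    public static FaceDetectionResult detect(Bitmap argb8888Bitmap) {
        // Placeholder paths; replace with model files that exist on the device.
        SCRFD model = new SCRFD("/sdcard/models/scrfd/model.pdmodel",
                                "/sdcard/models/scrfd/model.pdiparams",
                                new RuntimeOption());
        if (!model.initialized()) {
            return new FaceDetectionResult();
        }
        FaceDetectionResult result = model.predict(argb8888Bitmap);
        model.release();
        return result;
    }
}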


@@ -0,0 +1,141 @@
package com.baidu.paddle.fastdeploy.vision.facedet;
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.FaceDetectionResult;
public class YOLOv5Face {
public int[] mSize = {320, 320};
protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false;
public YOLOv5Face() {
mInitialized = false;
}
// Constructor with default runtime option
public YOLOv5Face(String modelFile,
String paramsFile) {
init_(modelFile, paramsFile, new RuntimeOption());
}
// Constructor with custom runtime option
public YOLOv5Face(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, runtimeOption);
}
public boolean init(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, runtimeOption);
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {
return false;
}
return releaseNative(mCxxContext);
}
public boolean initialized() {
return mInitialized;
}
// Predict without image saving and bitmap rendering.
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap) {
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", false);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap,
boolean rendering) {
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
// Predict with image saving and bitmap rendering (will cost more time)
public FaceDetectionResult predict(Bitmap ARGB8888Bitmap,
String savedImagePath) {
// This overload saves the rendered image to savedImagePath.
if (mCxxContext == 0) {
return new FaceDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
FaceDetectionResult result = predictNative(
mCxxContext, ARGB8888Bitmap, true,
savedImagePath, true);
if (result == null) {
return new FaceDetectionResult();
}
return result;
}
private boolean init_(String modelFile,
String paramsFile,
RuntimeOption runtimeOption) {
if (!mInitialized) {
mCxxContext = bindNative(
modelFile,
paramsFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
} else {
// release current native context and bind a new one.
if (release()) {
mCxxContext = bindNative(
modelFile,
paramsFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
}
return false;
}
}
// Bind predictor from native context.
private native long bindNative(String modelFile,
String paramsFile,
RuntimeOption runtimeOption);
// Call prediction from native context with rendering.
private native FaceDetectionResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap,
boolean saveImage,
String savePath,
boolean rendering);
// Release buffers allocated in native context.
private native boolean releaseNative(long CxxContext);
// Initialize FastDeploy when this class is first loaded.
static {
FastDeployInitializer.init();
}
}
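The YOLOv5Face binding follows the same pattern; a short sketch using the rendering overload, again with placeholder model paths not taken from this commit.

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.FaceDetectionResult;
import com.baidu.paddle.fastdeploy.vision.facedet.YOLOv5Face;

public class YOLOv5FaceUsageSketch {
    // Detect faces; rendering = true asks the native side to draw results onto the bitmap.
    public static FaceDetectionResult detectAndRender(Bitmap argb8888Bitmap) {
        // Placeholder paths; replace with model files that exist on the device.
        YOLOv5Face model = new YOLOv5Face("/sdcard/models/yolov5face/model.pdmodel",
                                          "/sdcard/models/yolov5face/model.pdiparams",
                                          new RuntimeOption());
        if (!model.initialized()) {
            return new FaceDetectionResult();
        }
        FaceDetectionResult result = model.predict(argb8888Bitmap, true);
        model.release();
        return result;
    }
}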


@@ -0,0 +1,150 @@
package com.baidu.paddle.fastdeploy.vision.segmentation;
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.SegmentationResult;
public class PaddleSegModel {
protected long mCxxContext = 0; // Context from native.
protected boolean mInitialized = false;
public PaddleSegModel() {
mInitialized = false;
}
// Constructor with default runtime option
public PaddleSegModel(String modelFile,
String paramsFile,
String configFile) {
init_(modelFile, paramsFile, configFile, new RuntimeOption());
}
// Constructor with custom runtime option
public PaddleSegModel(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, runtimeOption);
}
public boolean init(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, runtimeOption);
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {
return false;
}
return releaseNative(mCxxContext);
}
public boolean initialized() {
return mInitialized;
}
// Predict without image saving and bitmap rendering.
public SegmentationResult predict(Bitmap ARGB8888Bitmap) {
if (mCxxContext == 0) {
return new SegmentationResult();
}
// Only support ARGB8888 bitmap in native now.
SegmentationResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", false, 0.5f);
if (result == null) {
return new SegmentationResult();
}
return result;
}
public SegmentationResult predict(Bitmap ARGB8888Bitmap,
boolean rendering,
float weight) {
if (mCxxContext == 0) {
return new SegmentationResult();
}
// Only support ARGB8888 bitmap in native now.
SegmentationResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering, weight);
if (result == null) {
return new SegmentationResult();
}
return result;
}
// Predict with image saving and bitmap rendering (will cost more time)
public SegmentationResult predict(Bitmap ARGB8888Bitmap,
String savedImagePath,
float weight) {
// The weight only affects the rendered visualization overlay.
if (mCxxContext == 0) {
return new SegmentationResult();
}
// Only support ARGB8888 bitmap in native now.
SegmentationResult result = predictNative(
mCxxContext, ARGB8888Bitmap, true,
savedImagePath, true, weight);
if (result == null) {
return new SegmentationResult();
}
return result;
}
private boolean init_(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
if (!mInitialized) {
mCxxContext = bindNative(
modelFile,
paramsFile,
configFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
} else {
// release current native context and bind a new one.
if (release()) {
mCxxContext = bindNative(
modelFile,
paramsFile,
configFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
}
return false;
}
}
// Bind predictor from native context.
private native long bindNative(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption);
// Call prediction from native context with rendering.
private native SegmentationResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap,
boolean saveImage,
String savePath,
boolean rendering,
float weight);
// Release buffers allocated in native context.
private native boolean releaseNative(long CxxContext);
// Initializes at the beginning.
static {
FastDeployInitializer.init();
}
}
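A sketch of the saving overload of the PaddleSegModel binding above; the model, config, and output paths are placeholders, and 0.4f is an arbitrary blend weight for the visualization overlay.

import android.graphics.Bitmap;

import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.SegmentationResult;
import com.baidu.paddle.fastdeploy.vision.segmentation.PaddleSegModel;

public class PaddleSegUsageSketch {
    // Segment an ARGB_8888 bitmap, render the mask and save the visualization to disk.
    public static SegmentationResult segmentAndSave(Bitmap argb8888Bitmap) {
        // Placeholder paths; replace with model files that exist on the device.
        PaddleSegModel model = new PaddleSegModel("/sdcard/models/ppseg/model.pdmodel",
                                                  "/sdcard/models/ppseg/model.pdiparams",
                                                  "/sdcard/models/ppseg/deploy.yaml",
                                                  new RuntimeOption());
        if (!model.initialized()) {
            return new SegmentationResult();
        }
        // 0.4f is the blend weight between the original image and the color mask.
        SegmentationResult result = model.predict(argb8888Bitmap,
                                                  "/sdcard/Download/ppseg_result.jpg", 0.4f);
        model.release();
        return result;
    }
}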