[Android] Support PPTinyPose on Android (#746)

* [Android] Update ppseg jni via new api and optimize jni vis funcs

* delete local refs

* [Android] Add PPTinyPose jni and java api

* [Android] Update gradle download tasks info

* [Android] Add PPTinyPose Android app example

* update app build.gradle
This commit is contained in:
DefTruth
2022-11-30 16:29:20 +08:00
committed by GitHub
parent 9d78b1d414
commit 8e4a38ce21
27 changed files with 1649 additions and 50 deletions

View File

@@ -56,7 +56,7 @@ def FD_CXX_LIB = [
task downloadAndExtractLibs(type: DefaultTask) {
doFirst {
println "Downloading and extracting fastdeploy android c++ lib ..."
println "[INFO] Downloading and extracting fastdeploy android c++ lib ..."
}
doLast {
String cachePath = "cache"
@@ -69,15 +69,18 @@ task downloadAndExtractLibs(type: DefaultTask) {
libName = libName.split("\\.")[0]
boolean copyFiles = !file("${lib.dest}/${libName}").exists()
if (!file("${cachePath}/${libName}.tgz").exists()) {
println "Downloading ${lib.src} -> ${cachePath}/${libName}.tgz"
println "[INFO] Downloading ${lib.src} -> ${cachePath}/${libName}.tgz"
ant.get(src: lib.src, dest: file("${cachePath}/${libName}.tgz"))
copyFiles = true
}
if (copyFiles) {
println "[INFO] Taring ${cachePath}/${libName}.tgz -> ${lib.dest}/${libName}"
copy {
from tarTree("${cachePath}/${libName}.tgz")
into "${lib.dest}"
}
} else {
println "[INFO] ${lib.dest}/${libName} already exists!"
}
}
}

View File

@@ -43,6 +43,8 @@ add_library(
fastdeploy_jni/vision/facedet/scrfd_jni.cc
fastdeploy_jni/vision/facedet/yolov5face_jni.cc
fastdeploy_jni/vision/facedet/facedet_utils_jni.cc
fastdeploy_jni/vision/keypointdetection/pptinypose_jni.cc
fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.cc
)
# Searches for a specified prebuilt library and stores the path as a

View File

@@ -19,6 +19,33 @@
namespace fastdeploy {
namespace jni {
// Wraps the pixel buffer of an ARGB8888 Android Bitmap in a cv::Mat header
// without copying the pixel data ("zero copy"). Returns an empty cv::Mat on
// any failure (bad info query, wrong format, or lock failure).
// NOTE(review): the returned Mat still points at the bitmap's pixel buffer
// after AndroidBitmap_unlockPixels() has been called below — callers must
// keep the Java Bitmap alive (and unmodified) for the Mat's whole lifetime;
// confirm this is safe on all targeted Android versions.
cv::Mat CreateZeroCopyRGBAFromBitmap(JNIEnv *env, jobject j_argb8888_bitmap) {
cv::Mat c_rgba;
AndroidBitmapInfo j_bitmap_info;
// Query the bitmap's width/height/format.
if (AndroidBitmap_getInfo(env, j_argb8888_bitmap, &j_bitmap_info) < 0) {
LOGE("Invoke AndroidBitmap_getInfo() failed!");
return c_rgba;
}
// Only ARGB_8888 bitmaps are accepted (4 bytes/pixel, RGBA byte order).
if (j_bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Only Bitmap.Config.ARGB8888 color format is supported!");
return c_rgba;
}
void *j_bitmap_pixels;
// Pin the pixel buffer so we can take its address.
if (AndroidBitmap_lockPixels(env, j_argb8888_bitmap, &j_bitmap_pixels) < 0) {
LOGE("Invoke AndroidBitmap_lockPixels() failed!");
return c_rgba;
}
// Wrap the pinned buffer in a Mat header; no pixel data is copied.
cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
static_cast<int>(j_bitmap_info.width), CV_8UC4,
j_bitmap_pixels); // no copied.
c_rgba = j_bitmap_im; // ref only.
// Unpin the buffer; the Mat above keeps referencing it (see NOTE above).
if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
LOGE("Invoke AndroidBitmap_unlockPixels() failed!");
return c_rgba;
}
return c_rgba;
}
jboolean ARGB888Bitmap2RGBA(JNIEnv *env, jobject j_argb8888_bitmap,
cv::Mat *c_rgba) {
// Convert the android bitmap(ARGB8888) to the OpenCV RGBA image. Actually,
@@ -26,26 +53,8 @@ jboolean ARGB888Bitmap2RGBA(JNIEnv *env, jobject j_argb8888_bitmap,
// so it is unnecessary to do the conversion of color format, check
// https://developer.android.com/reference/android/graphics/Bitmap.Config#ARGB_8888
// to get the more details about Bitmap.Config.ARGB8888
AndroidBitmapInfo j_bitmap_info;
if (AndroidBitmap_getInfo(env, j_argb8888_bitmap, &j_bitmap_info) < 0) {
LOGE("Invoke AndroidBitmap_getInfo() failed!");
return JNI_FALSE;
}
if (j_bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Only Bitmap.Config.ARGB8888 color format is supported!");
return JNI_FALSE;
}
void *j_bitmap_pixels;
if (AndroidBitmap_lockPixels(env, j_argb8888_bitmap, &j_bitmap_pixels) < 0) {
LOGE("Invoke AndroidBitmap_lockPixels() failed!");
return JNI_FALSE;
}
cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
static_cast<int>(j_bitmap_info.width), CV_8UC4,
j_bitmap_pixels);
j_bitmap_im.copyTo(*(c_rgba));
if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
LOGE("Invoke AndroidBitmap_unlockPixels() failed!");
*c_rgba = CreateZeroCopyRGBAFromBitmap(env, j_argb8888_bitmap);
if (c_rgba->empty()) {
return JNI_FALSE;
}
return JNI_TRUE;
@@ -57,6 +66,8 @@ jboolean ARGB888Bitmap2BGR(JNIEnv *env, jobject j_argb8888_bitmap,
if (!ARGB888Bitmap2RGBA(env, j_argb8888_bitmap, &c_rgba)) {
return JNI_FALSE;
}
// TODO: Use the neon instruction to optimize this conversion.
// COLOR_RGBA2BGR will allocate memories for new mat.
cv::cvtColor(c_rgba, *(c_bgr), cv::COLOR_RGBA2BGR);
return JNI_TRUE;
}
@@ -73,9 +84,11 @@ jboolean RGBA2ARGB888Bitmap(JNIEnv *env, jobject j_argb8888_bitmap,
LOGE("Invoke AndroidBitmap_lockPixels() failed!");
return JNI_FALSE;
}
// no copied, but point to bitmap data.
cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
static_cast<int>(j_bitmap_info.width), CV_8UC4,
j_bitmap_pixels);
// TODO: Use zero copy operation or neon to boost performance.
c_rgba.copyTo(j_bitmap_im);
if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
LOGE("Invoke AndroidBitmap_unlockPixels() failed!");

View File

@@ -34,6 +34,8 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
j_runtime_option_clazz, "mCpuThreadNum", "I");
const jfieldID j_enable_lite_fp16_id = env->GetFieldID(
j_runtime_option_clazz, "mEnableLiteFp16", "Z");
const jfieldID j_enable_lite_int8_id = env->GetFieldID(
j_runtime_option_clazz, "mEnableLiteInt8", "Z");
const jfieldID j_lite_power_mode_id = env->GetFieldID(
j_runtime_option_clazz, "mLitePowerMode",
"Lcom/baidu/paddle/fastdeploy/LitePowerMode;");
@@ -59,6 +61,8 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
j_runtime_option_obj, j_cpu_num_thread_id);
jboolean j_enable_lite_fp16 = env->GetBooleanField(
j_runtime_option_obj, j_enable_lite_fp16_id);
jboolean j_enable_lite_int8 = env->GetBooleanField(
j_runtime_option_obj, j_enable_lite_int8_id);
jstring j_lite_optimized_model_dir = static_cast<jstring>(
env->GetObjectField(j_runtime_option_obj, j_lite_optimized_model_dir_id));
jobject j_lite_power_mode_obj = env->GetObjectField(
@@ -68,6 +72,7 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
int c_cpu_num_thread = static_cast<int>(j_cpu_num_thread);
bool c_enable_lite_fp16 = static_cast<bool>(j_enable_lite_fp16);
bool c_enable_lite_int8 = static_cast<bool>(j_enable_lite_int8);
fastdeploy::LitePowerMode c_lite_power_mode =
static_cast<fastdeploy::LitePowerMode>(j_lite_power_mode);
std::string c_lite_optimized_model_dir =
@@ -80,6 +85,9 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
if (c_enable_lite_fp16) {
c_runtime_option.EnableLiteFP16();
}
if (c_enable_lite_int8) {
c_runtime_option.EnableLiteInt8();
}
env->DeleteLocalRef(j_runtime_option_clazz);
env->DeleteLocalRef(j_lite_power_mode_clazz);

View File

@@ -0,0 +1,45 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Render a KeyPointDetectionResult onto the given ARGB8888 bitmap and
/// optionally save the visualization image to 'save_path'. Does nothing
/// when the result contains no keypoints.
void RenderingKeyPointDetection(
    JNIEnv *env, const cv::Mat &c_bgr,
    const vision::KeyPointDetectionResult &c_result,
    jobject argb8888_bitmap, bool save_image, float conf_threshold,
    jstring save_path) {
  // Guard clause: nothing to draw without keypoints.
  if (c_result.keypoints.empty()) {
    return;
  }
  auto start = GetCurrentTime();
  auto vis_mat = vision::VisKeypointDetection(c_bgr, c_result, conf_threshold);
  LOGD("Visualize from native costs %f ms", GetElapsedTime(start));
  // Write the visualization back into the Java bitmap.
  if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, vis_mat)) {
    LOGD("Write to bitmap from native failed!");
  }
  // Optionally dump the visualization to disk.
  auto image_save_path = ConvertTo<std::string>(env, save_path);
  if (save_image && !image_save_path.empty()) {
    cv::imwrite(image_save_path, vis_mat);
  }
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,32 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
void RenderingKeyPointDetection(
JNIEnv *env, const cv::Mat &c_bgr,
const vision::KeyPointDetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
float conf_threshold, jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,103 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace keypointdetection = fastdeploy::vision::keypointdetection;
#ifdef __cplusplus
extern "C" {
#endif
// Binds a native PPTinyPose predictor for the Java PPTinyPose object and
// returns its pointer as a jlong handle (0 on failure).
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jobject runtime_option) {
// Convert Java strings / RuntimeOption into their C++ counterparts.
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_config_file = fni::ConvertTo<std::string>(env, config_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new keypointdetection::PPTinyPose(
c_model_file, c_params_file, c_config_file, c_runtime_option);
// Presumably expands to an early return when the model failed to
// initialize. NOTE(review): if it returns without deleting c_model_ptr the
// model object leaks on init failure — verify the macro's expansion.
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
// setup use_dark param
// Read the Java-side mUseDark flag once at bind time and copy it into the
// native predictor; later changes to the Java field do not propagate.
const jclass j_pptinypose_clazz = env->GetObjectClass(thiz);
const jfieldID j_use_dark_id = env->GetFieldID(
j_pptinypose_clazz, "mUseDark", "Z");
jboolean j_use_dark = env->GetBooleanField(thiz, j_use_dark_id);
const bool c_use_dark = static_cast<bool>(j_use_dark);
c_model_ptr->use_dark = c_use_dark;
env->DeleteLocalRef(j_pptinypose_clazz);
vision::EnableFlyCV();
// The Java side stores this handle and passes it back to predict/release.
return reinterpret_cast<jlong>(c_model_ptr);
}
// Runs keypoint prediction on an ARGB8888 bitmap using the native predictor
// referenced by 'cxx_context'. Returns a new Java KeyPointDetectionResult,
// or NULL on invalid context / bitmap conversion failure.
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context, jobject argb8888_bitmap,
jboolean save_image, jstring save_path, jboolean rendering,
jfloat conf_threshold) {
// A zero handle means the predictor was never bound (or already released).
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<keypointdetection::PPTinyPose *>(cxx_context);
// c_result is stack-local; its data is copied into the Java result below.
vision::KeyPointDetectionResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
// Optionally draw the keypoints back onto the input bitmap (and save it).
if (rendering) {
fni::RenderingKeyPointDetection(
env, c_bgr, c_result, argb8888_bitmap, save_image,
conf_threshold, save_path);
}
// Allocate and fill a Java KeyPointDetectionResult from the C++ result.
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::KEYPOINT_DETECTION);
}
// Deletes the native PPTinyPose predictor referenced by 'cxx_context'.
// Returns JNI_FALSE for a zero (unbound) handle, JNI_TRUE otherwise.
// NOTE(review): the Java side must not reuse the handle after this call.
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<keypointdetection::PPTinyPose *>(cxx_context);
// Presumably dumps recorded runtime perf stats before destruction.
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release PPTinyPose in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif

View File

@@ -438,9 +438,79 @@ bool AllocateJavaFaceDetectionResultFromCxx(
return true;
}
bool AllocateJavaResultFromCxx(
JNIEnv *env, jobject j_result_obj, void *cxx_result,
vision::ResultType type) {
// Fills an existing Java KeyPointDetectionResult object from a C++
// vision::KeyPointDetectionResult. Returns false for a null/empty result or
// when the target object is not a KeyPointDetectionResult.
// WARN: Please make sure 'j_keypoint_det_result_obj' param
// is a ref of Java KeyPointDetectionResult.
// Field signatures of Java KeyPointDetectionResult:
// (1) mKeyPoints float[][] shape (n*num_joints,2): [[F
// (2) mScores float[] shape (n*num_joints): [F
// (3) mNumJoints int shape (1): I
// (4) mInitialized boolean: Z
// Docs: docs/api/vision_results/keypointdetection_result.md
bool AllocateJavaKeyPointDetectionResultFromCxx(
    JNIEnv *env, jobject j_keypoint_det_result_obj, void *cxx_result) {
  if (cxx_result == nullptr) {
    return false;
  }
  auto c_result_ptr =
      reinterpret_cast<vision::KeyPointDetectionResult *>(cxx_result);
  const int len = static_cast<int>(c_result_ptr->keypoints.size());
  if (len == 0) {
    return false;
  }
  const jclass j_keypoint_det_result_clazz = env->FindClass(
      "com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
  const jclass j_keypoint_float_arr_clazz = env->FindClass("[F");  // (2,)
  const jfieldID j_keypoint_det_keypoints_id = env->GetFieldID(
      j_keypoint_det_result_clazz, "mKeyPoints", "[[F");
  const jfieldID j_keypoint_det_scores_id = env->GetFieldID(
      j_keypoint_det_result_clazz, "mScores", "[F");
  const jfieldID j_keypoint_det_num_joints_id = env->GetFieldID(
      j_keypoint_det_result_clazz, "mNumJoints", "I");
  const jfieldID j_keypoint_det_initialized_id = env->GetFieldID(
      j_keypoint_det_result_clazz, "mInitialized", "Z");
  if (!env->IsInstanceOf(j_keypoint_det_result_obj,
                         j_keypoint_det_result_clazz)) {
    // Fix: release the class local refs on this early-exit path
    // (previously leaked).
    env->DeleteLocalRef(j_keypoint_det_result_clazz);
    env->DeleteLocalRef(j_keypoint_float_arr_clazz);
    return false;
  }
  // mKeyPoints float[][] shape (n*num_joints,2): [[F
  const auto &keypoints = c_result_ptr->keypoints;
  jobjectArray j_keypoint_det_keypoints_float_arr =
      env->NewObjectArray(len, j_keypoint_float_arr_clazz, NULL);
  for (int i = 0; i < len; ++i) {
    jfloatArray j_point = env->NewFloatArray(2);
    env->SetFloatArrayRegion(j_point, 0, 2, keypoints.at(i).data());
    env->SetObjectArrayElement(j_keypoint_det_keypoints_float_arr, i, j_point);
    // Drop the per-row local ref immediately; the array holds its own ref.
    env->DeleteLocalRef(j_point);
  }
  // mScores float[] shape (n*num_joints): [F
  const auto &scores = c_result_ptr->scores;
  const int score_len = static_cast<int>(scores.size());
  jfloatArray j_keypoint_det_scores_float_arr = env->NewFloatArray(score_len);
  env->SetFloatArrayRegion(j_keypoint_det_scores_float_arr, 0, score_len,
                           scores.data());
  // mNumJoints int shape (1): I
  jint j_keypoint_det_num_joints = static_cast<jint>(c_result_ptr->num_joints);
  // Set object fields
  env->SetObjectField(j_keypoint_det_result_obj, j_keypoint_det_keypoints_id,
                      j_keypoint_det_keypoints_float_arr);
  env->SetObjectField(j_keypoint_det_result_obj, j_keypoint_det_scores_id,
                      j_keypoint_det_scores_float_arr);
  env->SetIntField(j_keypoint_det_result_obj, j_keypoint_det_num_joints_id,
                   j_keypoint_det_num_joints);
  env->SetBooleanField(j_keypoint_det_result_obj,
                       j_keypoint_det_initialized_id, JNI_TRUE);
  // Release local Refs
  env->DeleteLocalRef(j_keypoint_det_keypoints_float_arr);
  env->DeleteLocalRef(j_keypoint_det_scores_float_arr);
  env->DeleteLocalRef(j_keypoint_det_result_clazz);
  env->DeleteLocalRef(j_keypoint_float_arr_clazz);
  return true;
}
bool AllocateJavaResultFromCxx(JNIEnv *env, jobject j_result_obj,
void *cxx_result, vision::ResultType type) {
if (type == vision::ResultType::CLASSIFY) {
return AllocateJavaClassifyResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::DETECTION) {
@@ -451,6 +521,8 @@ bool AllocateJavaResultFromCxx(
return AllocateJavaSegmentationResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
return AllocateJavaFaceDetectionResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return AllocateJavaKeyPointDetectionResultFromCxx(env, j_result_obj, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -519,6 +591,18 @@ jobject NewJavaFaceDetectionResultFromCxx(JNIEnv *env, void *cxx_result) {
return j_face_det_result_obj;
}
// Creates a fresh Java KeyPointDetectionResult via its no-arg constructor
// and populates it from the given C++ result.
jobject NewJavaKeyPointDetectionResultFromCxx(JNIEnv *env, void *cxx_result) {
  const jclass j_result_clazz = env->FindClass(
      "com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
  const jmethodID j_default_ctor =
      env->GetMethodID(j_result_clazz, "<init>", "()V");
  jobject j_result_obj = env->NewObject(j_result_clazz, j_default_ctor);
  AllocateJavaKeyPointDetectionResultFromCxx(env, j_result_obj, cxx_result);
  env->DeleteLocalRef(j_result_clazz);
  return j_result_obj;
}
jobject NewJavaResultFromCxx(
JNIEnv *env, void *cxx_result, vision::ResultType type) {
if (type == vision::ResultType::CLASSIFY) {
@@ -531,6 +615,8 @@ jobject NewJavaResultFromCxx(
return NewJavaSegmentationResultFromCxx(env, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
return NewJavaFaceDetectionResultFromCxx(env, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return NewJavaKeyPointDetectionResultFromCxx(env, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -1058,6 +1144,95 @@ bool AllocateFaceDetectionResultFromJava(
return true;
}
// Fills a C++ vision::KeyPointDetectionResult from a Java
// KeyPointDetectionResult object. Returns false on null inputs, an
// uninitialized Java result, or inconsistent array lengths.
// WARN: Please make sure 'j_keypoint_det_result_obj' param
// is a ref of Java KeyPointDetectionResult.
// Field signatures of Java KeyPointDetectionResult:
// (1) mKeyPoints float[][] shape (n*num_joints,2): [[F
// (2) mScores float[] shape (n*num_joints): [F
// (3) mNumJoints int shape (1): I
// (4) mInitialized boolean: Z
// Docs: docs/api/vision_results/keypointdetection_result.md
bool AllocateKeyPointDetectionResultFromJava(
    JNIEnv *env, jobject j_keypoint_det_result_obj, void *cxx_result) {
  if (cxx_result == nullptr || j_keypoint_det_result_obj == nullptr) {
    return false;
  }
  auto c_result_ptr =
      reinterpret_cast<vision::KeyPointDetectionResult *>(cxx_result);
  const jclass j_keypoint_det_result_clazz_cc = env->FindClass(
      "com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
  const jfieldID j_keypoint_det_keypoints_id_cc = env->GetFieldID(
      j_keypoint_det_result_clazz_cc, "mKeyPoints", "[[F");
  const jfieldID j_keypoint_det_scores_id_cc = env->GetFieldID(
      j_keypoint_det_result_clazz_cc, "mScores", "[F");
  const jfieldID j_keypoint_det_num_joints_id_cc = env->GetFieldID(
      j_keypoint_det_result_clazz_cc, "mNumJoints", "I");
  const jfieldID j_keypoint_det_initialized_id_cc = env->GetFieldID(
      j_keypoint_det_result_clazz_cc, "mInitialized", "Z");
  if (!env->IsInstanceOf(j_keypoint_det_result_obj,
                         j_keypoint_det_result_clazz_cc)) {
    // Fix: release the class local ref on this early-exit path.
    env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
    return false;
  }
  // mInitialized boolean: Z
  jboolean j_keypoint_det_initialized = env->GetBooleanField(
      j_keypoint_det_result_obj, j_keypoint_det_initialized_id_cc);
  if (j_keypoint_det_initialized == JNI_FALSE) {
    // Fix: release the class local ref on this early-exit path.
    env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
    return false;
  }
  jobjectArray j_keypoint_det_keypoints_float_arr =
      reinterpret_cast<jobjectArray>(env->GetObjectField(
          j_keypoint_det_result_obj, j_keypoint_det_keypoints_id_cc));
  jfloatArray j_keypoint_det_scores_float_arr =
      reinterpret_cast<jfloatArray>(env->GetObjectField(
          j_keypoint_det_result_obj, j_keypoint_det_scores_id_cc));
  jint j_keypoint_det_num_joints = env->GetIntField(
      j_keypoint_det_result_obj, j_keypoint_det_num_joints_id_cc);
  int len = env->GetArrayLength(j_keypoint_det_keypoints_float_arr);
  if ((len == 0) ||
      (len != env->GetArrayLength(j_keypoint_det_scores_float_arr)) ||
      (j_keypoint_det_num_joints < 0)) {
    // Fix: release local refs on this early-exit path (previously leaked).
    env->DeleteLocalRef(j_keypoint_det_keypoints_float_arr);
    env->DeleteLocalRef(j_keypoint_det_scores_float_arr);
    env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
    return false;
  }
  // Init Cxx result
  c_result_ptr->Clear();
  // mKeyPoints float[][] shape (n*num_joints,2): [[F
  c_result_ptr->keypoints.resize(len);
  bool c_check_validation = true;
  for (int i = 0; i < len; ++i) {
    auto j_point = reinterpret_cast<jfloatArray>(
        env->GetObjectArrayElement(j_keypoint_det_keypoints_float_arr, i));
    if (env->GetArrayLength(j_point) == 2) {
      jfloat *j_point_ptr = env->GetFloatArrayElements(j_point, nullptr);
      std::memcpy(c_result_ptr->keypoints[i].data(), j_point_ptr,
                  2 * sizeof(float));
      env->ReleaseFloatArrayElements(j_point, j_point_ptr, 0);
      // Fix: drop the per-iteration local ref so large results cannot
      // exhaust the JNI local reference table (previously leaked).
      env->DeleteLocalRef(j_point);
    } else {
      env->DeleteLocalRef(j_point);
      c_check_validation = false;
      break;
    }
  }
  if (!c_check_validation) {
    // Fix: message referred to "detection box"; this is a keypoint row.
    LOGE("The length of each keypoint is not equal 2!");
    // Fix: release local refs on this error path (previously leaked).
    env->DeleteLocalRef(j_keypoint_det_keypoints_float_arr);
    env->DeleteLocalRef(j_keypoint_det_scores_float_arr);
    env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
    return false;
  }
  // mScores float[] shape (n*num_joints): [F
  c_result_ptr->scores.resize(len);
  jfloat *j_keypoint_det_scores_ptr =
      env->GetFloatArrayElements(j_keypoint_det_scores_float_arr, nullptr);
  std::memcpy(c_result_ptr->scores.data(), j_keypoint_det_scores_ptr,
              len * sizeof(float));
  env->ReleaseFloatArrayElements(j_keypoint_det_scores_float_arr,
                                 j_keypoint_det_scores_ptr, 0);
  // mNumJoints int shape (1): I
  c_result_ptr->num_joints = static_cast<int>(j_keypoint_det_num_joints);
  // Release local Refs
  env->DeleteLocalRef(j_keypoint_det_keypoints_float_arr);
  env->DeleteLocalRef(j_keypoint_det_scores_float_arr);
  env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
  return true;
}
bool AllocateCxxResultFromJava(
JNIEnv *env, jobject j_result_obj, void *cxx_result,
vision::ResultType type) {
@@ -1069,8 +1244,10 @@ bool AllocateCxxResultFromJava(
return AllocateOCRResultFromJava(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::SEGMENTATION) {
return AllocateSegmentationResultFromJava(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
} else if (type == vision::ResultType::FACE_DETECTION) {
return AllocateFaceDetectionResultFromJava(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return AllocateKeyPointDetectionResultFromJava(env, j_result_obj, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -1081,7 +1258,6 @@ bool AllocateCxxResultFromJava(
} // namespace jni
} // namespace fastdeploy
#ifdef __cplusplus
extern "C" {
#endif
@@ -1109,7 +1285,7 @@ Java_com_baidu_paddle_fastdeploy_vision_SegmentationResult_releaseCxxBufferNativ
auto c_result_ptr = reinterpret_cast<
fastdeploy::vision::SegmentationResult *>(j_cxx_buffer);
delete c_result_ptr;
LOGD("[End] Release SegmentationResult in native !");
LOGD("[End] Release SegmentationResult & CxxBuffer in native !");
env->SetBooleanField(thiz, j_seg_initialized_id, JNI_FALSE);
env->DeleteLocalRef(j_seg_result_clazz);

View File

@@ -42,13 +42,13 @@ Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_bindNative(
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
// Setup is_vertical_screen param
// setup is_vertical_screen param
const jclass j_ppseg_clazz = env->GetObjectClass(thiz);
const jfieldID j_is_vertical_screen_id = env->GetFieldID(
j_ppseg_clazz, "mIsVerticalScreen", "Z");
jboolean j_is_vertical_screen = env->GetBooleanField(
thiz, j_is_vertical_screen_id);
bool c_is_vertical_screen = static_cast<jboolean>(j_is_vertical_screen);
const bool c_is_vertical_screen = static_cast<bool>(j_is_vertical_screen);
c_model_ptr->GetPreprocessor().SetIsVerticalScreen(c_is_vertical_screen);
env->DeleteLocalRef(j_ppseg_clazz);

View File

@@ -225,6 +225,33 @@ jboolean VisFaceDetectionFromJava(
return JNI_TRUE;
}
// Visualize a Java KeyPointDetectionResult onto the given ARGB8888 bitmap.
// Returns JNI_FALSE when 'result' is not a KeyPointDetectionResult or any
// conversion step fails; JNI_TRUE on success.
jboolean VisKeyPointDetectionFromJava(
    JNIEnv *env, jobject argb8888_bitmap, jobject result,
    jfloat conf_threshold) {
  // Reject anything that is not a Java KeyPointDetectionResult.
  const jclass j_expected_clazz = env->FindClass(
      "com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
  const jboolean is_keypoint_result =
      env->IsInstanceOf(result, j_expected_clazz);
  env->DeleteLocalRef(j_expected_clazz);
  if (!is_keypoint_result) {
    return JNI_FALSE;
  }
  // Pull the Java-side result back into a C++ result struct.
  vision::KeyPointDetectionResult c_keypoint_result;
  if (!fni::AllocateCxxResultFromJava(
          env, result, reinterpret_cast<void *>(&c_keypoint_result),
          vision::ResultType::KEYPOINT_DETECTION)) {
    return JNI_FALSE;
  }
  cv::Mat c_bgr_mat;
  if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr_mat)) {
    return JNI_FALSE;
  }
  // Draw and write the visualization back into the input bitmap.
  auto c_vis_mat = vision::VisKeypointDetection(c_bgr_mat, c_keypoint_result,
                                                conf_threshold);
  if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_mat)) {
    return JNI_FALSE;
  }
  return JNI_TRUE;
}
} // jni
} // fastdeploy
@@ -283,8 +310,15 @@ Java_com_baidu_paddle_fastdeploy_vision_Visualize_visFaceDetectionNative(
line_size, font_size);
}
// JNI entry point for Java Visualize.visKeyPointDetectionNative: thin
// wrapper that delegates to VisKeyPointDetectionFromJava.
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visKeyPointDetectionNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jfloat conf_threshold) {
return fni::VisKeyPointDetectionFromJava(env, argb8888_bitmap, result,
conf_threshold);
}
#ifdef __cplusplus
}
#endif

View File

@@ -3,12 +3,14 @@ package com.baidu.paddle.fastdeploy;
public class RuntimeOption {
public int mCpuThreadNum = 1;
public boolean mEnableLiteFp16 = false;
public boolean mEnableLiteInt8 = false;
public LitePowerMode mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
public String mLiteOptimizedModelDir = "";
public RuntimeOption() {
mCpuThreadNum = 1;
mEnableLiteFp16 = false;
mEnableLiteInt8 = false;
mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
mLiteOptimizedModelDir = "";
}
@@ -21,6 +23,14 @@ public class RuntimeOption {
mEnableLiteFp16 = false;
}
public void enableLiteInt8() {
mEnableLiteInt8 = true;
}
public void disableLiteInt8() {
mEnableLiteInt8 = false;
}
public void setCpuThreadNum(int threadNum) {
mCpuThreadNum = threadNum;
}

View File

@@ -0,0 +1,42 @@
package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull;
import java.util.Arrays;
public class KeyPointDetectionResult {
    // Flattened keypoints, one (x, y) pair per row: shape [n * num_joints, 2].
    public float[][] mKeyPoints;
    // Confidence score per keypoint: shape [n * num_joints].
    public float[] mScores;
    // Number of joints per detected instance; -1 until set.
    public int mNumJoints = -1;
    // True once the result has been filled by the native side.
    public boolean mInitialized = false;

    public KeyPointDetectionResult() {
        mInitialized = false;
    }

    // Whether this result has been populated.
    public boolean initialized() {
        return mInitialized;
    }

    // Unpacks a flat [x0, y0, x1, y1, ...] buffer into mKeyPoints rows of 2.
    // Leaves mKeyPoints untouched when the buffer holds fewer than 2 values.
    public void setKeyPoints(@NonNull float[] keyPointsBuffer) {
        final int numPoints = keyPointsBuffer.length / 2;
        if (numPoints <= 0) {
            return;
        }
        mKeyPoints = new float[numPoints][2];
        for (int i = 0; i < numPoints; ++i) {
            mKeyPoints[i][0] = keyPointsBuffer[i * 2];
            mKeyPoints[i][1] = keyPointsBuffer[i * 2 + 1];
        }
    }

    // Copies the given scores; leaves mScores untouched for an empty buffer.
    public void setScores(@NonNull float[] scoresBuffer) {
        if (scoresBuffer.length > 0) {
            mScores = scoresBuffer.clone();
        }
    }

    public void setNumJoints(int numJoints) {
        mNumJoints = numJoints;
    }
}

View File

@@ -161,6 +161,24 @@ public class Visualize {
fontSize);
}
// Visualize KeyPointDetectionResult
public static boolean visKeypointDetection(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result) {
return visKeyPointDetectionNative(
ARGB8888Bitmap,
result,
0.5f);
}
public static boolean visKeypointDetection(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result,
float confThreshold) {
return visKeyPointDetectionNative(
ARGB8888Bitmap,
result,
confThreshold);
}
// VisDetection in native
private static native boolean visDetectionNative(Bitmap ARGB8888Bitmap,
DetectionResult result,
@@ -191,6 +209,11 @@ public class Visualize {
int lineSize,
float fontSize);
// VisKeypointDetection in native
private static native boolean visKeyPointDetectionNative(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result,
float confThreshold);
/* Initializes at the beginning */
static {
FastDeployInitializer.init();

View File

@@ -0,0 +1,156 @@
package com.baidu.paddle.fastdeploy.vision.keypointdetection;
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
public class PPTinyPose {
    protected long mCxxContext = 0; // Context from native.
    protected boolean mUseDark = true; // Read once by native at bind time.
    protected boolean mInitialized = false;

    public PPTinyPose() {
        mInitialized = false;
    }

    // Constructor with default runtime option
    public PPTinyPose(String modelFile,
                      String paramsFile,
                      String configFile) {
        init_(modelFile, paramsFile, configFile, new RuntimeOption());
    }

    // Constructor with custom runtime option
    public PPTinyPose(String modelFile,
                      String paramsFile,
                      String configFile,
                      RuntimeOption runtimeOption) {
        init_(modelFile, paramsFile, configFile, runtimeOption);
    }

    // Set whether to use DARK decoding. NOTE: the native side reads this
    // flag once during bindNative, so call this before init/bind.
    public void setUseDark(boolean flag) {
        mUseDark = flag;
    }

    // Call init manually with a runtime option.
    public boolean init(String modelFile,
                        String paramsFile,
                        String configFile,
                        RuntimeOption runtimeOption) {
        return init_(modelFile, paramsFile, configFile, runtimeOption);
    }

    // Releases the native predictor. Returns false if nothing was bound.
    public boolean release() {
        mInitialized = false;
        if (mCxxContext == 0) {
            return false;
        }
        boolean released = releaseNative(mCxxContext);
        // Fix: clear the handle so later predict()/release() calls can never
        // pass the already-deleted native pointer back to JNI
        // (use-after-free guard).
        mCxxContext = 0;
        return released;
    }

    public boolean initialized() {
        return mInitialized;
    }

    // Predict without image saving and bitmap rendering.
    public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap) {
        if (mCxxContext == 0) {
            return new KeyPointDetectionResult();
        }
        // Only support ARGB8888 bitmap in native now.
        KeyPointDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
                false, "", false, 0.f);
        if (result == null) {
            return new KeyPointDetectionResult();
        }
        return result;
    }

    // Predict with optional in-place bitmap rendering (no image saving).
    public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap,
                                           boolean rendering,
                                           float confThreshold) {
        if (mCxxContext == 0) {
            return new KeyPointDetectionResult();
        }
        // Only support ARGB8888 bitmap in native now.
        KeyPointDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
                false, "", rendering, confThreshold);
        if (result == null) {
            return new KeyPointDetectionResult();
        }
        return result;
    }

    // Predict with image saving and bitmap rendering (will cost more times)
    public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap,
                                           String savedImagePath,
                                           float confThreshold) {
        // confThreshold is for visualizing only.
        if (mCxxContext == 0) {
            return new KeyPointDetectionResult();
        }
        // Only support ARGB8888 bitmap in native now.
        KeyPointDetectionResult result = predictNative(
                mCxxContext, ARGB8888Bitmap, true,
                savedImagePath, true, confThreshold);
        if (result == null) {
            return new KeyPointDetectionResult();
        }
        return result;
    }

    // Binds a native predictor; rebinds (release + bind) when one exists.
    private boolean init_(String modelFile,
                          String paramsFile,
                          String configFile,
                          RuntimeOption runtimeOption) {
        if (!mInitialized) {
            mCxxContext = bindNative(
                    modelFile,
                    paramsFile,
                    configFile,
                    runtimeOption);
            if (mCxxContext != 0) {
                mInitialized = true;
            }
            return mInitialized;
        } else {
            // release current native context and bind a new one.
            if (release()) {
                mCxxContext = bindNative(
                        modelFile,
                        paramsFile,
                        configFile,
                        runtimeOption);
                if (mCxxContext != 0) {
                    mInitialized = true;
                }
                return mInitialized;
            }
            return false;
        }
    }

    // Bind predictor from native context.
    private native long bindNative(String modelFile,
                                   String paramsFile,
                                   String configFile,
                                   RuntimeOption runtimeOption);

    // Call prediction from native context with rendering.
    private native KeyPointDetectionResult predictNative(long CxxContext,
                                                         Bitmap ARGB8888Bitmap,
                                                         boolean saveImage,
                                                         String savePath,
                                                         boolean rendering,
                                                         float confThreshold);

    // Release buffers allocated in native context.
    private native boolean releaseNative(long CxxContext);

    // Initializes at the beginning.
    static {
        FastDeployInitializer.init();
    }
}

View File

@@ -22,12 +22,6 @@ public class PaddleSegModel {
init_(modelFile, paramsFile, configFile, new RuntimeOption());
}
// Is vertical screen or not, for PP-HumanSeg on vertical screen,
// this flag must be 'true'.
public void setVerticalScreenFlag(boolean flag) {
mIsVerticalScreen = flag;
}
// Constructor with custom runtime option
public PaddleSegModel(String modelFile,
String paramsFile,
@@ -43,6 +37,17 @@ public class PaddleSegModel {
return init_(modelFile, paramsFile, configFile, runtimeOption);
}
// Deprecated. Please use setIsVerticalScreen instead.
public void setVerticalScreenFlag(boolean flag) {
mIsVerticalScreen = flag;
}
// Is vertical screen or not, for PP-HumanSeg on vertical screen,
// this flag must be 'true'.
public void setIsVerticalScreen(boolean flag) {
mIsVerticalScreen = flag;
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {