[Android] Support PPTinyPose on Android (#746)

* [Android] Update ppseg jni via new api and optimize jni vis funcs

* delete local refs

* [Android] Add PPTinyPose jni and java api

* [Android] Update gradle download tasks info

* [Android] Add PPTinyPose Android app example

* update app build.gradle
DefTruth
2022-11-30 16:29:20 +08:00
committed by GitHub
parent 9d78b1d414
commit 8e4a38ce21
27 changed files with 1649 additions and 50 deletions

View File

@@ -19,7 +19,6 @@ android {
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}
dependencies {
@@ -69,6 +68,10 @@ def FD_MODEL = [
    [
        'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/portrait_pp_humansegv2_lite_256x144_inference_model.tgz',
        'dest': 'src/main/assets/models'
    ],
    [
        'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/PP_TinyPose_128x96_infer.tgz',
        'dest': 'src/main/assets/models'
    ]
]
@@ -81,7 +84,7 @@ def FD_JAVA_SDK = [
task downloadAndExtractModels(type: DefaultTask) {
    doFirst {
-       println "Downloading and extracting fastdeploy models ..."
+       println "[INFO] Downloading and extracting fastdeploy models ..."
    }
    doLast {
        String cachePath = "cache"
@@ -91,19 +94,22 @@ task downloadAndExtractModels(type: DefaultTask) {
        FD_MODEL.eachWithIndex { model, index ->
            String[] modelPaths = model.src.split("/")
            String modelName = modelPaths[modelPaths.length - 1]
            String modelPrefix = modelName.substring(0, modelName.length() - 4)
            // Download the target model if not exists
-           boolean copyFiles = !file("${model.dest}").exists()
+           boolean copyFiles = !file("${model.dest}/${modelPrefix}").exists()
            if (!file("${cachePath}/${modelName}").exists()) {
-               println "Downloading ${model.src} -> ${cachePath}/${modelName}"
+               println "[INFO] Downloading ${model.src} -> ${cachePath}/${modelName}"
                ant.get(src: model.src, dest: file("${cachePath}/${modelName}"))
                copyFiles = true
            }
            if (copyFiles) {
-               println "Coping ${cachePath}/${modelName} -> ${model.dest}"
+               println "[INFO] Taring ${cachePath}/${modelName} -> ${model.dest}/${modelPrefix}"
                copy {
                    from tarTree("${cachePath}/${modelName}")
                    into "${model.dest}"
                }
            } else {
                println "[INFO] ${model.dest}/${modelPrefix} already exists!"
            }
        }
    }
@@ -111,7 +117,7 @@ task downloadAndExtractModels(type: DefaultTask) {
task downloadAndExtractSDKs(type: DefaultTask) {
    doFirst {
-       println "Downloading and extracting fastdeploy android java sdk ..."
+       println "[INFO] Downloading and extracting fastdeploy android java sdk ..."
    }
    doLast {
        String cachePath = "cache"
@@ -124,16 +130,18 @@ task downloadAndExtractSDKs(type: DefaultTask) {
            // Download the target SDK if not exists
            boolean copyFiles = !file("${sdk.dest}/${sdkName}").exists()
            if (!file("${cachePath}/${sdkName}").exists()) {
-               println "Downloading ${sdk.src} -> ${cachePath}/${sdkName}"
+               println "[INFO] Downloading ${sdk.src} -> ${cachePath}/${sdkName}"
                ant.get(src: sdk.src, dest: file("${cachePath}/${sdkName}"))
                copyFiles = true
            }
            if (copyFiles) {
-               println "Coping ${cachePath}/${sdkName} -> ${sdk.dest}/${sdkName}"
+               println "[INFO] Coping ${cachePath}/${sdkName} -> ${sdk.dest}/${sdkName}"
                copy {
                    from "${cachePath}/${sdkName}"
                    into "${sdk.dest}"
                }
            } else {
                println "[INFO] ${sdk.dest}/${sdkName} already exists!"
            }
        }
    }

View File

@@ -15,14 +15,14 @@
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
-       <activity android:name=".segmentation.SegmentationMainActivity">
+       <activity android:name=".keypointdetection.KeyPointDetectionMainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>
        <activity
-           android:name=".segmentation.SegmentationSettingsActivity"
+           android:name=".keypointdetection.KeyPointDetectionSettingsActivity"
            android:label="Settings">
        </activity>
    </application>

View File

@@ -0,0 +1,403 @@
package com.baidu.paddle.fastdeploy.app.examples.keypointdetection;
import static com.baidu.paddle.fastdeploy.ui.Utils.decodeBitmap;
import static com.baidu.paddle.fastdeploy.ui.Utils.getRealPathFromURI;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.app.examples.R;
import com.baidu.paddle.fastdeploy.ui.Utils;
import com.baidu.paddle.fastdeploy.ui.view.CameraSurfaceView;
import com.baidu.paddle.fastdeploy.ui.view.ResultListView;
import com.baidu.paddle.fastdeploy.ui.view.model.BaseResultModel;
import com.baidu.paddle.fastdeploy.vision.SegmentationResult;
import com.baidu.paddle.fastdeploy.vision.Visualize;
import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
import com.baidu.paddle.fastdeploy.vision.keypointdetection.PPTinyPose;
import java.util.ArrayList;
import java.util.List;
public class KeyPointDetectionMainActivity extends Activity implements View.OnClickListener, CameraSurfaceView.OnTextureChangedListener {
private static final String TAG = KeyPointDetectionMainActivity.class.getSimpleName();
CameraSurfaceView svPreview;
TextView tvStatus;
ImageButton btnSwitch;
ImageButton btnShutter;
ImageButton btnSettings;
ImageView realtimeToggleButton;
boolean isRealtimeStatusRunning = false;
ImageView backInPreview;
private ImageView albumSelectButton;
private View cameraPageView;
private ViewGroup resultPageView;
private ImageView resultImage;
private ImageView backInResult;
private ResultListView resultView;
private Bitmap shutterBitmap;
private Bitmap picBitmap;
private boolean isShutterBitmapCopied = false;
public static final int TYPE_UNKNOWN = -1;
public static final int BTN_SHUTTER = 0;
public static final int ALBUM_SELECT = 1;
public static final int REALTIME_DETECT = 2;
private static int TYPE = REALTIME_DETECT;
private static final int REQUEST_PERMISSION_CODE_STORAGE = 101;
private static final int INTENT_CODE_PICK_IMAGE = 100;
private static final int TIME_SLEEP_INTERVAL = 50; // ms
long timeElapsed = 0;
long frameCounter = 0;
// Call 'init' and 'release' manually later
PPTinyPose predictor = new PPTinyPose();
private List<BaseResultModel> results = new ArrayList<>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Fullscreen
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.keypointdetection_activity_main);
// Clear all setting items to avoid app crashing due to the incorrect settings
initSettings();
// Check and request CAMERA and WRITE_EXTERNAL_STORAGE permissions
if (!checkAllPermissions()) {
requestAllPermissions();
}
// Init the camera preview and UI components
initView();
}
@SuppressLint("NonConstantResourceId")
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.btn_switch:
svPreview.switchCamera();
break;
case R.id.btn_shutter:
TYPE = BTN_SHUTTER;
shutterAndPauseCamera();
resultView.setAdapter(null);
break;
case R.id.btn_settings:
startActivity(new Intent(this, KeyPointDetectionSettingsActivity.class));
break;
case R.id.realtime_toggle_btn:
toggleRealtimeStyle();
break;
case R.id.back_in_preview:
finish();
break;
case R.id.album_select:
TYPE = ALBUM_SELECT;
// Judge whether authority has been granted.
if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
// If this permission was requested before the application but the user refused the request, this method will return true.
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_PERMISSION_CODE_STORAGE);
} else {
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setType("image/*");
startActivityForResult(intent, INTENT_CODE_PICK_IMAGE);
}
resultView.setAdapter(null);
break;
case R.id.back_in_result:
back();
break;
}
}
@Override
public void onBackPressed() {
super.onBackPressed();
back();
}
private void back() {
resultPageView.setVisibility(View.GONE);
cameraPageView.setVisibility(View.VISIBLE);
TYPE = REALTIME_DETECT;
isShutterBitmapCopied = false;
svPreview.onResume();
results.clear();
}
private void shutterAndPauseCamera() {
new Thread(new Runnable() {
@Override
public void run() {
try {
// Sleep some times to ensure picture has been correctly shut.
Thread.sleep(TIME_SLEEP_INTERVAL * 10); // 500ms
} catch (InterruptedException e) {
e.printStackTrace();
}
runOnUiThread(new Runnable() {
@SuppressLint("SetTextI18n")
public void run() {
// These codes will run in main thread.
svPreview.onPause();
cameraPageView.setVisibility(View.GONE);
resultPageView.setVisibility(View.VISIBLE);
if (shutterBitmap != null && !shutterBitmap.isRecycled()) {
detail(shutterBitmap);
} else {
new AlertDialog.Builder(KeyPointDetectionMainActivity.this)
.setTitle("Empty Result!")
.setMessage("Current picture is empty, please shutting it again!")
.setCancelable(true)
.show();
}
}
});
}
}).start();
}
private void copyBitmapFromCamera(Bitmap ARGB8888ImageBitmap) {
if (isShutterBitmapCopied || ARGB8888ImageBitmap == null) {
return;
}
if (!ARGB8888ImageBitmap.isRecycled()) {
synchronized (this) {
shutterBitmap = ARGB8888ImageBitmap.copy(Bitmap.Config.ARGB_8888, true);
}
SystemClock.sleep(TIME_SLEEP_INTERVAL);
isShutterBitmapCopied = true;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == INTENT_CODE_PICK_IMAGE) {
if (resultCode == Activity.RESULT_OK) {
cameraPageView.setVisibility(View.GONE);
resultPageView.setVisibility(View.VISIBLE);
Uri uri = data.getData();
String path = getRealPathFromURI(this, uri);
Bitmap bitmap = decodeBitmap(path, 720, 1280);
picBitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
SystemClock.sleep(TIME_SLEEP_INTERVAL * 10); // 500ms
detail(picBitmap);
}
}
}
private void toggleRealtimeStyle() {
if (isRealtimeStatusRunning) {
isRealtimeStatusRunning = false;
realtimeToggleButton.setImageResource(R.drawable.realtime_stop_btn);
svPreview.setOnTextureChangedListener(this);
tvStatus.setVisibility(View.VISIBLE);
} else {
isRealtimeStatusRunning = true;
realtimeToggleButton.setImageResource(R.drawable.realtime_start_btn);
tvStatus.setVisibility(View.GONE);
isShutterBitmapCopied = false;
// Camera is still working but detecting loop is on pause.
svPreview.setOnTextureChangedListener(new CameraSurfaceView.OnTextureChangedListener() {
@Override
public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) {
if (TYPE == BTN_SHUTTER) {
copyBitmapFromCamera(ARGB8888ImageBitmap);
}
return false;
}
});
}
}
@Override
public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) {
if (TYPE == BTN_SHUTTER) {
copyBitmapFromCamera(ARGB8888ImageBitmap);
return false;
}
boolean modified = false;
long tc = System.currentTimeMillis();
KeyPointDetectionResult result = predictor.predict(ARGB8888ImageBitmap);
timeElapsed += (System.currentTimeMillis() - tc);
Visualize.visKeypointDetection(ARGB8888ImageBitmap, result, 0.f);
modified = result.initialized();
frameCounter++;
if (frameCounter >= 30) {
final int fps = (int) (1000 / (timeElapsed / 30));
runOnUiThread(new Runnable() {
@SuppressLint("SetTextI18n")
public void run() {
tvStatus.setText(Integer.toString(fps) + "fps");
}
});
frameCounter = 0;
timeElapsed = 0;
}
return modified;
}
@Override
protected void onResume() {
super.onResume();
// Reload settings and re-initialize the predictor
checkAndUpdateSettings();
// Open camera until the permissions have been granted
if (!checkAllPermissions()) {
svPreview.disableCamera();
} else {
svPreview.enableCamera();
}
svPreview.onResume();
}
@Override
protected void onPause() {
super.onPause();
svPreview.onPause();
}
@Override
protected void onDestroy() {
if (predictor != null) {
predictor.release();
}
super.onDestroy();
}
public void initView() {
TYPE = REALTIME_DETECT;
// (1) EXPECTED_PREVIEW_WIDTH should mean 'height' and EXPECTED_PREVIEW_HEIGHT
// should mean 'width' if the camera display orientation is 90 | 270 degree
// (Hold the phone upright to record video)
// (2) Smaller resolution is more suitable for Human Pose detection on mobile
// device. So, we set this preview size (720,480) here. Reference:
// https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.5/configs/keypoint/tiny_pose
CameraSurfaceView.EXPECTED_PREVIEW_WIDTH = 720;
CameraSurfaceView.EXPECTED_PREVIEW_HEIGHT = 480;
svPreview = (CameraSurfaceView) findViewById(R.id.sv_preview);
svPreview.setOnTextureChangedListener(this);
tvStatus = (TextView) findViewById(R.id.tv_status);
btnSwitch = (ImageButton) findViewById(R.id.btn_switch);
btnSwitch.setOnClickListener(this);
btnShutter = (ImageButton) findViewById(R.id.btn_shutter);
btnShutter.setOnClickListener(this);
btnSettings = (ImageButton) findViewById(R.id.btn_settings);
btnSettings.setOnClickListener(this);
realtimeToggleButton = findViewById(R.id.realtime_toggle_btn);
realtimeToggleButton.setOnClickListener(this);
backInPreview = findViewById(R.id.back_in_preview);
backInPreview.setOnClickListener(this);
albumSelectButton = findViewById(R.id.album_select);
albumSelectButton.setOnClickListener(this);
cameraPageView = findViewById(R.id.camera_page);
resultPageView = findViewById(R.id.result_page);
resultImage = findViewById(R.id.result_image);
backInResult = findViewById(R.id.back_in_result);
backInResult.setOnClickListener(this);
resultView = findViewById(R.id.result_list_view);
}
private void detail(Bitmap bitmap) {
predictor.predict(bitmap, true, 5.f);
resultImage.setImageBitmap(bitmap);
}
@SuppressLint("ApplySharedPref")
public void initSettings() {
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.clear();
editor.commit();
KeyPointDetectionSettingsActivity.resetSettings();
}
public void checkAndUpdateSettings() {
if (KeyPointDetectionSettingsActivity.checkAndUpdateSettings(this)) {
String realModelDir = getCacheDir() + "/" + KeyPointDetectionSettingsActivity.modelDir;
Utils.copyDirectoryFromAssets(this, KeyPointDetectionSettingsActivity.modelDir, realModelDir);
String modelFile = realModelDir + "/" + "model.pdmodel";
String paramsFile = realModelDir + "/" + "model.pdiparams";
String configFile = realModelDir + "/" + "infer_cfg.yml";
RuntimeOption option = new RuntimeOption();
option.setCpuThreadNum(KeyPointDetectionSettingsActivity.cpuThreadNum);
option.setLitePowerMode(KeyPointDetectionSettingsActivity.cpuPowerMode);
if (Boolean.parseBoolean(KeyPointDetectionSettingsActivity.enableLiteFp16)) {
option.enableLiteFp16();
}
predictor.setUseDark(true);
predictor.init(modelFile, paramsFile, configFile, option);
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (grantResults[0] != PackageManager.PERMISSION_GRANTED || grantResults[1] != PackageManager.PERMISSION_GRANTED) {
new AlertDialog.Builder(KeyPointDetectionMainActivity.this)
.setTitle("Permission denied")
.setMessage("Click to force quit the app, then open Settings->Apps & notifications->Target " +
"App->Permissions to grant all of the permissions.")
.setCancelable(false)
.setPositiveButton("Exit", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
KeyPointDetectionMainActivity.this.finish();
}
}).show();
}
}
private void requestAllPermissions() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.CAMERA}, 0);
}
private boolean checkAllPermissions() {
return ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED
&& ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
}
}
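For quick reference, the predictor calls exercised by the activity above reduce to the minimal sketch below (same classes and methods as in the code above; the model directory is a placeholder and error handling is omitted):

// Minimal PPTinyPose usage, mirroring checkAndUpdateSettings() and onTextureChanged() above.
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
import com.baidu.paddle.fastdeploy.vision.Visualize;
import com.baidu.paddle.fastdeploy.vision.keypointdetection.PPTinyPose;

public class PPTinyPoseQuickStart {
    public static KeyPointDetectionResult run(Bitmap argb8888Bitmap, String modelDir) {
        RuntimeOption option = new RuntimeOption();
        option.setCpuThreadNum(2);
        option.enableLiteFp16();                  // optional Paddle Lite FP16 path
        PPTinyPose predictor = new PPTinyPose();  // init/release are called manually
        predictor.setUseDark(true);               // DARK post-processing, as in the activity
        predictor.init(modelDir + "/model.pdmodel",
                modelDir + "/model.pdiparams",
                modelDir + "/infer_cfg.yml",
                option);
        KeyPointDetectionResult result = predictor.predict(argb8888Bitmap);
        Visualize.visKeypointDetection(argb8888Bitmap, result, 0.f);  // draws onto the bitmap in place
        predictor.release();
        return result;
    }
}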

View File

@@ -0,0 +1,164 @@
package com.baidu.paddle.fastdeploy.app.examples.keypointdetection;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.EditTextPreference;
import android.preference.ListPreference;
import android.preference.PreferenceManager;
import android.support.v7.app.ActionBar;
import com.baidu.paddle.fastdeploy.app.examples.R;
import com.baidu.paddle.fastdeploy.ui.Utils;
import com.baidu.paddle.fastdeploy.ui.view.AppCompatPreferenceActivity;
import java.util.ArrayList;
import java.util.List;
public class KeyPointDetectionSettingsActivity extends AppCompatPreferenceActivity implements
SharedPreferences.OnSharedPreferenceChangeListener {
private static final String TAG = KeyPointDetectionSettingsActivity.class.getSimpleName();
static public int selectedModelIdx = -1;
static public String modelDir = "";
static public int cpuThreadNum = 2;
static public String cpuPowerMode = "";
static public String enableLiteFp16 = "true";
ListPreference lpChoosePreInstalledModel = null;
EditTextPreference etModelDir = null;
ListPreference lpCPUThreadNum = null;
ListPreference lpCPUPowerMode = null;
ListPreference lpEnableLiteFp16 = null;
List<String> preInstalledModelDirs = null;
List<String> preInstalledCPUThreadNums = null;
List<String> preInstalledCPUPowerModes = null;
List<String> preInstalledEnableLiteFp16s = null;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.keypointdetection_settting);
ActionBar supportActionBar = getSupportActionBar();
if (supportActionBar != null) {
supportActionBar.setDisplayHomeAsUpEnabled(true);
}
// Initialize pre-installed models
preInstalledModelDirs = new ArrayList<String>();
preInstalledCPUThreadNums = new ArrayList<String>();
preInstalledCPUPowerModes = new ArrayList<String>();
preInstalledEnableLiteFp16s = new ArrayList<String>();
preInstalledModelDirs.add(getString(R.string.KEYPOINT_DETECTION_MODEL_DIR_DEFAULT));
preInstalledCPUThreadNums.add(getString(R.string.CPU_THREAD_NUM_DEFAULT));
preInstalledCPUPowerModes.add(getString(R.string.CPU_POWER_MODE_DEFAULT));
preInstalledEnableLiteFp16s.add(getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
// Setup UI components
lpChoosePreInstalledModel =
(ListPreference) findPreference(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY));
String[] preInstalledModelNames = new String[preInstalledModelDirs.size()];
for (int i = 0; i < preInstalledModelDirs.size(); i++) {
preInstalledModelNames[i] = preInstalledModelDirs.get(i).substring(preInstalledModelDirs.get(i).lastIndexOf("/") + 1);
}
lpChoosePreInstalledModel.setEntries(preInstalledModelNames);
lpChoosePreInstalledModel.setEntryValues(preInstalledModelDirs.toArray(new String[preInstalledModelDirs.size()]));
lpCPUThreadNum = (ListPreference) findPreference(getString(R.string.CPU_THREAD_NUM_KEY));
lpCPUPowerMode = (ListPreference) findPreference(getString(R.string.CPU_POWER_MODE_KEY));
etModelDir = (EditTextPreference) findPreference(getString(R.string.MODEL_DIR_KEY));
etModelDir.setTitle("Model dir (SDCard: " + Utils.getSDCardDirectory() + ")");
lpEnableLiteFp16 = (ListPreference) findPreference(getString(R.string.ENABLE_LITE_FP16_MODE_KEY));
}
@SuppressLint("ApplySharedPref")
private void reloadSettingsAndUpdateUI() {
SharedPreferences sharedPreferences = getPreferenceScreen().getSharedPreferences();
String selected_model_dir = sharedPreferences.getString(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY),
getString(R.string.KEYPOINT_DETECTION_MODEL_DIR_DEFAULT));
int selected_model_idx = lpChoosePreInstalledModel.findIndexOfValue(selected_model_dir);
if (selected_model_idx >= 0 && selected_model_idx < preInstalledModelDirs.size() && selected_model_idx != selectedModelIdx) {
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(getString(R.string.MODEL_DIR_KEY), preInstalledModelDirs.get(selected_model_idx));
editor.putString(getString(R.string.CPU_THREAD_NUM_KEY), preInstalledCPUThreadNums.get(selected_model_idx));
editor.putString(getString(R.string.CPU_POWER_MODE_KEY), preInstalledCPUPowerModes.get(selected_model_idx));
editor.putString(getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT), preInstalledEnableLiteFp16s.get(selected_model_idx));
editor.commit();
lpChoosePreInstalledModel.setSummary(selected_model_dir);
selectedModelIdx = selected_model_idx;
}
String model_dir = sharedPreferences.getString(getString(R.string.MODEL_DIR_KEY),
getString(R.string.KEYPOINT_DETECTION_MODEL_DIR_DEFAULT));
String cpu_thread_num = sharedPreferences.getString(getString(R.string.CPU_THREAD_NUM_KEY),
getString(R.string.CPU_THREAD_NUM_DEFAULT));
String cpu_power_mode = sharedPreferences.getString(getString(R.string.CPU_POWER_MODE_KEY),
getString(R.string.CPU_POWER_MODE_DEFAULT));
String enable_lite_fp16 = sharedPreferences.getString(getString(R.string.ENABLE_LITE_FP16_MODE_KEY),
getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
etModelDir.setSummary(model_dir);
lpCPUThreadNum.setValue(cpu_thread_num);
lpCPUThreadNum.setSummary(cpu_thread_num);
lpCPUPowerMode.setValue(cpu_power_mode);
lpCPUPowerMode.setSummary(cpu_power_mode);
lpEnableLiteFp16.setValue(enable_lite_fp16);
lpEnableLiteFp16.setSummary(enable_lite_fp16);
}
static boolean checkAndUpdateSettings(Context ctx) {
boolean settingsChanged = false;
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ctx);
String model_dir = sharedPreferences.getString(ctx.getString(R.string.MODEL_DIR_KEY),
ctx.getString(R.string.KEYPOINT_DETECTION_MODEL_DIR_DEFAULT));
settingsChanged |= !modelDir.equalsIgnoreCase(model_dir);
modelDir = model_dir;
String cpu_thread_num = sharedPreferences.getString(ctx.getString(R.string.CPU_THREAD_NUM_KEY),
ctx.getString(R.string.CPU_THREAD_NUM_DEFAULT));
settingsChanged |= cpuThreadNum != Integer.parseInt(cpu_thread_num);
cpuThreadNum = Integer.parseInt(cpu_thread_num);
String cpu_power_mode = sharedPreferences.getString(ctx.getString(R.string.CPU_POWER_MODE_KEY),
ctx.getString(R.string.CPU_POWER_MODE_DEFAULT));
settingsChanged |= !cpuPowerMode.equalsIgnoreCase(cpu_power_mode);
cpuPowerMode = cpu_power_mode;
String enable_lite_fp16 = sharedPreferences.getString(ctx.getString(R.string.ENABLE_LITE_FP16_MODE_KEY),
ctx.getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));
settingsChanged |= !enableLiteFp16.equalsIgnoreCase(enable_lite_fp16);
enableLiteFp16 = enable_lite_fp16;
return settingsChanged;
}
static void resetSettings() {
selectedModelIdx = -1;
modelDir = "";
cpuThreadNum = 2;
cpuPowerMode = "";
enableLiteFp16 = "true";
}
@Override
protected void onResume() {
super.onResume();
getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
reloadSettingsAndUpdateUI();
}
@Override
protected void onPause() {
super.onPause();
getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
reloadSettingsAndUpdateUI();
}
}

View File

@@ -367,7 +367,7 @@ public class SegmentationMainActivity extends Activity implements View.OnClickLi
            if (Boolean.parseBoolean(SegmentationSettingsActivity.enableLiteFp16)) {
                option.enableLiteFp16();
            }
-           predictor.setVerticalScreenFlag(true);
+           predictor.setIsVerticalScreen(true);
            predictor.init(modelFile, paramsFile, configFile, option);
        }
    }

View File

@@ -96,8 +96,6 @@ public class SegmentationSettingsActivity extends AppCompatPreferenceActivity im
                getString(R.string.CPU_THREAD_NUM_DEFAULT));
        String cpu_power_mode = sharedPreferences.getString(getString(R.string.CPU_POWER_MODE_KEY),
                getString(R.string.CPU_POWER_MODE_DEFAULT));
-       String score_threshold = sharedPreferences.getString(getString(R.string.SCORE_THRESHOLD_KEY),
-               getString(R.string.SCORE_THRESHOLD_FACEDET));
        String enable_lite_fp16 = sharedPreferences.getString(getString(R.string.ENABLE_LITE_FP16_MODE_KEY),
                getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT));

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<include
layout="@layout/keypointdetection_camera_page"
android:id="@+id/camera_page"></include>
<include
layout="@layout/keypointdetection_result_page"
android:id="@+id/result_page"
android:visibility="gone"></include>
</FrameLayout>

View File

@@ -0,0 +1,159 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true"
tools:context=".keypointdetection.KeyPointDetectionMainActivity">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/colorWindow">
<com.baidu.paddle.fastdeploy.ui.layout.ActionBarLayout
android:id="@+id/action_bar_main"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<ImageView
android:id="@+id/back_in_preview"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:cropToPadding="true"
android:paddingLeft="40px"
android:paddingTop="60px"
android:paddingRight="60px"
android:paddingBottom="40px"
android:src="@drawable/back_btn" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_marginTop="50px"
android:orientation="horizontal">
<TextView
android:id="@+id/action_takepicture_btn"
style="@style/action_btn_selected"
android:layout_width="300px"
android:layout_height="wrap_content"
android:text="@string/action_bar_take_photo"
android:textAlignment="center"
android:visibility="gone" />
<TextView
android:id="@+id/action_realtime_btn"
style="@style/action_btn"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/action_bar_realtime"
android:textAlignment="center" />
</LinearLayout>
<com.baidu.paddle.fastdeploy.ui.view.CameraSurfaceView
android:id="@+id/sv_preview"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_above="@+id/contral"
android:layout_below="@+id/action_bar_main"
android:layout_centerInParent="true" />
<ImageView
android:id="@+id/album_select"
android:layout_width="40dp"
android:layout_height="40dp"
android:layout_alignParentRight="true"
android:layout_alignParentBottom="true"
android:layout_marginRight="20dp"
android:layout_marginBottom="145dp"
android:background="@drawable/album_btn"
android:scaleType="fitXY" />
<TextView
android:id="@+id/tv_status"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_marginTop="60dp"
android:layout_marginRight="30dp"
android:textColor="@color/colorText"
android:textSize="@dimen/small_font_size" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="@dimen/top_bar_height"
android:layout_alignParentTop="true"
android:background="@color/colorTopBar">
<ImageButton
android:id="@+id/btn_settings"
android:layout_width="30dp"
android:layout_height="30dp"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_marginRight="10dp"
android:background="@null"
android:scaleType="fitXY"
android:src="@drawable/btn_settings" />
</RelativeLayout>
<LinearLayout
android:id="@+id/contral"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:background="@color/colorBottomBar"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="@dimen/bottom_bar_top_margin"
android:orientation="vertical"></LinearLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="@dimen/large_button_height">
<ImageButton
android:id="@+id/btn_switch"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_alignParentLeft="true"
android:layout_centerVertical="true"
android:layout_marginLeft="60dp"
android:background="#00000000"
android:scaleType="fitXY"
android:src="@drawable/switch_side_btn" />
<ImageButton
android:id="@+id/btn_shutter"
android:layout_width="@dimen/large_button_width"
android:layout_height="@dimen/large_button_height"
android:layout_centerInParent="true"
android:background="@null"
android:scaleType="fitXY"
android:src="@drawable/take_picture_btn" />
<ImageView
android:id="@+id/realtime_toggle_btn"
android:layout_width="60dp"
android:layout_height="60dp"
android:layout_alignParentRight="true"
android:layout_centerVertical="true"
android:layout_marginRight="60dp"
android:scaleType="fitXY"
android:src="@drawable/realtime_stop_btn" />
</RelativeLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="@dimen/bottom_bar_bottom_margin"
android:orientation="vertical"></LinearLayout>
</LinearLayout>
</RelativeLayout>
</android.support.constraint.ConstraintLayout>

View File

@@ -0,0 +1,160 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#FFFFFF"
android:orientation="vertical">
<com.baidu.paddle.fastdeploy.ui.layout.ActionBarLayout
android:id="@+id/action_bar_result"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<ImageView
android:id="@+id/back_in_result"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:cropToPadding="true"
android:paddingLeft="40px"
android:paddingTop="60px"
android:paddingRight="60px"
android:paddingBottom="40px"
android:src="@drawable/back_btn" />
<TextView
android:id="@+id/model_name"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_centerHorizontal="true"
android:layout_marginTop="50px"
android:textColor="@color/textColor"
android:textSize="@dimen/action_btn_text_size" />
</com.baidu.paddle.fastdeploy.ui.layout.ActionBarLayout>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="700px">
<ImageView
android:id="@+id/result_image"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/bk_result_image_padding" />
</FrameLayout>
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="40px"
android:layout_marginTop="26px"
android:layout_marginBottom="20px"
android:text="@string/result_label"
android:textColor="@color/bk_black"
android:textSize="56px"
android:visibility="visible" />
<LinearLayout
android:id="@+id/result_seekbar_section"
android:layout_width="match_parent"
android:layout_height="130px"
android:layout_marginLeft="@dimen/result_list_padding_lr"
android:layout_marginRight="@dimen/result_list_padding_lr"
android:layout_marginBottom="@dimen/result_list_gap_width"
android:background="@drawable/result_page_border_section_bk"
android:visibility="gone">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="2"
android:paddingLeft="30px"
android:text="@string/result_table_header_confidence"
android:textColor="@color/table_result_tableheader_text_color"
android:textSize="@dimen/result_list_view_text_size" />
<SeekBar
android:id="@+id/confidence_seekbar"
android:layout_width="220dp"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="6"
android:focusable="false"
android:maxHeight="8px"
android:progressDrawable="@drawable/seekbar_progress_result"
android:splitTrack="false"
android:thumb="@drawable/seekbar_handle" />
<TextView
android:id="@+id/seekbar_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical"
android:layout_weight="1"
android:paddingRight="30px"
android:textSize="@dimen/result_list_view_text_size"
/>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="@dimen/result_list_padding_lr"
android:layout_marginRight="@dimen/result_list_padding_lr"
android:layout_marginBottom="@dimen/result_list_gap_width"
android:background="@drawable/result_page_border_section_bk"
android:visibility="visible">
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/result_table_header_index"
android:textColor="@color/table_result_tableheader_text_color" />
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/result_table_header_name"
android:textColor="@color/table_result_tableheader_text_color" />
<TextView
style="@style/list_result_view_tablehead_style"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_weight="0.4"
android:gravity="right"
android:text="@string/result_table_header_confidence"
android:textColor="@color/table_result_tableheader_text_color" />
</LinearLayout>
<FrameLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<ScrollView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="15px"
android:paddingLeft="@dimen/result_list_padding_lr"
android:paddingRight="@dimen/result_list_padding_lr">
<com.baidu.paddle.fastdeploy.ui.view.ResultListView
android:id="@+id/result_list_view"
android:layout_width="match_parent"
android:layout_height="700px"
android:divider="#FFFFFF"
android:dividerHeight="@dimen/result_list_gap_width"></com.baidu.paddle.fastdeploy.ui.view.ResultListView>
</ScrollView>
</FrameLayout>
</LinearLayout>
</FrameLayout>

View File

@@ -21,6 +21,7 @@
<string name="SCORE_THRESHOLD_DEFAULT">0.4</string> <string name="SCORE_THRESHOLD_DEFAULT">0.4</string>
<string name="SCORE_THRESHOLD_CLASSIFICATION">0.1</string> <string name="SCORE_THRESHOLD_CLASSIFICATION">0.1</string>
<string name="SCORE_THRESHOLD_FACEDET">0.25</string> <string name="SCORE_THRESHOLD_FACEDET">0.25</string>
<string name="SCORE_THRESHOLD_KEYPOINT_DETECTION">0.25</string>
<string name="ENABLE_LITE_FP16_MODE_DEFAULT">true</string> <string name="ENABLE_LITE_FP16_MODE_DEFAULT">true</string>
<!--Other values--> <!--Other values-->
<!-- Detection model & Label paths & other values ... --> <!-- Detection model & Label paths & other values ... -->
@@ -36,6 +37,8 @@
<string name="FACEDET_MODEL_DIR_DEFAULT">models/scrfd_500m_bnkps_shape320x320_pd</string> <string name="FACEDET_MODEL_DIR_DEFAULT">models/scrfd_500m_bnkps_shape320x320_pd</string>
<!-- segmentation values ... --> <!-- segmentation values ... -->
<string name="SEGMENTATION_MODEL_DIR_DEFAULT">models/portrait_pp_humansegv2_lite_256x144_inference_model</string> <string name="SEGMENTATION_MODEL_DIR_DEFAULT">models/portrait_pp_humansegv2_lite_256x144_inference_model</string>
<!-- keypointdetection values -->
<string name="KEYPOINT_DETECTION_MODEL_DIR_DEFAULT">models/PP_TinyPose_128x96_infer</string>
<!-- Other resources values--> <!-- Other resources values-->
<string name="action_bar_take_photo">拍照识别</string> <string name="action_bar_take_photo">拍照识别</string>
<string name="action_bar_realtime">实时识别</string> <string name="action_bar_realtime">实时识别</string>

View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<ListPreference
android:defaultValue="@string/KEYPOINT_DETECTION_MODEL_DIR_DEFAULT"
android:key="@string/CHOOSE_PRE_INSTALLED_MODEL_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="Choose Pre-Installed Models" />
<EditTextPreference
android:defaultValue="@string/KEYPOINT_DETECTION_MODEL_DIR_DEFAULT"
android:key="@string/MODEL_DIR_KEY"
android:title="Model Dir" />
<ListPreference
android:defaultValue="@string/CPU_THREAD_NUM_DEFAULT"
android:entries="@array/cpu_thread_num_entries"
android:entryValues="@array/cpu_thread_num_values"
android:key="@string/CPU_THREAD_NUM_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="CPU Thread Num" />
<ListPreference
android:defaultValue="@string/CPU_POWER_MODE_DEFAULT"
android:entries="@array/cpu_power_mode_entries"
android:entryValues="@array/cpu_power_mode_values"
android:key="@string/CPU_POWER_MODE_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="CPU Power Mode" />
<ListPreference
android:defaultValue="@string/ENABLE_LITE_FP16_MODE_DEFAULT"
android:entries="@array/enable_lite_fp16_mode_entries"
android:entryValues="@array/enable_lite_fp16_mode_values"
android:key="@string/ENABLE_LITE_FP16_MODE_KEY"
android:negativeButtonText="@null"
android:positiveButtonText="@null"
android:title="Enable Lite FP16" />
</PreferenceScreen>

View File

@@ -56,7 +56,7 @@ def FD_CXX_LIB = [
task downloadAndExtractLibs(type: DefaultTask) {
    doFirst {
-       println "Downloading and extracting fastdeploy android c++ lib ..."
+       println "[INFO] Downloading and extracting fastdeploy android c++ lib ..."
    }
    doLast {
        String cachePath = "cache"
@@ -69,15 +69,18 @@ task downloadAndExtractLibs(type: DefaultTask) {
            libName = libName.split("\\.")[0]
            boolean copyFiles = !file("${lib.dest}/${libName}").exists()
            if (!file("${cachePath}/${libName}.tgz").exists()) {
-               println "Downloading ${lib.src} -> ${cachePath}/${libName}.tgz"
+               println "[INFO] Downloading ${lib.src} -> ${cachePath}/${libName}.tgz"
                ant.get(src: lib.src, dest: file("${cachePath}/${libName}.tgz"))
                copyFiles = true
            }
            if (copyFiles) {
                println "[INFO] Taring ${cachePath}/${libName}.tgz -> ${lib.dest}/${libName}"
                copy {
                    from tarTree("${cachePath}/${libName}.tgz")
                    into "${lib.dest}"
                }
            } else {
                println "[INFO] ${lib.dest}/${libName} already exists!"
            }
        }
    }

View File

@@ -43,6 +43,8 @@ add_library(
        fastdeploy_jni/vision/facedet/scrfd_jni.cc
        fastdeploy_jni/vision/facedet/yolov5face_jni.cc
        fastdeploy_jni/vision/facedet/facedet_utils_jni.cc
        fastdeploy_jni/vision/keypointdetection/pptinypose_jni.cc
        fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.cc
)
# Searches for a specified prebuilt library and stores the path as a

View File

@@ -19,6 +19,33 @@
namespace fastdeploy {
namespace jni {
cv::Mat CreateZeroCopyRGBAFromBitmap(JNIEnv *env, jobject j_argb8888_bitmap) {
cv::Mat c_rgba;
AndroidBitmapInfo j_bitmap_info;
if (AndroidBitmap_getInfo(env, j_argb8888_bitmap, &j_bitmap_info) < 0) {
LOGE("Invoke AndroidBitmap_getInfo() failed!");
return c_rgba;
}
if (j_bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
LOGE("Only Bitmap.Config.ARGB8888 color format is supported!");
return c_rgba;
}
void *j_bitmap_pixels;
if (AndroidBitmap_lockPixels(env, j_argb8888_bitmap, &j_bitmap_pixels) < 0) {
LOGE("Invoke AndroidBitmap_lockPixels() failed!");
return c_rgba;
}
cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
static_cast<int>(j_bitmap_info.width), CV_8UC4,
j_bitmap_pixels); // no copied.
c_rgba = j_bitmap_im; // ref only.
if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
LOGE("Invoke AndroidBitmap_unlockPixels() failed!");
return c_rgba;
}
return c_rgba;
}
jboolean ARGB888Bitmap2RGBA(JNIEnv *env, jobject j_argb8888_bitmap,
                            cv::Mat *c_rgba) {
  // Convert the android bitmap(ARGB8888) to the OpenCV RGBA image. Actually,
@@ -26,26 +53,8 @@ jboolean ARGB888Bitmap2RGBA(JNIEnv *env, jobject j_argb8888_bitmap,
  // so it is unnecessary to do the conversion of color format, check
  // https://developer.android.com/reference/android/graphics/Bitmap.Config#ARGB_8888
  // to get the more details about Bitmap.Config.ARGB8888
-  AndroidBitmapInfo j_bitmap_info;
-  if (AndroidBitmap_getInfo(env, j_argb8888_bitmap, &j_bitmap_info) < 0) {
-    LOGE("Invoke AndroidBitmap_getInfo() failed!");
-    return JNI_FALSE;
-  }
-  if (j_bitmap_info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
-    LOGE("Only Bitmap.Config.ARGB8888 color format is supported!");
-    return JNI_FALSE;
-  }
-  void *j_bitmap_pixels;
-  if (AndroidBitmap_lockPixels(env, j_argb8888_bitmap, &j_bitmap_pixels) < 0) {
-    LOGE("Invoke AndroidBitmap_lockPixels() failed!");
-    return JNI_FALSE;
-  }
-  cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
-                      static_cast<int>(j_bitmap_info.width), CV_8UC4,
-                      j_bitmap_pixels);
-  j_bitmap_im.copyTo(*(c_rgba));
-  if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
-    LOGE("Invoke AndroidBitmap_unlockPixels() failed!");
+  *c_rgba = CreateZeroCopyRGBAFromBitmap(env, j_argb8888_bitmap);
+  if (c_rgba->empty()) {
    return JNI_FALSE;
  }
  return JNI_TRUE;
@@ -57,6 +66,8 @@ jboolean ARGB888Bitmap2BGR(JNIEnv *env, jobject j_argb8888_bitmap,
  if (!ARGB888Bitmap2RGBA(env, j_argb8888_bitmap, &c_rgba)) {
    return JNI_FALSE;
  }
  // TODO: Use the neon instruction to optimize this conversion.
  // COLOR_RGBA2BGR will allocate memories for new mat.
  cv::cvtColor(c_rgba, *(c_bgr), cv::COLOR_RGBA2BGR);
  return JNI_TRUE;
}
@@ -73,9 +84,11 @@ jboolean RGBA2ARGB888Bitmap(JNIEnv *env, jobject j_argb8888_bitmap,
LOGE("Invoke AndroidBitmap_lockPixels() failed!"); LOGE("Invoke AndroidBitmap_lockPixels() failed!");
return JNI_FALSE; return JNI_FALSE;
} }
// no copied, but point to bitmap data.
cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height), cv::Mat j_bitmap_im(static_cast<int>(j_bitmap_info.height),
static_cast<int>(j_bitmap_info.width), CV_8UC4, static_cast<int>(j_bitmap_info.width), CV_8UC4,
j_bitmap_pixels); j_bitmap_pixels);
// TODO: Use zero copy operation or neon to boost performance.
c_rgba.copyTo(j_bitmap_im); c_rgba.copyTo(j_bitmap_im);
if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) { if (AndroidBitmap_unlockPixels(env, j_argb8888_bitmap) < 0) {
LOGE("Invoke AndroidBitmap_unlockPixels() failed!"); LOGE("Invoke AndroidBitmap_unlockPixels() failed!");

View File

@@ -34,6 +34,8 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
j_runtime_option_clazz, "mCpuThreadNum", "I"); j_runtime_option_clazz, "mCpuThreadNum", "I");
const jfieldID j_enable_lite_fp16_id = env->GetFieldID( const jfieldID j_enable_lite_fp16_id = env->GetFieldID(
j_runtime_option_clazz, "mEnableLiteFp16", "Z"); j_runtime_option_clazz, "mEnableLiteFp16", "Z");
const jfieldID j_enable_lite_int8_id = env->GetFieldID(
j_runtime_option_clazz, "mEnableLiteInt8", "Z");
const jfieldID j_lite_power_mode_id = env->GetFieldID( const jfieldID j_lite_power_mode_id = env->GetFieldID(
j_runtime_option_clazz, "mLitePowerMode", j_runtime_option_clazz, "mLitePowerMode",
"Lcom/baidu/paddle/fastdeploy/LitePowerMode;"); "Lcom/baidu/paddle/fastdeploy/LitePowerMode;");
@@ -59,6 +61,8 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
      j_runtime_option_obj, j_cpu_num_thread_id);
  jboolean j_enable_lite_fp16 = env->GetBooleanField(
      j_runtime_option_obj, j_enable_lite_fp16_id);
  jboolean j_enable_lite_int8 = env->GetBooleanField(
      j_runtime_option_obj, j_enable_lite_int8_id);
  jstring j_lite_optimized_model_dir = static_cast<jstring>(
      env->GetObjectField(j_runtime_option_obj, j_lite_optimized_model_dir_id));
  jobject j_lite_power_mode_obj = env->GetObjectField(
@@ -68,6 +72,7 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
  int c_cpu_num_thread = static_cast<int>(j_cpu_num_thread);
  bool c_enable_lite_fp16 = static_cast<bool>(j_enable_lite_fp16);
  bool c_enable_lite_int8 = static_cast<bool>(j_enable_lite_int8);
  fastdeploy::LitePowerMode c_lite_power_mode =
      static_cast<fastdeploy::LitePowerMode>(j_lite_power_mode);
  std::string c_lite_optimized_model_dir =
@@ -80,6 +85,9 @@ fastdeploy::RuntimeOption NewCxxRuntimeOption(
  if (c_enable_lite_fp16) {
    c_runtime_option.EnableLiteFP16();
  }
  if (c_enable_lite_int8) {
    c_runtime_option.EnableLiteInt8();
  }
  env->DeleteLocalRef(j_runtime_option_clazz);
  env->DeleteLocalRef(j_lite_power_mode_clazz);
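For context, the Java-side RuntimeOption fields read by NewCxxRuntimeOption() would look roughly like the sketch below; the field names and JNI signatures come from the GetFieldID calls above, while the default values and the enableLiteInt8() setter name are assumptions, since the Java class itself is not part of this excerpt:

// Sketch (not the actual source) of the RuntimeOption fields matched by the JNI code above.
// The mLitePowerMode field (declared as com.baidu.paddle.fastdeploy.LitePowerMode) is omitted here.
public class RuntimeOption {
    protected int mCpuThreadNum = 1;            // GetFieldID("mCpuThreadNum", "I")
    protected boolean mEnableLiteFp16 = false;  // GetFieldID("mEnableLiteFp16", "Z")
    protected boolean mEnableLiteInt8 = false;  // GetFieldID("mEnableLiteInt8", "Z"), new in this commit

    public void setCpuThreadNum(int threadNum) { mCpuThreadNum = threadNum; }
    public void enableLiteFp16() { mEnableLiteFp16 = true; }
    // Hypothetical toggle mirroring enableLiteFp16(); the real setter is not shown in this diff.
    public void enableLiteInt8() { mEnableLiteInt8 = true; }
}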

View File

@@ -0,0 +1,45 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
/// Rendering KeyPointDetectionResult to ARGB888Bitmap
void RenderingKeyPointDetection(
JNIEnv *env, const cv::Mat &c_bgr,
const vision::KeyPointDetectionResult &c_result,
jobject argb8888_bitmap, bool save_image, float conf_threshold,
jstring save_path) {
if (!c_result.keypoints.empty()) {
auto t = GetCurrentTime();
auto c_vis_im = vision::VisKeypointDetection(
c_bgr, c_result, conf_threshold);
LOGD("Visualize from native costs %f ms", GetElapsedTime(t));
if (!BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
LOGD("Write to bitmap from native failed!");
}
auto c_saved_image_path = ConvertTo<std::string>(env, save_path);
if (!c_saved_image_path.empty() && save_image) {
cv::imwrite(c_saved_image_path, c_vis_im);
}
}
}
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,32 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <jni.h> // NOLINT
#include "fastdeploy/vision.h" // NOLINT
#include "fastdeploy_jni/perf_jni.h" // NOLINT
#include "fastdeploy_jni/bitmap_jni.h" // NOLINT
namespace fastdeploy {
namespace jni {
void RenderingKeyPointDetection(
JNIEnv *env, const cv::Mat &c_bgr,
const vision::KeyPointDetectionResult &c_result,
jobject argb8888_bitmap, bool save_image,
float conf_threshold, jstring save_path);
} // namespace jni
} // namespace fastdeploy

View File

@@ -0,0 +1,103 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <jni.h> // NOLINT
#include "fastdeploy_jni/convert_jni.h" // NOLINT
#include "fastdeploy_jni/assets_loader_jni.h" // NOLINT
#include "fastdeploy_jni/runtime_option_jni.h" // NOLINT
#include "fastdeploy_jni/vision/results_jni.h" // NOLINT
#include "fastdeploy_jni/vision/keypointdetection/keypointdetection_utils_jni.h" // NOLINT
namespace fni = fastdeploy::jni;
namespace vision = fastdeploy::vision;
namespace keypointdetection = fastdeploy::vision::keypointdetection;
#ifdef __cplusplus
extern "C" {
#endif
JNIEXPORT jlong JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_bindNative(
JNIEnv *env, jobject thiz, jstring model_file, jstring params_file,
jstring config_file, jobject runtime_option) {
auto c_model_file = fni::ConvertTo<std::string>(env, model_file);
auto c_params_file = fni::ConvertTo<std::string>(env, params_file);
auto c_config_file = fni::ConvertTo<std::string>(env, config_file);
auto c_runtime_option = fni::NewCxxRuntimeOption(env, runtime_option);
auto c_model_ptr = new keypointdetection::PPTinyPose(
c_model_file, c_params_file, c_config_file, c_runtime_option);
INITIALIZED_OR_RETURN(c_model_ptr)
#ifdef ENABLE_RUNTIME_PERF
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
// setup use_dark param
const jclass j_pptinypose_clazz = env->GetObjectClass(thiz);
const jfieldID j_use_dark_id = env->GetFieldID(
j_pptinypose_clazz, "mUseDark", "Z");
jboolean j_use_dark = env->GetBooleanField(thiz, j_use_dark_id);
const bool c_use_dark = static_cast<bool>(j_use_dark);
c_model_ptr->use_dark = c_use_dark;
env->DeleteLocalRef(j_pptinypose_clazz);
vision::EnableFlyCV();
return reinterpret_cast<jlong>(c_model_ptr);
}
JNIEXPORT jobject JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_predictNative(
JNIEnv *env, jobject thiz, jlong cxx_context, jobject argb8888_bitmap,
jboolean save_image, jstring save_path, jboolean rendering,
jfloat conf_threshold) {
if (cxx_context == 0) {
return NULL;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return NULL;
}
auto c_model_ptr = reinterpret_cast<keypointdetection::PPTinyPose *>(cxx_context);
vision::KeyPointDetectionResult c_result;
auto t = fni::GetCurrentTime();
c_model_ptr->Predict(&c_bgr, &c_result);
PERF_TIME_OF_RUNTIME(c_model_ptr, t)
if (rendering) {
fni::RenderingKeyPointDetection(
env, c_bgr, c_result, argb8888_bitmap, save_image,
conf_threshold, save_path);
}
return fni::NewJavaResultFromCxx(env, reinterpret_cast<void *>(&c_result),
vision::ResultType::KEYPOINT_DETECTION);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_keypointdetection_PPTinyPose_releaseNative(
JNIEnv *env, jobject thiz, jlong cxx_context) {
if (cxx_context == 0) {
return JNI_FALSE;
}
auto c_model_ptr = reinterpret_cast<keypointdetection::PPTinyPose *>(cxx_context);
PERF_TIME_OF_RUNTIME(c_model_ptr, -1)
delete c_model_ptr;
LOGD("[End] Release PPTinyPose in native !");
return JNI_TRUE;
}
#ifdef __cplusplus
}
#endif
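For context, the Java-side native declarations these JNI entry points bind to would look roughly as follows (a sketch inferred from the exported symbol names and parameter lists; only the mUseDark field and setUseDark() are confirmed elsewhere in this diff):

package com.baidu.paddle.fastdeploy.vision.keypointdetection;

import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;

// Sketch of the Java class that pptinypose_jni.cc binds to (not the actual source).
public class PPTinyPose {
    // Read by bindNative() through GetFieldID(..., "mUseDark", "Z").
    protected boolean mUseDark = false;

    public void setUseDark(boolean useDark) {
        mUseDark = useDark;
    }

    // Parameter order follows the JNI signatures above.
    private native long bindNative(String modelFile, String paramsFile,
                                   String configFile, RuntimeOption option);

    private native KeyPointDetectionResult predictNative(long cxxContext, Bitmap argb8888Bitmap,
                                                         boolean saveImage, String savePath,
                                                         boolean rendering, float confThreshold);

    private native boolean releaseNative(long cxxContext);
}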

View File

@@ -438,9 +438,79 @@ bool AllocateJavaFaceDetectionResultFromCxx(
  return true;
}
bool AllocateJavaKeyPointDetectionResultFromCxx(
    JNIEnv *env, jobject j_keypoint_det_result_obj, void *cxx_result) {
  // WARN: Please make sure 'j_keypoint_det_result_obj' param
// is a ref of Java KeyPointDetectionResult.
// Field signatures of Java KeyPointDetectionResult:
// (1) mBoxes float[][] shape (n*num_joints,2): [[F
// (2) mScores float[] shape (n*num_joints): [F
// (3) mNumJoints int shape (1): I
// (4) mInitialized boolean: Z
// Docs: docs/api/vision_results/keypointdetection_result.md
if (cxx_result == nullptr) {
return false;
}
auto c_result_ptr = reinterpret_cast<vision::KeyPointDetectionResult *>(cxx_result);
const int len = static_cast<int>(c_result_ptr->keypoints.size());
if (len == 0) {
return false;
}
const jclass j_keypoint_det_result_clazz = env->FindClass(
"com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
const jclass j_keypoint_float_arr_clazz = env->FindClass("[F"); // (2,)
const jfieldID j_keypoint_det_keypoints_id = env->GetFieldID(
j_keypoint_det_result_clazz, "mKeyPoints", "[[F");
const jfieldID j_keypoint_det_scores_id = env->GetFieldID(
j_keypoint_det_result_clazz, "mScores", "[F");
const jfieldID j_keypoint_det_num_joints_id = env->GetFieldID(
j_keypoint_det_result_clazz, "mNumJoints", "I");
const jfieldID j_keypoint_det_initialized_id = env->GetFieldID(
j_keypoint_det_result_clazz, "mInitialized", "Z");
if (!env->IsInstanceOf(j_keypoint_det_result_obj, j_keypoint_det_result_clazz)) {
return false;
}
// mKeyPoints float[][] shape (n*num_joints,2): [[F
const auto &keypoints = c_result_ptr->keypoints;
jobjectArray j_keypoint_det_keypoints_float_arr =
env->NewObjectArray(len, j_keypoint_float_arr_clazz, NULL);
for (int i = 0; i < len; ++i) {
jfloatArray j_point = env->NewFloatArray(2);
env->SetFloatArrayRegion(j_point, 0, 2, keypoints.at(i).data());
env->SetObjectArrayElement(j_keypoint_det_keypoints_float_arr, i, j_point);
env->DeleteLocalRef(j_point);
}
// mScores float[] shape (n*num_joints): [F
const auto &scores = c_result_ptr->scores;
const int score_len = scores.size();
jfloatArray j_keypoint_det_scores_float_arr = env->NewFloatArray(score_len);
env->SetFloatArrayRegion(j_keypoint_det_scores_float_arr, 0, score_len, scores.data());
// mNumJoints int shape (1): I
jint j_keypoint_det_num_joints = static_cast<jint>(c_result_ptr->num_joints);
// Set object fields
env->SetObjectField(j_keypoint_det_result_obj, j_keypoint_det_keypoints_id, j_keypoint_det_keypoints_float_arr);
env->SetObjectField(j_keypoint_det_result_obj, j_keypoint_det_scores_id, j_keypoint_det_scores_float_arr);
env->SetIntField(j_keypoint_det_result_obj, j_keypoint_det_num_joints_id, j_keypoint_det_num_joints);
env->SetBooleanField(j_keypoint_det_result_obj, j_keypoint_det_initialized_id, JNI_TRUE);
// Release local Refs
env->DeleteLocalRef(j_keypoint_det_keypoints_float_arr);
env->DeleteLocalRef(j_keypoint_det_scores_float_arr);
env->DeleteLocalRef(j_keypoint_det_result_clazz);
env->DeleteLocalRef(j_keypoint_float_arr_clazz);
return true;
}
bool AllocateJavaResultFromCxx(JNIEnv *env, jobject j_result_obj,
void *cxx_result, vision::ResultType type) {
if (type == vision::ResultType::CLASSIFY) {
return AllocateJavaClassifyResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::DETECTION) {
@@ -451,6 +521,8 @@ bool AllocateJavaResultFromCxx(
return AllocateJavaSegmentationResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
return AllocateJavaFaceDetectionResultFromCxx(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return AllocateJavaKeyPointDetectionResultFromCxx(env, j_result_obj, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -519,6 +591,18 @@ jobject NewJavaFaceDetectionResultFromCxx(JNIEnv *env, void *cxx_result) {
return j_face_det_result_obj;
}
jobject NewJavaKeyPointDetectionResultFromCxx(JNIEnv *env, void *cxx_result) {
const jclass j_keypoint_det_result_clazz = env->FindClass(
"com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
const jmethodID j_keypoint_det_result_init = env->GetMethodID(
j_keypoint_det_result_clazz, "<init>", "()V");
jobject j_keypoint_det_result_obj = env->NewObject(
j_keypoint_det_result_clazz, j_keypoint_det_result_init);
AllocateJavaKeyPointDetectionResultFromCxx(env, j_keypoint_det_result_obj, cxx_result);
env->DeleteLocalRef(j_keypoint_det_result_clazz);
return j_keypoint_det_result_obj;
}
jobject NewJavaResultFromCxx(
JNIEnv *env, void *cxx_result, vision::ResultType type) {
if (type == vision::ResultType::CLASSIFY) {
@@ -531,6 +615,8 @@ jobject NewJavaResultFromCxx(
return NewJavaSegmentationResultFromCxx(env, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
return NewJavaFaceDetectionResultFromCxx(env, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return NewJavaKeyPointDetectionResultFromCxx(env, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -1058,6 +1144,95 @@ bool AllocateFaceDetectionResultFromJava(
return true;
}
bool AllocateKeyPointDetectionResultFromJava(
JNIEnv *env, jobject j_keypoint_det_result_obj, void *cxx_result) {
// WARN: Please make sure 'j_keypoint_det_result_obj' param
// is a ref of Java KeyPointDetectionResult.
// Field signatures of Java KeyPointDetectionResult:
// (1) mKeyPoints float[][] shape (n*num_joints,2): [[F
// (2) mScores float[] shape (n*num_joints): [F
// (3) mNumJoints int shape (1): I
// (4) mInitialized boolean: Z
// Docs: docs/api/vision_results/keypointdetection_result.md
if (cxx_result == nullptr || j_keypoint_det_result_obj == nullptr) {
return false;
}
auto c_result_ptr = reinterpret_cast<vision::KeyPointDetectionResult *>(cxx_result);
const jclass j_keypoint_det_result_clazz_cc = env->FindClass(
"com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
const jfieldID j_keypoint_det_keypoints_id_cc = env->GetFieldID(
j_keypoint_det_result_clazz_cc, "mKeyPoints", "[[F");
const jfieldID j_keypoint_det_scores_id_cc = env->GetFieldID(
j_keypoint_det_result_clazz_cc, "mScores", "[F");
const jfieldID j_keypoint_det_num_joints_id_cc = env->GetFieldID(
j_keypoint_det_result_clazz_cc, "mNumJoints", "I");
const jfieldID j_keypoint_det_initialized_id_cc = env->GetFieldID(
j_keypoint_det_result_clazz_cc, "mInitialized", "Z");
if (!env->IsInstanceOf(j_keypoint_det_result_obj, j_keypoint_det_result_clazz_cc)) {
return false;
}
// mInitialized boolean: Z
jboolean j_keypoint_det_initialized =
env->GetBooleanField(j_keypoint_det_result_obj, j_keypoint_det_initialized_id_cc);
if (j_keypoint_det_initialized == JNI_FALSE) {
return false;
}
jobjectArray j_keypoint_det_keypoints_float_arr = reinterpret_cast<jobjectArray>(
env->GetObjectField(j_keypoint_det_result_obj, j_keypoint_det_keypoints_id_cc));
jfloatArray j_keypoint_det_scores_float_arr = reinterpret_cast<jfloatArray>(
env->GetObjectField(j_keypoint_det_result_obj, j_keypoint_det_scores_id_cc));
jint j_keypoint_det_num_joints = env->GetIntField(
j_keypoint_det_result_obj, j_keypoint_det_num_joints_id_cc);
int len = env->GetArrayLength(j_keypoint_det_keypoints_float_arr);
if ((len == 0) || (len != env->GetArrayLength(j_keypoint_det_scores_float_arr)) ||
(j_keypoint_det_num_joints < 0)) {
return false;
}
// Init Cxx result
c_result_ptr->Clear();
// mKeyPoints float[][] shape (n*num_joints,2): [[F
c_result_ptr->keypoints.resize(len);
bool c_check_validation = true;
for (int i = 0; i < len; ++i) {
auto j_point = reinterpret_cast<jfloatArray>(
env->GetObjectArrayElement(j_keypoint_det_keypoints_float_arr, i));
if (env->GetArrayLength(j_point) == 2) {
jfloat *j_point_ptr = env->GetFloatArrayElements(j_point, nullptr);
std::memcpy(c_result_ptr->keypoints[i].data(), j_point_ptr, 2 * sizeof(float));
env->ReleaseFloatArrayElements(j_point, j_point_ptr, 0);
} else {
c_check_validation = false;
break;
}
}
if (!c_check_validation) {
LOGE("The length of each detection box is not equal 2!");
return false;
}
// mScores float[] shape (n*num_joints): [F
c_result_ptr->scores.resize(len);
jfloat *j_keypoint_det_scores_ptr =
env->GetFloatArrayElements(j_keypoint_det_scores_float_arr, nullptr);
std::memcpy(c_result_ptr->scores.data(), j_keypoint_det_scores_ptr, len * sizeof(float));
env->ReleaseFloatArrayElements(j_keypoint_det_scores_float_arr, j_keypoint_det_scores_ptr, 0);
// mNumJoints int shape (1): I
c_result_ptr->num_joints = static_cast<int>(j_keypoint_det_num_joints);
// Release local Refs
env->DeleteLocalRef(j_keypoint_det_result_clazz_cc);
return true;
}
bool AllocateCxxResultFromJava(
JNIEnv *env, jobject j_result_obj, void *cxx_result,
vision::ResultType type) {
@@ -1071,6 +1246,8 @@ bool AllocateCxxResultFromJava(
return AllocateSegmentationResultFromJava(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::FACE_DETECTION) {
return AllocateFaceDetectionResultFromJava(env, j_result_obj, cxx_result);
} else if (type == vision::ResultType::KEYPOINT_DETECTION) {
return AllocateKeyPointDetectionResultFromJava(env, j_result_obj, cxx_result);
} else {
LOGE("Not support this ResultType in JNI now, type: %d",
static_cast<int>(type));
@@ -1081,7 +1258,6 @@ bool AllocateCxxResultFromJava(
} // namespace jni
} // namespace fastdeploy
#ifdef __cplusplus
extern "C" {
#endif
@@ -1109,7 +1285,7 @@ Java_com_baidu_paddle_fastdeploy_vision_SegmentationResult_releaseCxxBufferNativ
auto c_result_ptr = reinterpret_cast<
fastdeploy::vision::SegmentationResult *>(j_cxx_buffer);
delete c_result_ptr;
LOGD("[End] Release SegmentationResult & CxxBuffer in native !");
env->SetBooleanField(thiz, j_seg_initialized_id, JNI_FALSE);
env->DeleteLocalRef(j_seg_result_clazz);


@@ -42,13 +42,13 @@ Java_com_baidu_paddle_fastdeploy_vision_segmentation_PaddleSegModel_bindNative(
c_model_ptr->EnableRecordTimeOfRuntime();
#endif
// setup is_vertical_screen param
const jclass j_ppseg_clazz = env->GetObjectClass(thiz);
const jfieldID j_is_vertical_screen_id = env->GetFieldID(
j_ppseg_clazz, "mIsVerticalScreen", "Z");
jboolean j_is_vertical_screen = env->GetBooleanField(
thiz, j_is_vertical_screen_id);
const bool c_is_vertical_screen = static_cast<bool>(j_is_vertical_screen);
c_model_ptr->GetPreprocessor().SetIsVerticalScreen(c_is_vertical_screen);
env->DeleteLocalRef(j_ppseg_clazz);


@@ -225,6 +225,33 @@ jboolean VisFaceDetectionFromJava(
return JNI_TRUE;
}
jboolean VisKeyPointDetectionFromJava(
JNIEnv *env, jobject argb8888_bitmap, jobject result,
jfloat conf_threshold) {
const jclass j_keypoint_det_result_clazz = env->FindClass(
"com/baidu/paddle/fastdeploy/vision/KeyPointDetectionResult");
if (!env->IsInstanceOf(result, j_keypoint_det_result_clazz)) {
env->DeleteLocalRef(j_keypoint_det_result_clazz);
return JNI_FALSE;
}
env->DeleteLocalRef(j_keypoint_det_result_clazz);
vision::KeyPointDetectionResult c_result;
if (!fni::AllocateCxxResultFromJava(
env, result, reinterpret_cast<void *>(&c_result),
vision::ResultType::KEYPOINT_DETECTION)) {
return JNI_FALSE;
}
cv::Mat c_bgr;
if (!fni::ARGB888Bitmap2BGR(env, argb8888_bitmap, &c_bgr)) {
return JNI_FALSE;
}
auto c_vis_im = vision::VisKeypointDetection(c_bgr, c_result, conf_threshold);
if (!fni::BGR2ARGB888Bitmap(env, argb8888_bitmap, c_vis_im)) {
return JNI_FALSE;
}
return JNI_TRUE;
}
} // jni
} // fastdeploy
@@ -283,8 +310,15 @@ Java_com_baidu_paddle_fastdeploy_vision_Visualize_visFaceDetectionNative(
line_size, font_size);
}
JNIEXPORT jboolean JNICALL
Java_com_baidu_paddle_fastdeploy_vision_Visualize_visKeyPointDetectionNative(
JNIEnv *env, jclass clazz, jobject argb8888_bitmap,
jobject result, jfloat conf_threshold) {
return fni::VisKeyPointDetectionFromJava(env, argb8888_bitmap, result,
conf_threshold);
}
#ifdef __cplusplus
}
#endif


@@ -3,12 +3,14 @@ package com.baidu.paddle.fastdeploy;
public class RuntimeOption {
public int mCpuThreadNum = 1;
public boolean mEnableLiteFp16 = false;
public boolean mEnableLiteInt8 = false;
public LitePowerMode mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
public String mLiteOptimizedModelDir = "";
public RuntimeOption() {
mCpuThreadNum = 1;
mEnableLiteFp16 = false;
mEnableLiteInt8 = false;
mLitePowerMode = LitePowerMode.LITE_POWER_NO_BIND;
mLiteOptimizedModelDir = "";
}
@@ -21,6 +23,14 @@ public class RuntimeOption {
mEnableLiteFp16 = false;
}
public void enableLiteInt8() {
mEnableLiteInt8 = true;
}
public void disableLiteInt8() {
mEnableLiteInt8 = false;
}
public void setCpuThreadNum(int threadNum) {
mCpuThreadNum = threadNum;
}


@@ -0,0 +1,42 @@
package com.baidu.paddle.fastdeploy.vision;
import android.support.annotation.NonNull;
import java.util.Arrays;
public class KeyPointDetectionResult {
public float[][] mKeyPoints; // [n*num_joints, 2]
public float[] mScores; // [n*num_joints]
public int mNumJoints = -1;
public boolean mInitialized = false;
public KeyPointDetectionResult() {
mInitialized = false;
}
public boolean initialized() {
return mInitialized;
}
public void setKeyPoints(@NonNull float[] keyPointsBuffer) {
int pointNum = keyPointsBuffer.length / 2;
if (pointNum > 0) {
mKeyPoints = new float[pointNum][2];
for (int i = 0; i < pointNum; ++i) {
mKeyPoints[i] = Arrays.copyOfRange(
keyPointsBuffer, i * 2, (i + 1) * 2);
}
}
}
public void setScores(@NonNull float[] scoresBuffer) {
if (scoresBuffer.length > 0) {
mScores = scoresBuffer.clone();
}
}
public void setNumJoints(int numJoints) {
mNumJoints = numJoints;
}
}
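Because mKeyPoints is flattened to [n*num_joints, 2], the joints of the k-th detected person occupy rows k*mNumJoints .. (k+1)*mNumJoints-1. A small consumer sketch under that assumption (not part of this commit):

    import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;

    class KeyPointReader {
        // Walks the flattened keypoint buffer person by person, joint by joint.
        static void forEachJoint(KeyPointDetectionResult result) {
            if (!result.initialized() || result.mNumJoints <= 0) {
                return;
            }
            int numPersons = result.mKeyPoints.length / result.mNumJoints;
            for (int p = 0; p < numPersons; ++p) {
                for (int j = 0; j < result.mNumJoints; ++j) {
                    int idx = p * result.mNumJoints + j;
                    float x = result.mKeyPoints[idx][0];
                    float y = result.mKeyPoints[idx][1];
                    float score = result.mScores[idx];
                    // Consume (x, y, score) for joint j of person p, e.g. draw or log it.
                }
            }
        }
    }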


@@ -161,6 +161,24 @@ public class Visualize {
fontSize);
}
// Visualize KeyPointDetectionResult
public static boolean visKeypointDetection(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result) {
return visKeyPointDetectionNative(
ARGB8888Bitmap,
result,
0.5f);
}
public static boolean visKeypointDetection(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result,
float confThreshold) {
return visKeyPointDetectionNative(
ARGB8888Bitmap,
result,
confThreshold);
}
// VisDetection in native
private static native boolean visDetectionNative(Bitmap ARGB8888Bitmap,
DetectionResult result,
@@ -191,6 +209,11 @@ public class Visualize {
int lineSize,
float fontSize);
// VisKeypointDetection in native
private static native boolean visKeyPointDetectionNative(Bitmap ARGB8888Bitmap,
KeyPointDetectionResult result,
float confThreshold);
/* Initializes at the beginning */
static {
FastDeployInitializer.init();
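The two Java overloads above only differ in the confidence threshold; the short form forwards 0.5f to the native call. A minimal usage sketch (not part of this commit; the PPTinyPose instance and the ARGB_8888 bitmap are assumed to come from the app code):

    import android.graphics.Bitmap;

    import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
    import com.baidu.paddle.fastdeploy.vision.Visualize;
    import com.baidu.paddle.fastdeploy.vision.keypointdetection.PPTinyPose;

    class PoseRendering {
        // Draws keypoints above the given threshold directly into the ARGB_8888 bitmap.
        static boolean renderPose(PPTinyPose tinyPose, Bitmap argb8888Bitmap) {
            KeyPointDetectionResult result = tinyPose.predict(argb8888Bitmap);
            // Visualize.visKeypointDetection(argb8888Bitmap, result) would default the threshold to 0.5f.
            return Visualize.visKeypointDetection(argb8888Bitmap, result, 0.35f);
        }
    }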


@@ -0,0 +1,156 @@
package com.baidu.paddle.fastdeploy.vision.keypointdetection;
import android.graphics.Bitmap;
import com.baidu.paddle.fastdeploy.FastDeployInitializer;
import com.baidu.paddle.fastdeploy.RuntimeOption;
import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
public class PPTinyPose {
protected long mCxxContext = 0; // Context from native.
protected boolean mUseDark = true;
protected boolean mInitialized = false;
public PPTinyPose() {
mInitialized = false;
}
// Constructor with default runtime option
public PPTinyPose(String modelFile,
String paramsFile,
String configFile) {
init_(modelFile, paramsFile, configFile, new RuntimeOption());
}
// Constructor with custom runtime option
public PPTinyPose(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
init_(modelFile, paramsFile, configFile, runtimeOption);
}
public void setUseDark(boolean flag) {
mUseDark = flag;
}
// Call init manually with custom runtime option
public boolean init(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
return init_(modelFile, paramsFile, configFile, runtimeOption);
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {
return false;
}
return releaseNative(mCxxContext);
}
public boolean initialized() {
return mInitialized;
}
// Predict without image saving and bitmap rendering.
public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap) {
if (mCxxContext == 0) {
return new KeyPointDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
KeyPointDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", false, 0.f);
if (result == null) {
return new KeyPointDetectionResult();
}
return result;
}
public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap,
boolean rendering,
float confThreshold) {
if (mCxxContext == 0) {
return new KeyPointDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
KeyPointDetectionResult result = predictNative(mCxxContext, ARGB8888Bitmap,
false, "", rendering, confThreshold);
if (result == null) {
return new KeyPointDetectionResult();
}
return result;
}
// Predict with image saving and bitmap rendering (will cost more time)
public KeyPointDetectionResult predict(Bitmap ARGB8888Bitmap,
String savedImagePath,
float confThreshold) {
// confThreshold is for visualizing only.
if (mCxxContext == 0) {
return new KeyPointDetectionResult();
}
// Only support ARGB8888 bitmap in native now.
KeyPointDetectionResult result = predictNative(
mCxxContext, ARGB8888Bitmap, true,
savedImagePath, true, confThreshold);
if (result == null) {
return new KeyPointDetectionResult();
}
return result;
}
private boolean init_(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption) {
if (!mInitialized) {
mCxxContext = bindNative(
modelFile,
paramsFile,
configFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
} else {
// release current native context and bind a new one.
if (release()) {
mCxxContext = bindNative(
modelFile,
paramsFile,
configFile,
runtimeOption);
if (mCxxContext != 0) {
mInitialized = true;
}
return mInitialized;
}
return false;
}
}
// Bind predictor from native context.
private native long bindNative(String modelFile,
String paramsFile,
String configFile,
RuntimeOption runtimeOption);
// Call prediction from native context with rendering.
private native KeyPointDetectionResult predictNative(long CxxContext,
Bitmap ARGB8888Bitmap,
boolean saveImage,
String savePath,
boolean rendering,
float confThreshold);
// Release buffers allocated in native context.
private native boolean releaseNative(long CxxContext);
// Initializes at the beginning.
static {
FastDeployInitializer.init();
}
}
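A hedged end-to-end sketch of the predict/release lifecycle (not part of this commit; the model directory is an assumption, and the file names are placeholders matching the usual layout of the PP_TinyPose_128x96_infer archive fetched by the gradle task):

    import android.graphics.Bitmap;

    import com.baidu.paddle.fastdeploy.RuntimeOption;
    import com.baidu.paddle.fastdeploy.vision.KeyPointDetectionResult;
    import com.baidu.paddle.fastdeploy.vision.keypointdetection.PPTinyPose;

    public class TinyPoseExample {
        public static KeyPointDetectionResult detect(Bitmap argb8888Frame, String modelDir) {
            PPTinyPose tinyPose = new PPTinyPose(
                    modelDir + "/model.pdmodel",      // file names assumed, not part of this commit
                    modelDir + "/model.pdiparams",
                    modelDir + "/infer_cfg.yml",
                    new RuntimeOption());
            if (!tinyPose.initialized()) {
                return new KeyPointDetectionResult();
            }
            // Plain prediction; pass (bitmap, true, threshold) to also render into the bitmap,
            // or (bitmap, savedImagePath, threshold) to render and save the visualization.
            KeyPointDetectionResult result = tinyPose.predict(argb8888Frame);
            tinyPose.release();  // frees the native context; the returned Java result stays valid
            return result;
        }
    }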


@@ -22,12 +22,6 @@ public class PaddleSegModel {
init_(modelFile, paramsFile, configFile, new RuntimeOption());
}
// Is vertical screen or not, for PP-HumanSeg on vertical screen,
// this flag must be 'true'.
public void setVerticalScreenFlag(boolean flag) {
mIsVerticalScreen = flag;
}
// Constructor with custom runtime option
public PaddleSegModel(String modelFile,
String paramsFile,
@@ -43,6 +37,17 @@ public class PaddleSegModel {
return init_(modelFile, paramsFile, configFile, runtimeOption);
}
// Deprecated. Please use setIsVerticalScreen instead.
public void setVerticalScreenFlag(boolean flag) {
mIsVerticalScreen = flag;
}
// Is vertical screen or not, for PP-HumanSeg on vertical screen,
// this flag must be 'true'.
public void setIsVerticalScreen(boolean flag) {
mIsVerticalScreen = flag;
}
public boolean release() {
mInitialized = false;
if (mCxxContext == 0) {
return false;
View File

@@ -7,6 +7,7 @@ android {
defaultConfig { defaultConfig {
minSdkVersion 15 minSdkVersion 15
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"