diff --git a/fastdeploy/vision/segmentation/ppseg/model.cc b/fastdeploy/vision/segmentation/ppseg/model.cc
index 3d3b158a3..5fade8cb0 100644
--- a/fastdeploy/vision/segmentation/ppseg/model.cc
+++ b/fastdeploy/vision/segmentation/ppseg/model.cc
@@ -375,4 +375,4 @@ void PaddleSegModel::DisableNormalizeAndPermute() {
 }
 } // namespace segmentation
 } // namespace vision
-} // namespace fastdeploy
+} // namespace fastdeploy
\ No newline at end of file
diff --git a/java/android/app/src/main/AndroidManifest.xml b/java/android/app/src/main/AndroidManifest.xml
index 0f963ffe4..858ef04f6 100644
--- a/java/android/app/src/main/AndroidManifest.xml
+++ b/java/android/app/src/main/AndroidManifest.xml
@@ -15,14 +15,14 @@
         android:roundIcon="@mipmap/ic_launcher_round"
         android:supportsRtl="true"
         android:theme="@style/AppTheme">
-
+
diff --git a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/facedet/FaceDetMainActivity.java b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/facedet/FaceDetMainActivity.java
index eaf3d3348..fa2f1aeb9 100644
--- a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/facedet/FaceDetMainActivity.java
+++ b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/facedet/FaceDetMainActivity.java
@@ -320,8 +320,13 @@ public class FaceDetMainActivity extends Activity implements View.OnClickListene
     public void initView() {
         TYPE = REALTIME_DETECT;
-        CameraSurfaceView.EXPECTED_PREVIEW_WIDTH = 720;
-        CameraSurfaceView.EXPECTED_PREVIEW_HEIGHT = 360;
+        // (1) EXPECTED_PREVIEW_WIDTH should mean 'height' and EXPECTED_PREVIEW_HEIGHT
+        // should mean 'width' if the camera display orientation is 90 | 270 degree
+        // (Hold the phone upright to record video)
+        // (2) Smaller resolution is more suitable for lite face detection
+        // on mobile phone. So, we set this preview size (480,480) here.
+        CameraSurfaceView.EXPECTED_PREVIEW_WIDTH = 480;
+        CameraSurfaceView.EXPECTED_PREVIEW_HEIGHT = 480;
         svPreview = (CameraSurfaceView) findViewById(R.id.sv_preview);
         svPreview.setOnTextureChangedListener(this);
         svPreview.switchCamera(); // Front camera for HumanSeg
diff --git a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/segmentation/SegmentationMainActivity.java b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/segmentation/SegmentationMainActivity.java
index c1d9c72b6..3420581c2 100644
--- a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/segmentation/SegmentationMainActivity.java
+++ b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/segmentation/SegmentationMainActivity.java
@@ -310,8 +310,13 @@ public class SegmentationMainActivity extends Activity implements View.OnClickLi
     public void initView() {
         TYPE = REALTIME_DETECT;
-        CameraSurfaceView.EXPECTED_PREVIEW_WIDTH = 720;
-        CameraSurfaceView.EXPECTED_PREVIEW_HEIGHT = 360;
+        // (1) EXPECTED_PREVIEW_WIDTH should mean 'height' and EXPECTED_PREVIEW_HEIGHT
+        // should mean 'width' if the camera display orientation is 90 | 270 degree
+        // (Hold the phone upright to record video)
+        // (2) Smaller resolution is more suitable for Lite Portrait HumanSeg.
+        // So, we set this preview size (480,480) here.
+        CameraSurfaceView.EXPECTED_PREVIEW_WIDTH = 480;
+        CameraSurfaceView.EXPECTED_PREVIEW_HEIGHT = 480;
         svPreview = (CameraSurfaceView) findViewById(R.id.sv_preview);
         svPreview.setOnTextureChangedListener(this);
         svPreview.switchCamera(); // Front camera for HumanSeg
diff --git a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java
index 6da24c2f6..eabeb74f4 100644
--- a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java
+++ b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java
@@ -126,7 +126,7 @@ public class Utils {
     }
 
     public static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
-        final double ASPECT_TOLERANCE = 0.1;
+        final double ASPECT_TOLERANCE = 0.3;
         double targetRatio = (double) w / h;
 
         if (sizes == null) return null;
diff --git a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java
index 9269a4828..11d9fcdcb 100644
--- a/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java
+++ b/java/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java
@@ -30,8 +30,8 @@ public class CameraSurfaceView extends GLSurfaceView implements Renderer,
         SurfaceTexture.OnFrameAvailableListener {
     private static final String TAG = CameraSurfaceView.class.getSimpleName();
 
-    public static int EXPECTED_PREVIEW_WIDTH = 1280;
-    public static int EXPECTED_PREVIEW_HEIGHT = 720;
+    public static int EXPECTED_PREVIEW_WIDTH = 1280; // 1920
+    public static int EXPECTED_PREVIEW_HEIGHT = 720; // 960
 
     protected int numberOfCameras;
     protected int selectedCameraId;
@@ -99,6 +99,16 @@ public class CameraSurfaceView extends GLSurfaceView implements Renderer,
     private int vcTex2Screen;
     private int tcTex2Screen;
 
+    public void setBitmapReleaseMode(boolean mode) {
+        synchronized (this) {
+            bitmapReleaseMode = mode;
+        }
+    }
+
+    public Bitmap getBitmap() {
+        return ARGB8888ImageBitmap; // may null or recycled.
+    }
+
     public interface OnTextureChangedListener {
         boolean onTextureChanged(Bitmap ARGB8888ImageBitmap);
     }
@@ -236,16 +246,6 @@ public class CameraSurfaceView extends GLSurfaceView implements Renderer,
         GLES20.glFlush();
     }
 
-    public void setBitmapReleaseMode(boolean mode) {
-        synchronized (this) {
-            bitmapReleaseMode = mode;
-        }
-    }
-
-    public Bitmap getBitmap() {
-        return ARGB8888ImageBitmap; // may null or recycled.
-    }
-
     private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
         float[] result = new float[coords.length];
         float[] vt = new float[4];
@@ -287,20 +287,28 @@ public class CameraSurfaceView extends GLSurfaceView implements Renderer,
     public void openCamera() {
         if (disableCamera) return;
         camera = Camera.open(selectedCameraId);
-        List<Size> supportedPreviewSizes = camera.getParameters().getSupportedPreviewSizes();
-        Size previewSize = Utils.getOptimalPreviewSize(supportedPreviewSizes, EXPECTED_PREVIEW_WIDTH,
-                EXPECTED_PREVIEW_HEIGHT);
         Camera.Parameters parameters = camera.getParameters();
-        parameters.setPreviewSize(previewSize.width, previewSize.height);
-        if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
-            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-        }
-        camera.setParameters(parameters);
         int degree = Utils.getCameraDisplayOrientation(context, selectedCameraId);
         camera.setDisplayOrientation(degree);
         boolean rotate = degree == 90 || degree == 270;
-        textureWidth = rotate ? previewSize.height : previewSize.width;
-        textureHeight = rotate ? previewSize.width : previewSize.height;
+        int adjusted_width = rotate ? EXPECTED_PREVIEW_HEIGHT : EXPECTED_PREVIEW_WIDTH;
+        int adjusted_height = rotate ? EXPECTED_PREVIEW_WIDTH : EXPECTED_PREVIEW_HEIGHT;
+
+        List<Size> supportedPreviewSizes = camera.getParameters().getSupportedPreviewSizes();
+
+        Size previewSize = Utils.getOptimalPreviewSize(
+                supportedPreviewSizes, adjusted_width, adjusted_height);
+
+        textureWidth = previewSize.width;
+        textureHeight = previewSize.height;
+
+        parameters.setPreviewSize(previewSize.width, previewSize.height);
+        camera.setParameters(parameters);
+
+        if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+        }
+
         // Destroy FBO and draw textures
         GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
         GLES20.glDeleteFramebuffers(1, fbo, 0);
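
Note on the preview-size changes: the diff only shows the new ASPECT_TOLERANCE constant, the 480x480 request, and the width/height swap in openCamera(), not the body of Utils.getOptimalPreviewSize(). The sketch below is a minimal, Android-free rendition of how these pieces interact, assuming the method follows the stock Android camera-sample selection rule (filter candidates by aspect-ratio tolerance, pick the height closest to the target, fall back to closest-height-only if nothing passes the filter). The class name PreviewSizeSketch, the helper pickOptimalSize, and the candidate size list are illustrative assumptions, not taken from this repository or any real device.

import java.util.Arrays;
import java.util.List;

public class PreviewSizeSketch {
    static final double ASPECT_TOLERANCE = 0.3;  // the value this diff moves to (was 0.1)

    // Assumed selection rule (stock Android camera sample): keep sizes whose
    // aspect ratio is within ASPECT_TOLERANCE of the target, choose the one
    // whose height is closest to the requested height, and ignore the ratio
    // filter entirely if no candidate passes it.
    static int[] pickOptimalSize(List<int[]> sizes, int w, int h) {
        double targetRatio = (double) w / h;
        int[] optimal = null;
        double minDiff = Double.MAX_VALUE;
        for (int[] s : sizes) {
            double ratio = (double) s[0] / s[1];
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(s[1] - h) < minDiff) {
                optimal = s;
                minDiff = Math.abs(s[1] - h);
            }
        }
        if (optimal == null) {  // nothing matched the aspect ratio; use height only
            minDiff = Double.MAX_VALUE;
            for (int[] s : sizes) {
                if (Math.abs(s[1] - h) < minDiff) {
                    optimal = s;
                    minDiff = Math.abs(s[1] - h);
                }
            }
        }
        return optimal;
    }

    public static void main(String[] args) {
        // Illustrative 16:9 and 4:3 preview sizes (width x height).
        List<int[]> sizes = Arrays.asList(
                new int[]{1920, 1080}, new int[]{1280, 720}, new int[]{960, 720},
                new int[]{800, 600}, new int[]{640, 480}, new int[]{320, 240});
        // With display orientation 90/270 the square 480x480 request is unchanged
        // by the swap in openCamera(), so the target ratio is 1.0. None of these
        // sizes is within 0.3 of 1.0, so the fallback picks the height closest
        // to 480.
        int[] best = pickOptimalSize(sizes, 480, 480);
        System.out.println(best[0] + "x" + best[1]);  // prints "640x480" for this list
    }
}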
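
The setBitmapReleaseMode()/getBitmap() pair is only moved earlier in CameraSurfaceView, not modified. A hypothetical caller sketch follows, assuming (from the field name and the "may null or recycled" comment, which this diff does not confirm) that passing false stops the renderer from recycling the last ARGB8888ImageBitmap after onTextureChanged(). The helper name grabLastPreviewFrame and its placement inside one of the example activities (which already import android.graphics.Bitmap and CameraSurfaceView) are illustrative only.

    // Hypothetical helper inside an example activity such as SegmentationMainActivity.
    private Bitmap grabLastPreviewFrame(CameraSurfaceView svPreview) {
        svPreview.setBitmapReleaseMode(false);  // assumption: opt out of automatic release
        Bitmap frame = svPreview.getBitmap();   // per its own comment: may be null or recycled
        return (frame != null && !frame.isRecycled()) ? frame : null;
    }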