Android: a one-minute guide to simple native FaceDetector face recognition, and fixing blurry video recorded with SurfaceView + MediaRecorder

Recently, while logging into the backend management system, I noticed that the uploaded authentication videos were too blurry. It turned out the cause was that no encoding bit rate had been set on the MediaRecorder.

// Set the frame frequency to make the recorded video clearer
mRecorder.setVideoEncodingBitRate(5*1024*1024);

1. Turn on the camera directly to get the desired bitmap. Why YuvImage is used : https://blog.csdn.net/illidantao/article/details/51366047

    try {
        // Open the front-facing camera. open() throws if the camera is
        // unavailable (already in use, or the device has no front camera).
        camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
    } catch (Exception e) {
        e.printStackTrace();
        return; // nothing to preview without a camera
    }
    // Rotate the preview 90° so the portrait UI matches the sensor orientation.
    camera.setDisplayOrientation(90);
    camera.startPreview();
    camera.setPreviewCallback(new Camera.PreviewCallback() {

        @Override
        public void onPreviewFrame(final byte[] data, Camera camera) {
            // Preview frames arrive as NV21 by default; wrap them in a
            // YuvImage so they can be JPEG-compressed and then decoded
            // into a Bitmap for the face detector.
            Camera.Size size = camera.getParameters().getPreviewSize();
            YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width,
                    size.height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            try {
                image.compressToJpeg(new Rect(0, 0, size.width, size.height),
                        80, stream);
                Bitmap bmp = BitmapFactory.decodeByteArray(
                        stream.toByteArray(), 0, stream.size());
                // Undo the 90° display rotation so the face is upright
                // for detection.
                Matrix matrix = new Matrix();
                matrix.setRotate(-90);
                bmp = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), matrix, true);
                // FaceDetector only accepts RGB_565 bitmaps.
                Bitmap bitmap = bmp.copy(Bitmap.Config.RGB_565, true);
                findFace(bitmap);
            } finally {
                // Close the stream on every path, not only on success.
                try {
                    stream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

    });

The Android native SDK contains a FaceDetector class. A few caveats: (a) it accepts data only as a Bitmap; (b) it can only recognize faces whose eye-to-eye distance is greater than 20 pixels (this threshold can of course be changed in the framework layer); (c) it can only detect the position of a face (the midpoint between the eyes and the eye distance) — it cannot match a face against a reference (i.e. find a specific person). The code below obtains the coordinates of the face's center point. The maximum number of faces to detect is set by the size of the array you pass in: `new FaceDetector.Face[1]` for one face, or a larger array to detect multiple faces. `findFaces(bitmap, face)` returns an int; a value greater than 0 means at least one face was found, and the next step can begin.

public void findFace(Bitmap bitmap) {
    // Detect at most one face; FaceDetector requires an RGB_565 bitmap.
    final int maxFaces = 1;
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), maxFaces);
    FaceDetector.Face[] found = new FaceDetector.Face[maxFaces];
    if (detector.findFaces(bitmap, found) > 0) {
        // A face is present: stop the detection preview and release the
        // camera so MediaRecorder can take it over for recording.
        camera.stopPreview();
        preview = false;
        camera.setPreviewCallback(null);
        camera.release();
        initMediaRecorder();
    }
}

2. Once face detection succeeds, a six-second video recording is started.

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Translucent window pixel format so the camera preview shows through.
    getWindow().setFormat(PixelFormat.TRANSLUCENT);
    setContentView(R.layout.activity_uservideo);
    
    // NOTE(review): 'surfaceview' is presumably bound elsewhere
    // (findViewById or view injection) — confirm it is non-null here.
    // SURFACE_TYPE_PUSH_BUFFERS is deprecated and ignored on API 11+;
    // it is kept here for compatibility with very old devices.
    surfaceview.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    surfaceview.getHolder().setFixedSize(200, 200);
    surfaceview.getHolder().addCallback(new SurfaceViewCallback());
}
 public void initMediaRecorder() {
        // Configure and start a MediaRecorder on the already-unlocked camera.
        if (mRecorder == null) {
            mRecorder = new MediaRecorder(); // Create MediaRecorder
        }

        try {
            initCamera();

            // These two calls must come before setOutputFormat.
            mRecorder.setCamera(videocamera);
            mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
            // Set output container format.
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
            // These calls must come after setOutputFormat.
            mRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
            // NOTE(review): QUALITY_1080P is not guaranteed on every device
            // (front cameras especially) — consider CamcorderProfile.hasProfile()
            // with a fallback to QUALITY_HIGH.
            CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_1080P);
            mRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
            // Raise the encoding bit rate so the recorded video is sharp.
            mRecorder.setVideoEncodingBitRate(5 * 1024 * 1024);
            mRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
            file = Constants.FOLDER_PHOTO + System.currentTimeMillis() + ".mp4";
            mRecorder.setOutputFile(file);

            mRecorder.prepare();
            mRecorder.start();   // Recording is now started
            setCountBack();
        } catch (Exception e1) {
            // prepare()/start() can throw IllegalStateException and
            // RuntimeException, not only IOException — catch broadly, and
            // release the recorder so it is not left in a broken state.
            e1.printStackTrace();
            mRecorder.reset();
            mRecorder.release();
            mRecorder = null;
        }
    }
private void initCamera() {
    // Find and open the front-facing camera, then unlock it so
    // MediaRecorder can take ownership.
    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    cameraCount = Camera.getNumberOfCameras(); // number of cameras on the device

    for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
        Camera.getCameraInfo(camIdx, cameraInfo);
        // facing is one of CAMERA_FACING_FRONT / CAMERA_FACING_BACK.
        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            try {
                videocamera = Camera.open(camIdx);
                videocamera.setDisplayOrientation(90);
                videocamera.cancelAutoFocus();
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
            break; // stop at the first front-facing camera — don't keep scanning
        }
    }
    // open() may have failed (or there is no front camera); calling
    // unlock() on null would throw a NullPointerException.
    if (videocamera != null) {
        // Release the camera lock so MediaRecorder can use the camera.
        videocamera.unlock();
    }
}

 

void setCountBack() {
    // Show a 5..0 countdown on screen; when it reaches zero, stop the
    // recorder and upload the clip (roughly six seconds of video:
    // the initial tick plus five one-second decrements).
    timeCount = 5;
    timetext.setText(timeCount + "second");
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            if (timeCount > 0) {
                timetext.setText(--timeCount + "second");
                handler.postDelayed(this, 1000);
                return;
            }

            timetext.setText("");

            if (mRecorder == null) {
                return;
            }

            try {
                mRecorder.stop();
                //Upload video
                uploadVideo();
            } catch (Exception e) {
                // stop() throws if no valid data was captured.
                ToastUtil.showToast(AuthRecordingVideoActivity.this, getString(R.string.error_recoder));
            } finally {
                // Always release the recorder so the camera and encoder
                // are freed — the original leaked them after stop().
                mRecorder.reset();
                mRecorder.release();
                mRecorder = null;
            }
        }
    }, 1000);


}

About SurfaceView preview Camera stretch: https://blog.csdn.net/illidantao/article/details/51366047

Tags: SurfaceView Android SDK

Posted on Mon, 03 Feb 2020 11:16:14 -0500 by cljones81