In the previous chapter we successfully decoded an RTMP stream into NV21 data with FFmpeg. Next I will explain how to render NV21 frames with OpenGL, how to run face detection on the NV21 data, and how to draw the face bounding box.

OpenGL data rendering

OpenGL rendering requires a vertex shader and a fragment shader. The author provides a shader utility class, GLUtil, which defines a normal effect, a grayscale effect and an emboss effect for reference. In addition, the texture data must be sized according to the width and height of the frame. The last step is to extract the Y, U and V components from the raw data that is passed in, and then draw them. The author has defined an RtmpGLSurfaceView as a reference.

public class RtmpGLSurfaceView extends GLSurfaceView {
    private static final String TAG = "CameraGLSurfaceView"; Private int frameWidth, frameHeight; private boolean isMirror; private int rotateDegree = 0; // Determine if preview data is passed in to avoid a green background (y, u, v are all 0) during initializationfalse; Private int radius = 0; private ByteBuffer yBuf = null, uBuf = null, vBuf = null; Private int[] yTexture = new int[1]; private int[] uTexture = new int[1]; private int[] vTexture = new int[1]; // private byte[] yArray; private byte[] uArray; private byte[] vArray; private static final int FLOAT_SIZE_BYTES = 4; // Effect of fragment shader Private String fragmentShaderCode = glutil. FRAG_SHADER_NORMAL; private FloatBuffer squareVertices = null; private FloatBuffer coordVertices = null; private boolean rendererReady =false;
    float[] coordVertice = null;

    public RtmpGLSurfaceView(Context context) {
        this(context, null);
    }

    public RtmpGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2); // Set Renderer to GLSurfaceViewsetRenderer(new YUVRenderer()); // Only draw the view if the drawing data changessetRenderMode(RENDERMODE_WHEN_DIRTY);
        setOutlineProvider(new ViewOutlineProvider() { @Override public void getOutline(View view, Outline outline) { Rect rect = new Rect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight()); outline.setRoundRect(rect, radius); }});setClipToOutline(true);
    }

    public void turnRound() {
        invalidateOutline();
    }

    public int getRadius() {
        return radius;
    }

    public void setRadius(int radius) { this.radius = radius; } /** * set different fragmentShaderCode for different preview effects ** @param fragmentShaderCode fragmentShaderCode */ public voidsetFragmentShaderCode(String fragmentShaderCode) {
        this.fragmentShaderCode = fragmentShaderCode;
    }

    public void init(boolean isMirror, int rotateDegree, int frameWidth, int frameHeight) {
        if (this.frameWidth == frameWidth
                && this.frameHeight == frameHeight
                && this.rotateDegree == rotateDegree
                && this.isMirror == isMirror) {
            return;
        }
        dataInput = false; this.frameWidth = frameWidth; this.frameHeight = frameHeight; this.rotateDegree = rotateDegree; this.isMirror = isMirror; yArray = new byte[this.frameWidth * this.frameHeight]; uArray = new byte[this.frameWidth * this.frameHeight / 4]; vArray = new byte[this.frameWidth * this.frameHeight / 4]; int yFrameSize = this.frameHeight * this.frameWidth; int uvFrameSize = yFrameSize >> 2; yBuf = ByteBuffer.allocateDirect(yFrameSize); yBuf.order(ByteOrder.nativeOrder()).position(0); uBuf = ByteBuffer.allocateDirect(uvFrameSize); uBuf.order(ByteOrder.nativeOrder()).position(0); vBuf = ByteBuffer.allocateDirect(uvFrameSize); vBuf.order(ByteOrder.nativeOrder()).position(0); SquareVertices = bytebuffer. allocateDirect(glutil.square_vertices. Length * FLOAT_SIZE_BYTES) .order(ByteOrder.nativeOrder()) .asFloatBuffer(); squareVertices.put(GLUtil.SQUARE_VERTICES).position(0); // Texture coordinatesif (isMirror) {
            switch (rotateDegree) {
                case 0:
                    coordVertice = GLUtil.MIRROR_COORD_VERTICES;
                    break;
                case 90:
                    coordVertice = GLUtil.ROTATE_90_MIRROR_COORD_VERTICES;
                    break;
                case 180:
                    coordVertice = GLUtil.ROTATE_180_MIRROR_COORD_VERTICES;
                    break;
                case 270:
                    coordVertice = GLUtil.ROTATE_270_MIRROR_COORD_VERTICES;
                    break;
                default:
                    break; }}else {
            switch (rotateDegree) {
                case 0:
                    coordVertice = GLUtil.COORD_VERTICES;
                    break;
                case 90:
                    coordVertice = GLUtil.ROTATE_90_COORD_VERTICES;
                    break;
                case 180:
                    coordVertice = GLUtil.ROTATE_180_COORD_VERTICES;
                    break;
                case 270:
                    coordVertice = GLUtil.ROTATE_270_COORD_VERTICES;
                    break;
                default:
                    break; } } coordVertices = ByteBuffer.allocateDirect(coordVertice.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer(); coordVertices.put(coordVertice).position(0); } /** * create OpenGL Program and associate variables in GLSL ** @param fragmentShaderCode fragmentShaderCode */ private void createGLProgram(String) fragmentShaderCode) { int programHandleMain = GLUtil.createShaderProgram(fragmentShaderCode);if(programHandleMain ! Gles20. glUseProgram(programHandleMain); / / get the vertex shader variable int glPosition = GLES20. GlGetAttribLocation (programHandleMain,"attr_position");
            int textureCoord = GLES20.glGetAttribLocation(programHandleMain, "attr_tc"); / / get fragment shader variable int ySampler = GLES20. GlGetUniformLocation (programHandleMain,"ySampler");
            int uSampler = GLES20.glGetUniformLocation(programHandleMain, "uSampler");
            int vSampler = GLES20.glGetUniformLocation(programHandleMain, "vSampler"); Gles20.gl_texture0 and ySampler bind gles20.gl_texture1 and uSampler bind gles20.gl_texture2 and vSampler bind ** The second parameter of glUniform1i represents the layer number */ gles20. glUniform1i(ySampler, 0); GLES20.glUniform1i(uSampler, 1); GLES20.glUniform1i(vSampler, 2); GLES20.glEnableVertexAttribArray(glPosition); GLES20.glEnableVertexAttribArray(textureCoord); /** * set Vertex Shader data */ squarevertex.position (0); GLES20.glVertexAttribPointer(glPosition, GLUtil.COUNT_PER_SQUARE_VERTICE, GLES20.GL_FLOAT,false, 8, squareVertices);
            coordVertices.position(0);
            GLES20.glVertexAttribPointer(textureCoord, GLUtil.COUNT_PER_COORD_VERTICES, GLES20.GL_FLOAT, false, 8, coordVertices);
        }
    }

    public class YUVRenderer implements Renderer {
        private void initRenderer() {
            rendererReady = false; createGLProgram(fragmentShaderCode); Gles20.glenable (gles20.gl_texture_2d); CreateTexture (frameWidth, frameHeight, gles20. GL_LUMINANCE, yTexture); createTexture(frameWidth / 2, frameHeight / 2, GLES20.GL_LUMINANCE, uTexture); createTexture(frameWidth / 2, frameHeight / 2, GLES20.GL_LUMINANCE, vTexture); rendererReady =true; } @Override public void onSurfaceCreated(GL10 unused, EGLConfig config) { initRenderer(); } private void createTexture(int width, int height, int format, Int [] textureId) {// Create texture gles20.glgentextures (1, textureId, 0); Gles20.glbindtexture (gles20.gl_texture_2d, textureId[0]); /** * {@link GLES20#GL_TEXTURE_WRAP_S} represents the left and right texture wrap mode
             * {@link GLES20#GL_TEXTURE_WRAP_T} represents up and down texture wrap mode
             *
             *  {@link GLES20# GL_REPEAT} : repeat
             *  {@link GLES20#GL_MIRRORED_REPEAT} : Mirror repetition
             *  {@link GLES20#GL_CLAMP_TO_EDGE} : Ignore border interception* * For example we use {@link GLES20# GL_REPEAT} :* * The squareVertices coordVertices are the same as the textureView vertices. 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f, 2f 2.0F, 0.0f, -> Split into 4 identical blocks compared to textureView preview (bottom left, bottom right, top left, Gles20.gl_texture_2d, gles20.gl_texture_2d, gles20.gl_texture_2d, gles20.gl_texture_2d, gles20.gl_texture_2d, gles20.gl_texture_2d, gles20.gl_texture_2d GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT); /** * {@link GLES20#GL_TEXTURE_MIN_FILTER} represents the case where the displayed texture is smaller than the one loaded in
             * {@link GLES20#GL_TEXTURE_MAG_FILTER} represents what happens when the displayed texture is larger than the one loaded in
             *
             *  {@link GLES20#GL_NEAREST} : Use the color of the nearest pixel in the texture as the color of the pixel to be drawn
             *  {@link GLES20#GL_LINEAR} : Using the closest coordinates of several colors in the texture, using a weighted average algorithm to get the color of the pixel to be drawn*/ GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE, null); } @override public void onDrawFrame(GL10 gl) {// Activate, bind, and set data for each textureif(dataInput) { //y GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]); GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, frameWidth, frameHeight, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuf); //u GLES20.glActiveTexture(GLES20.GL_TEXTURE1); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexture[0]); GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, frameWidth >> 1, frameHeight >> 1, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, uBuf); //v GLES20.glActiveTexture(GLES20.GL_TEXTURE2); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexture[0]); GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, frameWidth >> 1, frameHeight >> 1, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuf); Gles20.gldrawarrays (gles20.gl_triangLE_strip, 0, 4); } } @Override public void onSurfaceChanged(GL10 unused, int width, int height) { GLES20.glViewport(0, 0, width, height);  }} @param data NV21 data */ public void refreshFrameNV21(byte[] data) {if (rendererReady) {
            yBuf.clear();
            uBuf.clear();
            vBuf.clear();
            putNV21(data, frameWidth, frameHeight);
            dataInput = true; requestRender(); }} /** * @param data */ public void refreshYv12 (byte[] data) {if (rendererReady) {
            yBuf.clear();
            uBuf.clear();
            vBuf.clear();
            putYV12(data, frameWidth, frameHeight);
            dataInput = true; requestRender(); ** @param SRC NV21 frame * @param width * @param height */ private void putNV21(byte[] src, int width, int height) { int ySize = width * height; int frameSize = ySize * 3 / 2; Arraycopy (SRC, 0, yArray, 0, ySize); // ArrayCopy (SRC, 0, yArray, 0, ySize); int k = 0; Int index = ySize;while(index < frameSize) { vArray[k] = src[index++]; uArray[k++] = src[index++]; } yBuf.put(yArray).position(0); uBuf.put(uArray).position(0); vBuf.put(vArray).position(0); } @param SRC @param width @param height @param height private void putYV12(byte[] src, int width, int height) { int ySize = width * height; int frameSize = ySize * 3 / 2; Arraycopy (SRC, 0, yArray, 0, ySize); // ArrayCopy (SRC, 0, yArray, 0, ySize); int k = 0; Int index = ySize;while(index < frameSize) { vArray[k] = src[index++]; uArray[k++] = src[index++]; } yBuf.put(yArray).position(0); uBuf.put(uArray).position(0); vBuf.put(vArray).position(0); }}Copy the code

GLUtil is the definition and initialization code for vertex shaders and fragment shaders.

public class GLUtil {
    private static final String TAG = "GLUtil"; Private static String VERTEX_SHADER = private static String VERTEX_SHADER =" attribute vec4 attr_position; \n" +
                    " attribute vec2 attr_tc; \n" +
                    " varying vec2 tc; \n" +
                    " void main() {\n" +
                    " gl_Position = attr_position; \n" +
                    " tc = attr_tc; \n" +
                    "}"; /** * public static String FRAG_SHADER_NORMAL ="precision mediump float; \n" +
            " varying vec2 tc; \n" +
                    " uniform sampler2D ySampler; \n" +
                    " uniform sampler2D uSampler; \n" +
                    " uniform sampler2D vSampler; \n" +
                    "Const mat3 convertMat = mat3(1.0, 1.0, 1.0, 0, -0.344, 1.77, 1.403, -0.714,0); \n" +
                    " void main()\n" +
                    " {\n" +
                    " vec3 yuv; \n" +
                    " yuv.x = texture2D(ySampler, tc).r; \n" +
                    Y = texture2D(uSampler, tc).r-0.5; \n" +
                    "Yuv. z = texture2D(vSampler, tc).r-0.5; \n" +
                    "Gl_FragColor = vec4(convertMat * yuv, 1.0); \n" +
                    "}"; /** * segment shader, grayscale effect. */ public static String FRAG_SHADER_GRAY = public static String FRAG_SHADER_GRAY ="precision mediump float; \n" +
            " varying vec2 tc; \n" +
                    " uniform sampler2D ySampler; \n" +
                    " void main()\n" +
                    " {\n" +
                    " vec3 yuv; \n" +
                    " yuv.x = texture2D(ySampler, tc).r; \n" +
                    "Gl_FragColor = vec4(yuv.x), 1.0); \n" +
                    "}"; /** * segment shader, emboss effect. Public static String FRAG_SHADER_SCULPTURE = public static String FRAG_SHADER_SCULPTURE ="precision mediump float; \n" +
                    "varying vec2 tc; \n" +
                    " uniform sampler2D ySampler; \n" +
                    "Const vec2 texSize = vec2(100.0, 100.0); \n" +
                    "Const Vec4 sculptureColor = VEC4 (0.5, 0.5, 0.5, 1.0); \n" +
                    "\n" +
                    "void main()\n" +
                    "{\n" +
                    "    vec2 upLeftCoord = vec2(tc.x-1.0/texSize.x, tc.y-1.0/texSize.y);\n" +
                    " vec4 curColor = texture2D(ySampler, tc); \n" +
                    " vec4 upLeftColor = texture2D(ySampler, upLeftCoord); \n" +
                    " vec4 delColor = curColor - upLeftColor; \n" +
                    "Gl_FragColor = VEC4 (delColor), 0.0) + sculptureColor; \n" +
                    "}"; Static final int COUNT_PER_SQUARE_VERTICE = 2; Static final int COUNT_PER_COORD_VERTICES = 2; //COORD_VERTICES = 2; /** ** static finalfloatF [] SQUARE_VERTICES = {1.0, 1.0 f, 1.0 f to 1.0 f, 1.0 f, f 1.0, 1.0 f, 1.0 f}; / * * * * the original figures show 0, 1 * * * * * * * * * * * 1 * * * * * * * * * * * * * * * * 0, 0 * * * * * * * * * * * 1, 0 * / static finalfloatF [] COORD_VERTICES = {0.0, 1.0 f to 1.0 f to 1.0 f to 0.0 f to 0.0 f to 1.0 f to 0.0 f}; / 90 degrees counterclockwise show * * * * 1 * * * * * * * * * * * 1, 0 * * * * * * * * * * * * * * * * 0, 1 * * * * * * * * * * * 0, 0 * / static finalfloat[] ROTATE_90_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f}; / * * * 180 degrees counterclockwise shows * 0, 1 * * * * * * * * * * * 1, 1 * * * * * * * * * * * * * * * * 0, 0 * * * * * * * * * * * 1, 0 * / static finalfloat[] ROTATE_180_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f}; / * * * 270 degrees counterclockwise shows * 0, 1 * * * * * * * * * * * 1, 1 * * * * * * * * * * * * * * * * 0, 0 * * * * * * * * * * * 1, 0 * / static finalfloat[] ROTATE_270_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f}; / mirror according to * * * * 1, 1 * * * * * * * * * * * 0, 1 * * * * * * * * * * * * * * * * 1, 0 * * * * * * * * * * * 0, 0 * / static finalfloat[] MIRROR_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f}; / * * * mirror and the 90 degrees counterclockwise to display * 0, 1 * * * * * * * * * * * 0, 0 * * * * * * * * * * * * * * * * 1, 1 * * * * * * * * * * * 1, 0 * / static finalfloat[] ROTATE_90_MIRROR_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f}; / * * * mirror 1, 0 and 180 degrees counterclockwise to display * * * * * * * * * * * * 0, 0 * * * * * * * * * * * * * * * * 1 * * * * * * * * * * * 0, 1 * / static finalfloat[] ROTATE_180_MIRROR_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f}; / * * * mirror 1, 0 and 270 degrees counterclockwise to display * * * * * * * * * * * * 1 * * * * * * * * * * * * * * * * 0, 0 * * * * * * * * * * 
* 0, 1 * / static finalfloat[] ROTATE_270_MIRROR_COORD_VERTICES = {0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f}; /** * create OpenGL Program and link to ** @param fragmentShaderCode fragmentShaderCode * @return*/ static int createShaderProgram(String fragmentShaderCode) {int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER); int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode); // create an empty OpenGL ES Program int mProgram = gles20.glCreateProgram (); GlAttachShader (mProgram, vertexShader); // Add vertex shader to program gles20.glattachShader (mProgram, vertexShader); // Add fragmentShader to program gles20. glAttachShader(mProgram, fragmentShader); OpenGL ES Program gles20.gllinkProgram (mProgram); IntBuffer linked = intbuffer.allocate (1); GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked);if (linked.get(0) == 0) {
            return- 1; }returnmProgram; } /** * load shader ** @paramtypeShader type, which can be a fragment shader {@link GLES20#GL_FRAGMENT_SHADER {@link GLES20#GL_VERTEX_SHADER}* @param shaderCode shaderCode * @return*/ private static int loadShader(inttype, String shaderCode) {// Create an empty shader int shader = gles20.glcreateshader (type); Gles20.glshadersource (shader, shaderCode); // compile shader gles20.glcompileshader (shader); // Check whether the compilation is successful IntBuffer compiled = IntBuffer. Allocate (1); GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled);if (compiled.get(0) == 0) {
            return 0;
        }
        returnshader; }}Copy the code

GLSurfaceView is not difficult to call, just need to set up the shader effect, set up the callback, initialize, and finally need to draw NV21 set up, the following is the author’s example for your reference.

   // Set the shader effect
    surfaceView.setFragmentShaderCode(GLUtil.FRAG_SHADER_NORMAL);
    // Set the SurfaceHolder callback
    surfaceView.getHolder().addCallback(this);
    / / GLSurfaceView initialization
    surfaceView.init(false.0, frameWidth, frameHeight);
    // Draw NV21 data
    surfaceView.refreshFrameNV21(data);
Copy the code

Effect:

Two, face tracking and face frame drawing

After the above steps we can already draw the RTMP stream data, so only the last step remains: face detection and drawing the face frame. Face detection relies on a face recognition engine, and I will explain its use in the following steps.

  • The engine to activate
  • Engine initialization
  • Face tracking
  • Face frame drawing

1. The engine is activated

Log in to The Rainbow Software Developer Center to apply for the Android version of the engine and obtain the engine package and activation code.

AndroidManifest add permissions, Android7.0 and above please dynamically apply permissions

<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.INTERNET" />
Copy the code
// Activate the interface
 FaceEngine.activeOnline(this, Constants.APP_ID, Constants.SDK_KEY);
Copy the code

2. Initialize the engine

During initialization, select Video as the detection mode and full angle as the detection orientation (a single fixed angle performs better). The value 16 is the ratio of the frame's long side to the smallest detectable face; the larger the value, the smaller the faces that can be detected, with a maximum of 32. We set a maximum of 5 faces to detect, and since we only need face detection, we load only the face detection feature.

faceEngine = new FaceEngine();
// Initialize the engine
int code = faceEngine.init(this, DetectMode.ASF_DETECT_MODE_VIDEO, DetectFaceOrientPriority.ASF_OP_ALL_OUT,
        16, 5, FaceEngine.ASF_FACE_DETECT);
if (code != ErrorInfo.MOK) {
    // Initialization failed, show an error
    Toast.makeText(MainActivity.this, "faceEngineInit Error", Toast.LENGTH_LONG).show();
}
Copy the code

3. Face tracking

In the onFrameAvailable callback of the RtmpPlayer described above we receive the decoded NV21 data; passing it directly into the engine yields the face data.

/ / face detection interface int. Code = faceEngine detectFaces (data, frameWidth, frameHeight, faceEngine CP_PAF_NV21, faceInfos);if(code ! = errorInfo.mok) {// The face detection interface is faultylogPrint error code log. I (TAG,"onFrameAvailable: detect Error");
    return;
}
Copy the code

4 face frame drawing

Drawing the face frame is relatively complicated: the detected face information has to be mapped and drawn onto a canvas. This article reuses the DrawHelper and FaceRectView classes from the ArcFace demo to achieve this.

DrawHelper

public class DrawHelper {
    private int previewWidth, previewHeight, canvasWidth, canvasHeight, cameraDisplayOrientation, cameraId;
    private boolean isMirror;
    private boolean mirrorHorizontal = false, mirrorVertical = false; /** * Create a draw helper object, And set the drawing parameters * * @param previewWidth * @Param previewHeight * @Param canvasWidth Width of the drawing control * @Param canvasHeight Height of the drawn control * @param cameraDisplayOrientation orientation * @param cameraId cameraId * @param isMirror whether the camera is displayed horizontally (if the camera is displayed in a mirror, set this parameter totrue@param mirrorHorizontal is compatible with some devices, and the horizontal is again mirrored. @param mirrorVertical is compatible with some devices. */ public DrawHelper(int previewWidth, int previewHeight, int canvasWidth, int canvasHeight, int cameraDisplayOrientation, int cameraId, boolean isMirror, boolean mirrorHorizontal, boolean mirrorVertical) { this.previewWidth = previewWidth; this.previewHeight = previewHeight; this.canvasWidth = canvasWidth; this.canvasHeight = canvasHeight; this.cameraDisplayOrientation = cameraDisplayOrientation; this.cameraId = cameraId; this.isMirror = isMirror; this.mirrorHorizontal = mirrorHorizontal; this.mirrorVertical = mirrorVertical; } public void draw(FaceRectView faceRectView, List<DrawInfo> drawInfoList) {if (faceRectView == null) {
            return;
        }
        faceRectView.clearFaceInfo();
        if (drawInfoList == null || drawInfoList.size() == 0) {
            return; } faceRectView.addFaceInfo(drawInfoList); } /** * adjust the face frame to draw ** @param ftRect FT face frame * @returnAdjustRect */ Public adjustRect(adjustRect ftRect) {int AdjustWidth = this.adjustWidth; int previewHeight = this.previewHeight; int canvasWidth = this.canvasWidth; int canvasHeight = this.canvasHeight; int cameraDisplayOrientation = this.cameraDisplayOrientation; int cameraId = this.cameraId; boolean isMirror = this.isMirror; boolean mirrorHorizontal = this.mirrorHorizontal; boolean mirrorVertical = this.mirrorVertical;if (ftRect == null) {
            return null;
        }

        Rect rect = new Rect(ftRect);
        float horizontalRatio;
        float verticalRatio;
        if (cameraDisplayOrientation % 180 == 0) {
            horizontalRatio = (float) canvasWidth / (float) previewWidth;
            verticalRatio = (float) canvasHeight / (float) previewHeight;
        } else {
            horizontalRatio = (float) canvasHeight / (float) previewWidth;
            verticalRatio = (float) canvasWidth / (float) previewHeight;
        }
        rect.left *= horizontalRatio;
        rect.right *= horizontalRatio;
        rect.top *= verticalRatio;
        rect.bottom *= verticalRatio;

        Rect newRect = new Rect();
        switch (cameraDisplayOrientation) {
            caseZero:if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.left = canvasWidth - rect.right;
                    newRect.right = canvasWidth - rect.left;
                } else {
                    newRect.left = rect.left;
                    newRect.right = rect.right;
                }
                newRect.top = rect.top;
                newRect.bottom = rect.bottom;
                break;
            case 90:
                newRect.right = canvasWidth - rect.top;
                newRect.left = canvasWidth - rect.bottom;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.top = canvasHeight - rect.right;
                    newRect.bottom = canvasHeight - rect.left;
                } else {
                    newRect.top = rect.left;
                    newRect.bottom = rect.right;
                }
                break;
            case 180:
                newRect.top = canvasHeight - rect.bottom;
                newRect.bottom = canvasHeight - rect.top;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.left = rect.left;
                    newRect.right = rect.right;
                } else {
                    newRect.left = canvasWidth - rect.right;
                    newRect.right = canvasWidth - rect.left;
                }
                break;
            case 270:
                newRect.left = rect.top;
                newRect.right = rect.bottom;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.top = rect.left;
                    newRect.bottom = rect.right;
                } else {
                    newRect.top = canvasHeight - rect.right;
                    newRect.bottom = canvasHeight - rect.left;
                }
                break;
            default:
                break;
        }

        /**
         * isMirror mirrorHorizontal finalIsMirrorHorizontal
         * true         true                false
         * false        false               false
         * true         false               true
         * false        true                true
         *
         * XOR
         */
        if (isMirror ^ mirrorHorizontal) {
            int left = newRect.left;
            int right = newRect.right;
            newRect.left = canvasWidth - right;
            newRect.right = canvasWidth - left;
        }
        if (mirrorVertical) {
            int top = newRect.top;
            int bottom = newRect.bottom;
            newRect.top = canvasHeight - bottom;
            newRect.bottom = canvasHeight - top;
        }
        returnnewRect; } /** * Draw data information to view, if {@link DrawInfo#getName()} {@link DrawInfo#getName()}* * @param Canvas requires the canvas of the view to be drawn * @param drawInfo drawing information * @Param faceRectThickness of the face frame * @param paint brush */ public static void drawFaceRect(Canvas canvas, DrawInfo drawInfo, int faceRectThickness, Paint paint) {if (canvas == null || drawInfo == null) {
            return;
        }
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(faceRectThickness);
        paint.setColor(drawInfo.getColor());
        paint.setAntiAlias(true); Path mPath = new Path(); Rect = drawinfo.getRect (); mPath.moveTo(rect.left, rect.top + rect.height() / 4); mPath.lineTo(rect.left, rect.top); mPath.lineTo(rect.left + rect.width() / 4, rect.top); // Right upper mPath. MoveTo (rect.right-rect.width () / 4, rect.top); mPath.lineTo(rect.right, rect.top); mPath.lineTo(rect.right, rect.top + rect.height() / 4); // mPath. MoveTo (rect.right, rect.bottom-rect.height () / 4); mPath.lineTo(rect.right, rect.bottom); mPath.lineTo(rect.right - rect.width() / 4, rect.bottom); MoveTo (rect.left + rect.width() / 4, rect.bottom); mPath.lineTo(rect.left, rect.bottom); mPath.lineTo(rect.left, rect.bottom - rect.height() / 4); canvas.drawPath(mPath, paint); Paint. SetStrokeWidth (1);if (drawInfo.getName() == null) {
            paint.setStyle(Paint.Style.FILL_AND_STROKE);
            paint.setTextSize(rect.width() / 8);

            String str = (drawInfo.getSex() == GenderInfo.MALE ? "MALE" : (drawInfo.getSex() == GenderInfo.FEMALE ? "FEMALE" : "UNKNOWN"))
                    + ","
                    + (drawInfo.getAge() == AgeInfo.UNKNOWN_AGE ? "UNKNWON" : drawInfo.getAge())
                    + ","
                    + (drawInfo.getLiveness() == LivenessInfo.ALIVE ? "ALIVE" : (drawInfo.getLiveness() == LivenessInfo.NOT_ALIVE ? "NOT_ALIVE" : "UNKNOWN"));
            canvas.drawText(str, rect.left, rect.top - 10, paint);
        } else {
            paint.setStyle(Paint.Style.FILL_AND_STROKE);
            paint.setTextSize(rect.width() / 8);
            canvas.drawText(drawInfo.getName(), rect.left, rect.top - 10, paint);
        }
    }

    public void setPreviewWidth(int previewWidth) {
        this.previewWidth = previewWidth;
    }

    public void setPreviewHeight(int previewHeight) {
        this.previewHeight = previewHeight;
    }

    public void setCanvasWidth(int canvasWidth) {
        this.canvasWidth = canvasWidth;
    }

    public void setCanvasHeight(int canvasHeight) {
        this.canvasHeight = canvasHeight;
    }

    public void setCameraDisplayOrientation(int cameraDisplayOrientation) {
        this.cameraDisplayOrientation = cameraDisplayOrientation;
    }

    public void setCameraId(int cameraId) {
        this.cameraId = cameraId;
    }

    public void setMirror(boolean mirror) {
        isMirror = mirror;
    }

    public int getPreviewWidth() {
        return previewWidth;
    }

    public int getPreviewHeight() {
        return previewHeight;
    }

    public int getCanvasWidth() {
        return canvasWidth;
    }

    public int getCanvasHeight() {
        return canvasHeight;
    }

    public int getCameraDisplayOrientation() {
        return cameraDisplayOrientation;
    }

    public int getCameraId() {
        return cameraId;
    }

    public boolean isMirror() {
        return isMirror;
    }

    public boolean isMirrorHorizontal() {
        return mirrorHorizontal;
    }

    public void setMirrorHorizontal(boolean mirrorHorizontal) {
        this.mirrorHorizontal = mirrorHorizontal;
    }

    public boolean isMirrorVertical() {
        return mirrorVertical;
    }

    public void setMirrorVertical(boolean mirrorVertical) { this.mirrorVertical = mirrorVertical; }}Copy the code

FaceRectView

public class FaceRectView extends View { private CopyOnWriteArrayList<DrawInfo> drawInfoList = new CopyOnWriteArrayList<>(); // Private Paint Paint; Private static final int DEFAULT_FACE_RECT_THICKNESS = 6; public FaceRectView(Context context) { this(context, null); } public FaceRectView(Context context, @Nullable AttributeSet attrs) { super(context, attrs); paint = new Paint(); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas);if(drawInfoList ! = null && drawInfoList.size() > 0) {for (int i = 0; i < drawInfoList.size(); i++) {
                DrawHelper.drawFaceRect(canvas, drawInfoList.get(i), DEFAULT_FACE_RECT_THICKNESS, paint);
            }
        }
    }

    public void clearFaceInfo() { drawInfoList.clear(); postInvalidate(); } public void addFaceInfo(DrawInfo faceInfo) { drawInfoList.add(faceInfo); postInvalidate(); } public void addFaceInfo(List<DrawInfo> faceInfoList) { drawInfoList.addAll(faceInfoList); postInvalidate(); }}Copy the code

We just need to overlay the original GLSurface with another layer of FaceRectView and draw the box onto the FaceRectView. The two layers are superimposed on each other to achieve the desired effect. The following is an example.

// DrawHelper initialization: preview size, drawing-control size, display
// orientation, camera id, and the three mirror flags (see the DrawHelper
// constructor javadoc for details).
drawHelper = new DrawHelper(width, height, faceRectView.getWidth(), faceRectView.getHeight(),
        0, 0, false, false, false);
// Build the draw info from the detected faces
for (int i = 0; i < faceInfos.size(); i++) {
    drawInfoList.add(new DrawInfo(drawHelper.adjustRect(faceInfos.get(i).getRect()),
            GenderInfo.UNKNOWN, AgeInfo.UNKNOWN_AGE, LivenessInfo.UNKNOWN, Color.YELLOW,
            String.valueOf(faceInfos.get(i).getFaceId())));
}
drawHelper.draw(faceRectView, drawInfoList);
Copy the code

Effect:

Third, the appendix

Below are the demo code and the link to the previous article for your reference. If this helped you, please give my demo a star.

Android Face Recognition based on RTMP video Stream

RtmpPlayerDemo engineering code (including display and face rendering)