Next, use H264 or H265 to achieve mobile phone screen casting

Overview

In this part, the preview data of the camera is converted into H265 bit stream and sent to the receiver

  • The camera delivers NV21 frames, but the hardware encoder MediaCodec does not accept NV21 directly, so the chroma bytes must be reordered from NV21 to NV12 (both are YUV420 semi-planar formats)
  • Camera sensors are mounted in landscape orientation, so the preview display orientation is set to 90 degrees
  • Since the camera is horizontal, the YUV420 data should also be rotated by 90. (Note: Since the data is rotated, the width of the MediaCodec becomes the height of the camera.)

MainActivity

Request the camera permission, open the Socket, and preview the camera data

public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {

    private static final int REQUEST_CODE_CAMERA = 100;
    private SurfaceHolder mSurfaceHolder;
    private PushSocket mPushSocket;


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        SurfaceView surfaceview = findViewById(R.id.surfaceview);
        surfaceview.getHolder().addCallback(this);


    }



    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CODE_CAMERA && grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // If the application is successful, you can take photos
           initSocket();

            Toast.makeText(this."I have access.", Toast.LENGTH_SHORT).show();
        } else {
            Toast.makeText(this."CAMERA PERMISSION DENIED", Toast.LENGTH_SHORT).show(); }}private void initSocket(a) {
        mPushSocket = new PushSocket(this,mSurfaceHolder);
        mPushSocket.start();
    }


    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        mSurfaceHolder = holder;
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {}@Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {}public void start(View view) {
        if(! ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
            ActivityCompat.requestPermissions(this.new String[]{Manifest.permission.CAMERA}, REQUEST_CODE_CAMERA);
        } else{ initSocket(); }}}Copy the code

Socket

Basically, it turns the camera data into H265 and sends it out

public class PushSocket implements Camera.PreviewCallback {
    private static final String TAG = "PushSocket";
    private WebSocket mWebSocket;

    private Camera mCamera;
    private Camera.Size mSize;

    private SurfaceHolder mSurfaceHolder;
    private byte[] mBuffer;
    private byte[] nv12;
    private MediaCodec mMediaCodec;
    private byte[] mYuv420;
    /** * port number */
    private static final int PORT = 13001;

    private Context mContext;

    public PushSocket(Context context, SurfaceHolder surfaceHolder) {
        mContext = context;
        mSurfaceHolder = surfaceHolder;
    }

    public void start(a) {

        webSocketServer.start();
        initCamera();
    }

    private WebSocketServer webSocketServer = new WebSocketServer(new InetSocketAddress(PORT)) {
        @Override
        public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
            mWebSocket = webSocket;
        }

        @Override
        public void onClose(WebSocket conn, int code, String reason, boolean remote) {
            Log.i(TAG, "OnClose: close the socket");
        }

        @Override
        public void onMessage(WebSocket webSocket, String message) {}@Override
        public void onError(WebSocket conn, Exception e) {
            Log.i(TAG, "onError: " + e.toString());
        }

        @Override
        public void onStart(a) {}};/** * Send data **@param bytes
     */
    public void sendData(byte[] bytes) {
        if(mWebSocket ! =null&& mWebSocket.isOpen()) { mWebSocket.send(bytes); }}/** * Close the Socket */
    public void close(a) {
        try {
            mWebSocket.close();
            webSocketServer.stop();
        } catch(IOException | InterruptedException e) { e.printStackTrace(); }}private void initCamera(a) {
        mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        Camera.Parameters parameters = mCamera.getParameters();
        mSize = parameters.getPreviewSize();
        try {
            mCamera.setPreviewDisplay(mSurfaceHolder);
            // Adjust the preview direction because the camera manufacturers are horizontal
            mCamera.setDisplayOrientation(90);
            // Buffer data
            mBuffer = new byte[mSize.width * mSize.height * 3 / 2];
            mCamera.addCallbackBuffer(mBuffer);
            mCamera.setPreviewCallbackWithBuffer(this);
// What about the output data
            mCamera.startPreview();

            // The width and height of the camera are used, so I write it here
            initEncode();
        } catch(IOException e) { e.printStackTrace(); }}private void initEncode(a) {
        try {
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
            // Since the camera data is rotated, the width and height here will become the width and width
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, mSize.height, mSize.width);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1080 * 1920);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
            mMediaCodec.configure(mediaFormat, null.null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mMediaCodec.start();
            // Because YUV420 takes up 3/2 bytes per pixel
            int bufferLength = mSize.width*mSize.height*3/2;
            nv12 = new byte[bufferLength];
            mYuv420 = new byte[bufferLength];
        } catch(IOException e) { e.printStackTrace(); }}public int encodeFrame(byte[] input) {

        // Because the data of the camera is NV21, only the camera is in this format, there is no such code in the hardware encoding
        // change to YUV420
        nv12 =YuvUtils.nv21toYUV420(input);
        // Since the camera is horizontal, the data is also horizontal
        YuvUtils.portraitData2Raw(nv12, mYuv420, mSize.width, mSize.height);

        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(100000);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputBufferIndex);
            inputBuffer.clear();
            inputBuffer.put(mYuv420);
            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, mYuv420.length, System.currentTimeMillis(), 0);
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 100000);
        while (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(outputBufferIndex);
            dealFrame(outputBuffer, bufferInfo);
// saveFile(outputBuffer,bufferInfo);
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);

        }
        return 0;
    }
    public static final int NAL_I = 19;
    public static final int NAL_VPS = 32;
    private byte[] vps_sps_pps_buf;

    private void dealFrame(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        int offset = 4;
        if (byteBuffer.get(2) = =0x01) {
            offset = 3;
        }
        int type = (byteBuffer.get(offset) & 0x7E) > >1;
        // vps_sps_pps frame is recorded
        if (type == NAL_VPS) {
            vps_sps_pps_buf = new byte[bufferInfo.size];
            byteBuffer.get(vps_sps_pps_buf);
        } else if (type == NAL_I) {
            // I frame, vpS_SPS_pps frame inserted before I frame
            final byte[] bytes = new byte[bufferInfo.size];
            byteBuffer.get(bytes);

            byte[] newBuf = new byte[vps_sps_pps_buf.length + bytes.length];
            System.arraycopy(vps_sps_pps_buf, 0, newBuf, 0, vps_sps_pps_buf.length);
            System.arraycopy(bytes, 0, newBuf, vps_sps_pps_buf.length, bytes.length);
            sendData(newBuf);
            Log.v(TAG, "I frame video data" + Arrays.toString(bytes));
        } else {
            // B frames and P frames are sent directly
            final byte[] bytes = new byte[bufferInfo.size]; byteBuffer.get(bytes); sendData(bytes); }}@Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        encodeFrame(data);
        mCamera.addCallbackBuffer(data);
    }
    private void saveFile(ByteBuffer buffer,MediaCodec.BufferInfo bufferInfo) {
        byte[] bytes = new byte[bufferInfo.size]; buffer.get(bytes); YuvUtils.writeBytes(bytes,mContext); YuvUtils.writeContent(bytes,mContext); }}Copy the code

YUV tools

Mainly handles the NV21-to-NV12 chroma reordering, the 90-degree rotation, and two small debug file writers

public class YuvUtils {
    private static final String TAG = "YuvUtils";
    static  byte[] yuv420;

    public static byte[] nv21toYUV420(byte[] nv21) {
        int  size = nv21.length;
         yuv420 = new byte[size];
        int len = size * 2 / 3;
        System.arraycopy(nv21, 0, yuv420, 0, len);
        int i = len;
        while(i < size - 1){
            yuv420[i] = nv21[i + 1];
            yuv420[i + 1] = nv21[i];
            i += 2;
        }
        return yuv420;
    }

    public static void portraitData2Raw(byte[] data,byte[] output,int width,int height) {
        int y_len = width * height;
        // The uv data height is half of the y data height
        int uvHeight = height >> 1;
        int k = 0;
        for (int j = 0; j < width; j++) {
            for (int i = height - 1; i >= 0; i--) { output[k++] = data[width * i + j]; }}for (int j = 0; j < width; j += 2) {
            for (int i = uvHeight - 1; i >= 0; i--) {
                output[k++] = data[y_len + width * i + j];
                output[k++] = data[y_len + width * i + j + 1]; }}}public  static  void writeBytes(byte[] array, Context context) {
        FileOutputStream writer = null;
        try {
            // Open a file writer. The second argument in the constructor, true, means to append the file
            writer = new FileOutputStream(context.getFilesDir() + "/codec.h265".true);
            writer.write(array);
            writer.write('\n');


        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if(writer ! =null) { writer.close(); }}catch(IOException e) { e.printStackTrace(); }}}public  static String writeContent(byte[] array, Context context) {
        char[] HEX_CHAR_TABLE = {
                '0'.'1'.'2'.'3'.'4'.'5'.'6'.'7'.'8'.'9'.'A'.'B'.'C'.'D'.'E'.'F'
        };
        StringBuilder sb = new StringBuilder();
        for (byte b : array) {
            sb.append(HEX_CHAR_TABLE[(b & 0xf0) > >4]);
            sb.append(HEX_CHAR_TABLE[b & 0x0f]);
        }
        Log.i(TAG, "writeContent: " + sb.toString());
        FileWriter writer = null;
        try {
            // Open a file writer. The second argument in the constructor, true, means to append the file
            writer = new FileWriter(context.getFilesDir() + "/codecH265.txt".true);
            writer.write(sb.toString());
            writer.write("\n");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if(writer ! =null) { writer.close(); }}catch(IOException e) { e.printStackTrace(); }}returnsb.toString(); }}Copy the code

This project also uses the Java-WebSocket library.

Add the dependency below to the app module's build.gradle, and don't forget to declare the INTERNET permission in the manifest file.

implementation "org.java-websocket:Java-WebSocket:1.4.0"

The source code is available on GitHub.