Many developers struggle to build a stable, low-latency RTSP or RTMP player on Windows when creating AR, VR, or education products. Re-developing a player entirely inside Unity3D is costly and time-consuming, which makes it unsuitable for rapid production. We think the best approach at present is to integrate a mature, stable player from the existing native platform, have it call back RGB/YUV data to the upper layer, and let the upper layer do the drawing.

Without further ado, let's take multi-channel playback on Windows as an example:

1. The native player SDK supports calling back uncompressed image data in formats such as RGB/YUV420/NV12

For example, on the Windows platform we use the I420 YUV callback format (NT_SP_E_VIDEO_FRAME_FROMAT_I420, as the enum is spelled in the SDK). This article takes the Windows-platform RTSP/RTMP player SDK as an example. The specific code is as follows:

    /// <summary>
    /// Starts playback for the player instance at index <paramref name="sel"/>:
    /// opens the native player, applies playback parameters, registers the I420
    /// video-frame callback, and calls NT_SP_StartPlay.
    /// </summary>
    /// <param name="sel">Index of the player instance in videoctrl.</param>
    public void Play(int sel)
    {
        if (videoctrl[sel].is_running)
        {
            Debug.Log("Already playing...");
            return;
        }

        // Drop any frame left over from a previous session.
        lock (videoctrl[sel].frame_lock_)
        {
            videoctrl[sel].cur_video_frame_ = null;
        }

        OpenPlayer(sel);

        if (videoctrl[sel].player_handle_ == IntPtr.Zero)
            return;

        // Set the play URL
        NTSmartPlayerSDK.NT_SP_SetURL(videoctrl[sel].player_handle_, videoctrl[sel].videoUrl);

        /* ++ Parameter Settings can be added here ++ */

        int play_buffer_time_ = 100;
        NTSmartPlayerSDK.NT_SP_SetBuffer(videoctrl[sel].player_handle_, play_buffer_time_);                 // Set the buffer time (ms)

        // RTSP transport mode. NOTE(review): fixed the original "/ /" which was not a
        // valid C# comment. Presumably 1 = TCP, 0 = UDP — confirm against the SDK docs.
        int is_using_tcp = 0;
        NTSmartPlayerSDK.NT_SP_SetRTSPTcpMode(videoctrl[sel].player_handle_, is_using_tcp);

        int timeout = 10;
        NTSmartPlayerSDK.NT_SP_SetRtspTimeout(videoctrl[sel].player_handle_, timeout);                      // RTSP timeout

        int is_auto_switch_tcp_udp = 1;
        NTSmartPlayerSDK.NT_SP_SetRtspAutoSwitchTcpUdp(videoctrl[sel].player_handle_, is_auto_switch_tcp_udp);

        bool is_mute_ = false;
        NTSmartPlayerSDK.NT_SP_SetMute(videoctrl[sel].player_handle_, is_mute_ ? 1 : 0);                    // Whether to mute playback

        int is_fast_startup = 1;
        NTSmartPlayerSDK.NT_SP_SetFastStartup(videoctrl[sel].player_handle_, is_fast_startup);              // Set the quick start mode

        bool is_low_latency_ = false;
        NTSmartPlayerSDK.NT_SP_SetLowLatencyMode(videoctrl[sel].player_handle_, is_low_latency_ ? 1 : 0);   // Set whether to enable the low latency mode

        // Set the rotation angle (0, 90, 180, 270 degrees are valid, other values are invalid)
        int rotate_degrees = 0;
        NTSmartPlayerSDK.NT_SP_SetRotation(videoctrl[sel].player_handle_, rotate_degrees);

        int volume = 100;
        NTSmartPlayerSDK.NT_SP_SetAudioVolume(videoctrl[sel].player_handle_, volume);   // Volume range [0, 100]: 0 is mute, 100 is maximum (default 100)

        // Set upload/download rate reporting
        int is_report = 0;
        int report_interval = 1;
        NTSmartPlayerSDK.NT_SP_SetReportDownloadSpeed(videoctrl[sel].player_handle_, is_report, report_interval);
        /* -- Parameter Settings can be added here -- */

        // Video frame callback (YUV/RGB). The delegate is stored in a field so the GC
        // cannot collect it while native code still holds the function pointer.
        videoctrl[sel].video_frame_call_back_ = new SP_SDKVideoFrameCallBack(NT_SP_SetVideoFrameCallBack);
        NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(videoctrl[sel].player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FROMAT_I420, window_handle_, videoctrl[sel].video_frame_call_back_);

        UInt32 flag = NTSmartPlayerSDK.NT_SP_StartPlay(videoctrl[sel].player_handle_);

        if (flag == DANIULIVE_RETURN_OK)
        {
            videoctrl[sel].is_need_get_frame_ = true;
            // Fixed: only mark the instance as running on success; the original set
            // is_running unconditionally, so a failed Play() could never be retried
            // (the guard at the top of this method would return early forever).
            videoctrl[sel].is_running = true;
            Debug.Log("Play successful");
        }
        else
        {
            videoctrl[sel].is_need_get_frame_ = false;
            Debug.LogError("Play failed");
        }
    }
Copy the code

2. Process the callback data

// Video-frame callback invoked by the native SDK for player instance `sel`.
// Marshals the native I420 frame into managed byte arrays (one per plane) and
// publishes it as cur_video_frame_ under frame_lock_ for the render thread.
// (Fixed: the original listing was collapsed onto one line with garbled casing —
// "Private void", "Video_frame" declared but used as "video_frame".)
private void SDKVideoFrameCallBack(UInt32 status, IntPtr frame, int sel)
{
    // Get the callback frame from native memory.
    NT_SP_VideoFrame video_frame = (NT_SP_VideoFrame)Marshal.PtrToStructure(frame, typeof(NT_SP_VideoFrame));

    VideoFrame u3d_frame = new VideoFrame();
    u3d_frame.width_ = video_frame.width_;
    u3d_frame.height_ = video_frame.height_;
    u3d_frame.timestamp_ = (UInt64)video_frame.timestamp_;

    // Destination strides: tightly packed I420 (U/V planes are half width/height,
    // rounded up for odd dimensions).
    int d_y_stride = video_frame.width_;
    int d_u_stride = (video_frame.width_ + 1) / 2;
    int d_v_stride = d_u_stride;

    int d_y_size = d_y_stride * video_frame.height_;
    int d_u_size = d_u_stride * ((video_frame.height_ + 1) / 2);
    int d_v_size = d_u_size;

    int u_v_height = ((u3d_frame.height_ + 1) / 2);

    u3d_frame.y_stride_ = d_y_stride;
    u3d_frame.u_stride_ = d_u_stride;
    u3d_frame.v_stride_ = d_v_stride;

    u3d_frame.y_data_ = new byte[d_y_size];
    u3d_frame.u_data_ = new byte[d_u_size];
    u3d_frame.v_data_ = new byte[d_v_size];

    // Copy plane by plane; the native source stride may be wider (padded) than
    // the packed destination stride.
    CopyFramePlane(u3d_frame.y_data_, d_y_stride, video_frame.plane0_, video_frame.stride0_, u3d_frame.height_);
    CopyFramePlane(u3d_frame.u_data_, d_u_stride, video_frame.plane1_, video_frame.stride1_, u_v_height);
    CopyFramePlane(u3d_frame.v_data_, d_v_stride, video_frame.plane2_, video_frame.stride2_, u_v_height);

    lock (videoctrl[sel].frame_lock_)
    {
        videoctrl[sel].cur_video_frame_ = u3d_frame;
        //Debug.LogError("sel: " + sel + " w:" + u3d_frame.width_ + "h:" + u3d_frame.height_);
    }
}

3. Unity3D creates the corresponding RGB/YUV420 shader and fills the textures with the received image data

    /// <summary>
    /// Uploads the Y/U/V planes of <paramref name="video_frame"/> into the three
    /// per-plane textures of player instance <paramref name="sel"/>; the YUV420
    /// shader recombines them into RGB at draw time.
    /// </summary>
    /// <param name="video_frame">Frame whose y_data_/u_data_/v_data_ byte arrays are uploaded.</param>
    /// <param name="sel">Index of the player instance in videoctrl.</param>
    private void UpdateYUVTexture(VideoFrame video_frame, int sel)
    {
        if (video_frame.y_data_ == null || video_frame.u_data_ == null || video_frame.v_data_ == null)
        {
            Debug.Log("video frame with null..");
            return;
        }

        // Fixed: the original "! =null" comparisons (stray space inside "!=")
        // were not valid C# and would not compile.
        if (videoctrl[sel].yTexture_ != null)
        {
            videoctrl[sel].yTexture_.LoadRawTextureData(video_frame.y_data_);
            videoctrl[sel].yTexture_.Apply();
        }

        if (videoctrl[sel].uTexture_ != null)
        {
            videoctrl[sel].uTexture_.LoadRawTextureData(video_frame.u_data_);
            videoctrl[sel].uTexture_.Apply();
        }

        if (videoctrl[sel].vTexture_ != null)
        {
            videoctrl[sel].vTexture_.LoadRawTextureData(video_frame.v_data_);
            videoctrl[sel].vTexture_.Apply();
        }
    }

4. The specific effects are as follows

conclusion

Under Unity3D, to implement multi-channel playback, first make sure the stream-pulling and decoding module you call is able to call back YUV/RGB data; once the data reaches the upper layer, it can be used directly to refresh the display. It is not as complex as you might think.