Just like RTMP or RTSP playback under Unity3D, many developers struggle with how to capture and encode data in the Unity environment and push it to a streaming media server in real time with high efficiency and low latency, so as to realize a low-latency push/pull streaming solution for Unity scenes.

There are two options for screen capture:

1. Directly wrap a native Android screen-capture project, expose its interface in Unity, obtain the screen-capture permission, grab the screen data, and push it;

2. If you only need the contents of a Unity window or camera, you can fetch the raw data inside Unity, then wrap the native RTMP push interface and call the native SDK to deliver the data. The advantage of this approach is that you can customize exactly what gets captured: as long as you conform to the interface the native SDK provides, the data hand-off is complete. This article describes the concrete implementation of this option.

This article takes the Android platform as an example to introduce RTMP streaming from a Unity environment on Android. Data collection is done in Unity; the encoded data is then pushed by calling the externally re-wrapped interface of the daniu Live SDK (official) Android RTMP live-push native library, giving an efficient RTMP push implementation. Without further ado, let's look at the picture first.

The picture below shows screen capture in the Unity environment on the Android platform; the encoded stream is pushed to an RTMP server, and a Windows-platform player then pulls the RTMP stream for playback. To make the latency easy to observe, the current time is displayed in the Unity window on the Android side; as can be seen, the overall delay is in the millisecond range:

Data collection and push

Data collection in Unity is relatively simple; you can easily get RGB24 data:

texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);
texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);
texture_.Apply();

We then call texture_.GetRawTextureData() to get the data.

With the data in hand, call the NT_PB_U3D_OnCaptureVideoRGB24PtrData() interface wrapped around the native SDK to complete the data delivery.
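To tie the two steps together, here is a minimal per-frame capture-loop sketch. The WaitForEndOfFrame coroutine and the exact parameter list of NT_PB_U3D_OnCaptureVideoRGB24PtrData() (handle, buffer pointer, stride, length) are assumptions for illustration only; match them to the signature the actual native wrapper exposes. It requires using System.Collections; and using System.Runtime.InteropServices;.

// Minimal sketch, assuming the wrapper takes (handle, ptr, stride, length);
// the real NT_PB_U3D_OnCaptureVideoRGB24PtrData() signature may differ.
private IEnumerator CaptureLoop()
{
    while (is_running)
    {
        // ReadPixels is only valid after rendering has finished
        yield return new WaitForEndOfFrame();

        texture_.ReadPixels(new Rect(0, 0, video_width_, video_height_), 0, 0, false);
        texture_.Apply();

        // Note: Unity stores raw texture rows bottom-up; the native side
        // may need a vertical flip before encoding.
        byte[] raw = texture_.GetRawTextureData();

        // Pin the managed buffer so the native layer can read it safely
        GCHandle pinned = GCHandle.Alloc(raw, GCHandleType.Pinned);
        try
        {
            NT_PB_U3D_OnCaptureVideoRGB24PtrData(pusher_handle_,
                pinned.AddrOfPinnedObject(), video_width_ * 3, raw.Length);
        }
        finally
        {
            pinned.Free();
        }
    }
}

A StartCoroutine(CaptureLoop()) after a successful NT_PB_U3D_StartPublisher() call would then drive the delivery once per rendered frame.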

Simple call process

    private void Start()
    {
        game_object_ = this.gameObject.name;

        AndroidJavaClass android_class = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        java_obj_cur_activity_ = android_class.GetStatic<AndroidJavaObject>("currentActivity");
        pusher_obj_ = new AndroidJavaObject("com.daniulive.smartpublisher.SmartPublisherUnity3d");

        NT_PB_U3D_Init();

        //NT_U3D_SetSDKClientKey("", "", 0);

        btn_encode_mode_.onClick.AddListener(OnEncodeModeBtnClicked);

        btn_pusher_.onClick.AddListener(OnPusherBtnClicked);

        btn_mute_.onClick.AddListener(OnMuteBtnClicked);
    }
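The snippets in this article reference a number of member fields without declaring them; a plausible set of declarations, with the types guessed purely for illustration, might look like this:

// Assumed field declarations (types are guesses, not from the original project).
// Requires: using UnityEngine; using UnityEngine.UI;
private string game_object_;                      // GameObject name, for native callbacks
private AndroidJavaObject java_obj_cur_activity_; // current Android Activity
private AndroidJavaObject pusher_obj_;            // Java-side publisher wrapper

private long pusher_handle_ = 0;      // native publisher instance handle
private bool is_running = false;      // push state flag
private bool is_hw_encode_ = false;   // H.264 hardware encoder switch
private bool is_sw_vbr_mode_ = true;  // software encoder VBR switch

private Texture2D texture_;                   // RGB24 capture target
private int video_width_, video_height_;      // capture size (screen size)
private int scale_width_, scale_height_;      // even-aligned half size

private string push_url_;     // effective rtmp:// URL
public InputField input_url_; // URL input box
public Button btn_encode_mode_, btn_pusher_, btn_mute_; // UI buttons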

After initialization, the Push() interface is called:

public void Push()
{
    if (is_running)
    {
        Debug.Log("Already pushing..");
        return;
    }

    if (texture_ != null)
    {
        UnityEngine.Object.Destroy(texture_);
        texture_ = null;
    }

    video_width_ = Screen.width;
    video_height_ = Screen.height;

    // The encoder generally expects even dimensions (YUV420), so round the
    // half-size values up to the nearest even number.
    scale_width_ = (video_width_ + 1) / 2;
    scale_height_ = (video_height_ + 1) / 2;

    if (scale_width_ % 2 != 0)
    {
        scale_width_ = scale_width_ + 1;
    }

    if (scale_height_ % 2 != 0)
    {
        scale_height_ = scale_height_ + 1;
    }

    texture_ = new Texture2D(video_width_, video_height_, TextureFormat.RGB24, false);

    string url = input_url_.text.Trim();

    if (!url.StartsWith("rtmp://"))
    {
        push_url_ = "rtmp://192.168.0.199:1935/hls/stream1";
    }
    else
    {
        push_url_ = url;
    }

    OpenPusher();

    if (pusher_handle_ == 0)
        return;

    NT_PB_U3D_Set_Game_Object(pusher_handle_, game_object_);

    /* ++ Pre-push parameters can be set starting here ++ */

    InitAndSetConfig();

    NT_PB_U3D_SetPushUrl(pusher_handle_, push_url_);

    /* -- Pre-push parameter setting ends here -- */

    int flag = NT_PB_U3D_StartPublisher(pusher_handle_);

    if (flag == DANIULIVE_RETURN_OK)
    {
        Debug.Log("Push succeeded..");
    }
    else
    {
        Debug.LogError("Push failed..");
    }

    is_running = true;
}

Call OpenPusher():

private void OpenPusher()
{
    if (java_obj_cur_activity_ == null)
    {
        Debug.LogError("getApplicationContext is null");
        return;
    }

    int audio_opt = 1;
    int video_opt = 1;

    pusher_handle_ = NT_PB_U3D_Open(audio_opt, video_opt, video_width_, video_height_);

    if (pusher_handle_ != 0)
        Debug.Log("NT_PB_U3D_Open success");
    else
        Debug.LogError("NT_PB_U3D_Open fail");
}
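The article does not show how the NT_PB_U3D_* calls reach the native library. One common pattern, sketched below purely as an assumption, forwards each call to the SmartPublisherUnity3d Java object created in Start(); the real wrapper may just as well use P/Invoke or different method names.

// Hedged sketch only: forwarding helpers built on the pusher_obj_ Java object.
private void NT_PB_U3D_Init()
{
    pusher_obj_.Call("NT_PB_U3D_Init");
}

private long NT_PB_U3D_Open(int audio_opt, int video_opt, int width, int height)
{
    return pusher_obj_.Call<long>("NT_PB_U3D_Open", audio_opt, video_opt, width, height);
}

private int NT_PB_U3D_StartPublisher(long handle)
{
    return pusher_obj_.Call<int>("NT_PB_U3D_StartPublisher", handle);
}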

InitAndSetConfig()

private void InitAndSetConfig()
{
    if (is_hw_encode_)
    {
        int h264HWKbps = setHardwareEncoderKbps(true, video_width_, video_height_);
        Debug.Log("h264HWKbps: " + h264HWKbps);

        int isSupportH264HWEncoder = NT_PB_U3D_SetVideoHWEncoder(pusher_handle_, h264HWKbps);

        if (isSupportH264HWEncoder == 0)
        {
            Debug.Log("Great, it supports h.264 hardware encoder!");
        }
    }
    else
    {
        if (is_sw_vbr_mode_) // H.264 software encoder
        {
            int is_enable_vbr = 1;
            int video_quality = CalVideoQuality(video_width_, video_height_, true);
            int vbr_max_bitrate = CalVbrMaxKBitRate(video_width_, video_height_);

            NT_PB_U3D_SetSwVBRMode(pusher_handle_, is_enable_vbr, video_quality, vbr_max_bitrate);

            //NT_PB_U3D_SetSWVideoEncoderSpeed(pusher_handle_, 2);
        }
    }

    NT_PB_U3D_SetAudioCodecType(pusher_handle_, 1);
    NT_PB_U3D_SetFPS(pusher_handle_, 25);
    NT_PB_U3D_SetGopInterval(pusher_handle_, 25 * 2);

    //NT_PB_U3D_SetSWVideoBitRate(pusher_handle_, 600, 1200);
}
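setHardwareEncoderKbps(), CalVideoQuality(), and CalVbrMaxKBitRate() are used above but not shown. Resolution-based heuristics along these lines are plausible, though the values here are illustrative assumptions, not the SDK's actual tables:

// Illustrative heuristics only (the real helpers may differ):
// pick a bitrate budget from the pixel count of the capture area.
private int setHardwareEncoderKbps(bool is_h264, int width, int height)
{
    int area = width * height;

    if (area <= 640 * 360)   return 600;
    if (area <= 1280 * 720)  return 1200;
    if (area <= 1920 * 1080) return 2400;
    return 4000; // above 1080p
}

private int CalVideoQuality(int width, int height, bool is_h264)
{
    // Placeholder quality scale; smaller often means higher quality.
    return (width * height <= 1280 * 720) ? 23 : 26;
}

private int CalVbrMaxKBitRate(int width, int height)
{
    // Cap VBR peaks at roughly 1.5x the base budget used above.
    return (setHardwareEncoderKbps(true, width, height) * 3) / 2;
}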

ClosePusher()

private void ClosePusher()
{
    if (texture_ != null)
    {
        UnityEngine.Object.Destroy(texture_);
        texture_ = null;
    }

    int flag = NT_PB_U3D_StopPublisher(pusher_handle_);

    if (flag == DANIULIVE_RETURN_OK)
    {
        Debug.Log("Stop succeeded..");
    }
    else
    {
        Debug.LogError("Stop failed..");
    }

    flag = NT_PB_U3D_Close(pusher_handle_);

    if (flag == DANIULIVE_RETURN_OK)
    {
        Debug.Log("Close succeeded..");
    }
    else
    {
        Debug.LogError("Close failed..");
    }

    pusher_handle_ = 0;

    NT_PB_U3D_UnInit();

    is_running = false;
}
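Not covered in the article, but worth considering: hooking ClosePusher() into the Unity lifecycle so the native publisher is always released, for example:

// Safety net: release the native publisher if the app quits while pushing.
private void OnApplicationQuit()
{
    if (is_running)
    {
        ClosePusher();
    }
}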

For testing purposes, Update() refreshes the current time:

private void Update()
{
    // Get the current time
    hour = DateTime.Now.Hour;
    minute = DateTime.Now.Minute;
    millisecond = DateTime.Now.Millisecond;
    second = DateTime.Now.Second;
    year = DateTime.Now.Year;
    month = DateTime.Now.Month;
    day = DateTime.Now.Day;

    GameObject.Find("Canvas/Panel/LableText").GetComponent<Text>().text =
        string.Format("{0:D2}:{1:D2}:{2:D2}:{3:D2} " + "{4:D4}/{5:D2}/{6:D2}",
            hour, minute, second, millisecond, year, month, day);
}

Related event handling

public void onNTSmartEvent(string param)
{
    if (!param.Contains(","))
    {
        Debug.Log("[onNTSmartEvent] Android passed an invalid parameter");
        return;
    }

    string[] strs = param.Split(',');

    string player_handle = strs[0];
    string code = strs[1];
    string param1 = strs[2];
    string param2 = strs[3];
    string param3 = strs[4];
    string param4 = strs[5];

    Debug.Log("[onNTSmartEvent] code: 0x" + Convert.ToString(Convert.ToInt32(code), 16));

    string publisher_event = "";

    switch (Convert.ToInt32(code))
    {
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STARTED:
            publisher_event = "Started..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTING:
            publisher_event = "Connecting..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTION_FAILED:
            publisher_event = "Connection failed..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTED:
            publisher_event = "Connected successfully..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_DISCONNECTED:
            publisher_event = "Disconnected..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_STOP:
            publisher_event = "Closed..";
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RECORDER_START_NEW_FILE:
            publisher_event = "Started a new recording file: " + param3;
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_ONE_RECORDER_FILE_FINISHED:
            publisher_event = "A recording file has been generated: " + param3;
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_SEND_DELAY:
            publisher_event = "Send delay: " + param1 + ", frames: " + param2;
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_CAPTURE_IMAGE:
            publisher_event = "Snapshot: " + param1 + " path: " + param3;
            if (Convert.ToInt32(param1) == 0)
            {
                publisher_event = publisher_event + ", snapshot capture succeeded..";
            }
            else
            {
                publisher_event = publisher_event + ", snapshot capture failed..";
            }
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUBLISHER_RTSP_URL:
            publisher_event = "RTSP service URL: " + param3;
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_RESPONSE_STATUS_CODE:
            publisher_event = "RTSP status code received, codeID: " + param1 + ", RTSP URL: " + param3;
            break;
        case EVENTID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_NOT_SUPPORT:
            publisher_event = "The server does not support RTSP push: " + param3;
            break;
    }

    Debug.Log(publisher_event);
}

Conclusion

Through the above process, you can take screen or camera data from the Unity environment and get a millisecond-level RTMP push-and-play experience. Interested developers can use this as a reference.