Some developers have asked that, on top of the RTMP push module, the SDK also call back the encoded audio and video data, so that they can integrate with third-party systems such as GB28181.

To support this, we added the following interfaces:

1. Set the audio and video encoded data callbacks

Corresponding interfaces:

	/**
	 * Set Audio Encoded Data Callback.
	 *
	 * @param audio_encoded_data_callback: Audio Encoded Data Callback.
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherSetAudioEncodedDataCallback(long handle, Object audio_encoded_data_callback);

	/**
	 * Set Video Encoded Data Callback.
	 *
	 * @param video_encoded_data_callback: Video Encoded Data Callback.
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherSetVideoEncodedDataCallback(long handle, Object video_encoded_data_callback);

Set the callbacks:

libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());

libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

2. Implement PublisherAudioEncodedDataCallback and PublisherVideoEncodedDataCallback:

    class PublisherAudioEncodedDataCallback implements NTAudioDataCallback
    {
        private int audio_buffer_size = 0;
        private int param_info_size = 0;

        private ByteBuffer audio_buffer_ = null;
        private ByteBuffer parameter_info_ = null;

        @Override
        public ByteBuffer getAudioByteBuffer(int size)
        {
            //Log.i("getAudioByteBuffer", "size: " + size);

            if( size < 1 )
            {
                return null;
            }

            if( size <= audio_buffer_size && audio_buffer_ != null )
            {
                return audio_buffer_;
            }

            audio_buffer_size = size + 512;
            audio_buffer_size = (audio_buffer_size+0xf) & (~0xf);

            audio_buffer_ = ByteBuffer.allocateDirect(audio_buffer_size);

            // Log.i("getAudioByteBuffer", "size: " + size + " buffer_size:" + audio_buffer_size);

            return audio_buffer_;
        }

        @Override
        public ByteBuffer getAudioParameterInfo(int size)
        {
            //Log.i("getAudioParameterInfo", "size: " + size);

            if(size < 1)
            {
                return null;
            }

            if( size <= param_info_size && parameter_info_ != null )
            {
                return  parameter_info_;
            }

            param_info_size = size + 32;
            param_info_size = (param_info_size+0xf) & (~0xf);

            parameter_info_ = ByteBuffer.allocateDirect(param_info_size);

            //Log.i("getAudioParameterInfo", "size: " + size + " buffer_size:" + param_info_size);

            return parameter_info_;
        }

        public void onAudioDataCallback(int ret, int audio_codec_id, int sample_size, int is_key_frame, long timestamp, int sample_rate, int channel, int parameter_info_size, long reserve)
        {
            Log.i("onAudioDataCallback"."ret: " + ret + ", audio_codec_id: " + audio_codec_id + ", sample_size: " + sample_size + ", timestamp: " + timestamp +
            		",sample_rate:" + sample_rate + ",chn: " + channel + ", parameter_info_size:" + parameter_info_size);

            if ( audio_buffer_ == null)
                return;

            audio_buffer_.rewind();

            if (ret == 0 && publisherHandle2 != 0) {
                libPublisher.SmartPublisherPostAudioEncodedData(publisherHandle2, audio_codec_id, audio_buffer_, sample_size, is_key_frame, timestamp, parameter_info_, parameter_info_size);
            }
        }
    }

    class PublisherVideoEncodedDataCallback implements NTVideoDataCallback
    {
        private int video_buffer_size = 0;

        private ByteBuffer video_buffer_ = null;

        @Override
        public ByteBuffer getVideoByteBuffer(int size)
        {
            //Log.i("getVideoByteBuffer", "size: " + size);

            if( size < 1 )
            {
                return null;
            }

            if( size <= video_buffer_size && video_buffer_ != null )
            {
                return  video_buffer_;
            }

            video_buffer_size = size + 1024;
            video_buffer_size = (video_buffer_size+0xf) & (~0xf);

            video_buffer_ = ByteBuffer.allocateDirect(video_buffer_size);

            // Log.i("getVideoByteBuffer", "size: " + size + " buffer_size:" + video_buffer_size);

            return video_buffer_;
        }

        public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame, long timestamp, int width, int height, long presentation_timestamp)
        {
            Log.i("onVideoDataCallback"."ret: " + ret + ", video_codec_id: " + video_codec_id + ", sample_size: " + sample_size + ", is_key_frame: "+ is_key_frame +  ", timestamp: " + timestamp +
            		",width: " + width + ", height:" + height + ",presentation_timestamp:" + presentation_timestamp);

            if ( video_buffer_ == null)
                return;

            video_buffer_.rewind();

            if (ret == 0 && publisherHandle2 != 0) {
                libPublisher.SmartPublisherPostVideoEncodedData(publisherHandle2, video_codec_id, video_buffer_, sample_size, is_key_frame, timestamp, presentation_timestamp);
            }
        }
    }
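
If the goal is to hand the encoded frames to a third-party system such as GB28181 rather than re-post them to a second publisher instance, the callback body only needs to copy the valid bytes out of the direct ByteBuffer before the SDK reuses it. A minimal sketch of such a video callback, where forwardToThirdParty() is a hypothetical hook for your own transport (not part of the SDK):

    public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame,
                                    long timestamp, int width, int height, long presentation_timestamp)
    {
        if (ret != 0 || video_buffer_ == null || sample_size < 1)
            return;

        video_buffer_.rewind();

        // Copy only the valid bytes; the direct buffer may be reused for subsequent frames.
        byte[] frame = new byte[sample_size];
        video_buffer_.get(frame, 0, sample_size);

        // Hypothetical hook: hand the encoded frame to your own GB28181 (or other) sender.
        // forwardToThirdParty(video_codec_id, frame, is_key_frame, timestamp, presentation_timestamp);
    }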

3. Provide interfaces to start and stop the encoded data callback:

	/**
	 * Start output Encoded Data
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherStartOutputEncodedData(long handle);

	/**
	 *  Stop output Encoded Data
	 *
	 * @return {0} if successful
	 */
	public native int SmartPublisherStopOutputEncodedData(long handle);
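
Putting the interfaces together, the expected call order is: register the callbacks, start the encoded data output, and stop it when it is no longer needed. A minimal sketch using only the calls shown above, assuming publisherHandle was already obtained from SmartPublisherOpen():

    // Register the callbacks implemented in step 2.
    libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
    libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

    // Start delivering encoded frames to the callbacks.
    if (libPublisher.SmartPublisherStartOutputEncodedData(publisherHandle) != 0) {
        Log.e(TAG, "Failed to start encoded data output.");
    }

    // ... encoded audio/video frames now arrive in onAudioDataCallback / onVideoDataCallback ...

    // Stop delivering encoded frames when done.
    libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);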

4. Upper-layer Demo invocation example:

    class ButtonEncodedDataCallbackListener implements OnClickListener {
        public void onClick(View v) {
            if (isEncodedDatacallbackRunning) {
                stopEncodedDataCallback();

                if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                    ConfigControlEnable(true);
                }

                btnEncodedDataCallback.setText("Start encoding data callback");
                isEncodedDatacallbackRunning = false;

                if (publisherHandle2 != 0) {
                    libPublisher.SmartPublisherStopPublisher(publisherHandle2);
                    libPublisher.SmartPublisherClose(publisherHandle2);
                    publisherHandle2 = 0;
                }

                return;
            }

            Log.i(TAG, "onClick start encoded data callback..");

            if (libPublisher == null)
                return;

            if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                InitAndSetConfig();
            }

            libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
            libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

            int startRet = libPublisher.SmartPublisherStartOutputEncodedData(publisherHandle);
            if (startRet != 0) {
                isEncodedDatacallbackRunning = false;

                Log.e(TAG, "Failed to start encoded data callback.");
                return;
            }

            if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                if (pushType == 0 || pushType == 1) {
                    CheckInitAudioRecorder();    //enable pure video publisher..
                }

                ConfigControlEnable(false);
            }

            btnEncodedDataCallback.setText("Stop encoding data callback");
            isEncodedDatacallbackRunning = true;

            int audio_opt = 2;
            int video_opt = 2;

            publisherHandle2 = libPublisher.SmartPublisherOpen(myContext, audio_opt, video_opt,
                    videoWidth, videoHeight);

            if (publisherHandle2 == 0) {
                Log.e(TAG, "sdk open failed!");
                return;
            }

            String relayUrl = "rtmp://player.daniulive.com:1935/hls/stream8888";

            libPublisher.SmartPublisherSetURL(publisherHandle2, relayUrl);
            libPublisher.SmartPublisherStartPublisher(publisherHandle2);
        }
    }
    // Stop the post-encoding data callback
    private void stopEncodedDataCallback() {
        if (!isEncodedDatacallbackRunning) {
            return;
        }
        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            if (audioRecord_ != null) {
                Log.i(TAG, "stopRecorder, call audioRecord_.StopRecording..");

                audioRecord_.Stop();

                if (audioRecordCallback_ != null) {
                    audioRecord_.RemoveCallback(audioRecordCallback_);
                    audioRecordCallback_ = null;
                }

                audioRecord_ = null;
            }
        }

        if (libPublisher != null) {
            libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);
        }

        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            if (publisherHandle != 0) {
                if (libPublisher != null) {
                    libPublisher.SmartPublisherClose(publisherHandle);
                    publisherHandle = 0;
                }
            }
        }
    }

For ease of demonstration, this demo starts a second publisher instance and feeds the callback's encoded audio and video data into it via the encoded-data post interfaces (SmartPublisherPostAudioEncodedData / SmartPublisherPostVideoEncodedData), so the same data continues to be pushed out over RTMP.

The advantage of this design is that the encoded data callback can be combined with RTMP push, video recording, and the SDK's built-in RTSP service, or used on its own.