With the rise of paperless offices, smart classrooms, and similar scenarios, many companies and developers are looking for a more efficient, stable, low-latency RTMP screen-mirroring solution. This article uses the screen-mirroring demo of the daniulive SDK (Github) (corresponding project: SmartServicePublisherV2) as an example to describe how to capture screen data on Android, encode it, and push it to a streaming media server over RTMP.
System requirements: Android 5.0 or above (the MediaProjection API used here requires API level 21).
Without further ado, let's get to the code.
First, get the screen window width and height; if scaling is needed, they can be reduced by a given ratio:
private void createScreenEnvironment() {
    screenWindowWidth = mWindowManager.getDefaultDisplay().getWidth();
    screenWindowHeight = mWindowManager.getDefaultDisplay().getHeight();
    Log.i(TAG, "screenWindowWidth: " + screenWindowWidth + ", screenWindowHeight: " + screenWindowHeight);

    if (screenWindowWidth > 800) {
        if (screenResolution == SCREEN_RESOLUTION_STANDARD) {
            scale_rate = SCALE_RATE_HALF;
            screenWindowWidth = align(screenWindowWidth / 2, 16);
            screenWindowHeight = align(screenWindowHeight / 2, 16);
        } else if (screenResolution == SCREEN_RESOLUTION_LOW) {
            scale_rate = SCALE_RATE_TWO_FIFTHS;
            screenWindowWidth = align(screenWindowWidth * 2 / 5, 16);
            screenWindowHeight = align(screenWindowHeight * 2 / 5, 16);  // scale height as well to keep the aspect ratio
        }
    }

    Log.i(TAG, "After adjust, width: " + screenWindowWidth + ", height: " + screenWindowHeight);

    int pf = mWindowManager.getDefaultDisplay().getPixelFormat();
    Log.i(TAG, "display format:" + pf);

    DisplayMetrics displayMetrics = new DisplayMetrics();
    mWindowManager.getDefaultDisplay().getMetrics(displayMetrics);
    mScreenDensity = displayMetrics.densityDpi;

    // 0x1 == PixelFormat.RGBA_8888; allow up to 6 images to be in flight at once
    mImageReader = ImageReader.newInstance(screenWindowWidth, screenWindowHeight, 0x1, 6);

    mMediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
}
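The align() helper used above is part of the demo project and is not shown here. A minimal sketch, assuming it simply rounds a value up to the nearest multiple so that width and height stay 16-aligned for the encoder:

private static int align(int value, int alignment) {
    // Round value up to the nearest multiple of alignment (e.g. 16)
    return (value + alignment - 1) / alignment * alignment;
}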
Next, set up the MediaProjection and virtual display, get the image data, and pass it to processScreenImage():
private void setupMediaProjection() {
    mMediaProjection = mMediaProjectionManager.getMediaProjection(
            MainActivity.mResultCode, MainActivity.mResultData);
}

private void setupVirtualDisplay() {
    mVirtualDisplay = mMediaProjection.createVirtualDisplay(
            "ScreenCapture", screenWindowWidth, screenWindowHeight,
            mScreenDensity,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
            mImageReader.getSurface(), null, null);

    mImageReader.setOnImageAvailableListener(
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image = mImageReader.acquireLatestImage();
                    if (image != null) {
                        processScreenImage(image);
                        // Do NOT close the image here; it is closed after it has been consumed
                        // image.close();
                    }
                }
            }, null);
}
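processScreenImage() itself is not shown in the demo snippet; judging from the queueing code that follows, it presumably just hands the frame over to the image list. A minimal sketch under that assumption:

// Sketch (assumption): forward the captured frame to the image queue;
// pushImage() takes ownership and closes any frames it drops.
private void processScreenImage(Image image) {
    pushImage(image);
}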
Put the data into the image list; older frames are dropped and closed so the queue never holds more than one pending frame:
private void pushImage(Image image) {
    if (null == image)
        return;

    final int image_list_max_count = 1;
    LinkedList<Image> close_images = null;

    synchronized (image_list_lock) {
        if (image_list.size() > image_list_max_count) {
            close_images = new LinkedList<Image>();
            while (image_list.size() > image_list_max_count) {
                close_images.add(image_list.poll());
            }
        }
        image_list.add(image);
    }

    // Close the dropped frames outside the lock
    if (close_images != null) {
        while (!close_images.isEmpty()) {
            Image i = close_images.poll();
            if (i != null) {
                i.close();
                // Log.i("PushImage", "drop image");
            }
        }
    }
}
Call the RTMP initialization and parameter-setting interfaces of the daniulive SDK:
libPublisher = new SmartPublisherJniV2();

private void InitAndSetConfig() {
    // When opening the publisher instance, specify whether to capture audio and/or video
    publisherHandle = libPublisher.SmartPublisherOpen(this.getApplicationContext(),
            audio_opt, video_opt, screenWindowWidth, screenWindowHeight);

    if (publisherHandle == 0) {
        return;
    }

    Log.i(TAG, "publisherHandle=" + publisherHandle);

    libPublisher.SetSmartPublisherEventCallbackV2(publisherHandle, new EventHandeV2());

    if (videoEncodeType == 1) {
        int h264HWKbps = setHardwareEncoderKbps(true, screenWindowWidth, screenWindowHeight);
        Log.i(TAG, "h264HWKbps: " + h264HWKbps);
        int isSupportH264HWEncoder = libPublisher
                .SetSmartPublisherVideoHWEncoder(publisherHandle, h264HWKbps);
        if (isSupportH264HWEncoder == 0) {
            Log.i(TAG, "Great, it supports H.264 hardware encoder!");
        }
    } else if (videoEncodeType == 2) {
        int hevcHWKbps = setHardwareEncoderKbps(false, screenWindowWidth, screenWindowHeight);
        Log.i(TAG, "hevcHWKbps: " + hevcHWKbps);
        int isSupportHevcHWEncoder = libPublisher
                .SetSmartPublisherVideoHevcHWEncoder(publisherHandle, hevcHWKbps);
        if (isSupportHevcHWEncoder == 0) {
            Log.i(TAG, "Great, it supports HEVC hardware encoder!");
        }
    }

    if (is_sw_vbr_mode) {
        int is_enable_vbr = 1;
        int video_quality = CalVideoQuality(screenWindowWidth, screenWindowHeight, true);
        int vbr_max_bitrate = CalVbrMaxKBitRate(screenWindowWidth, screenWindowHeight);
        libPublisher.SmartPublisherSetSwVBRMode(publisherHandle, is_enable_vbr, video_quality, vbr_max_bitrate);
    }

    // For the audio options, see the SmartPublisher project:
    /*
    if (!is_speex) {
        // set AAC encoder
        libPublisher.SmartPublisherSetAudioCodecType(publisherHandle, 1);
    } else {
        // set Speex encoder
        libPublisher.SmartPublisherSetAudioCodecType(publisherHandle, 2);
        libPublisher.SmartPublisherSetSpeexEncoderQuality(publisherHandle, 8);
    }

    libPublisher.SmartPublisherSetNoiseSuppression(publisherHandle, is_noise_suppression ? 1 : 0);
    libPublisher.SmartPublisherSetAGC(publisherHandle, is_agc ? 1 : 0);
    */

    // libPublisher.SmartPublisherSetClippingMode(publisherHandle, 0);
    // libPublisher.SmartPublisherSetSWVideoEncoderProfile(publisherHandle, sw_video_encoder_profile);
    // libPublisher.SmartPublisherSetSWVideoEncoderSpeed(publisherHandle, sw_video_encoder_speed);
    // libPublisher.SetRtmpPublishingType(publisherHandle, 0);

    libPublisher.SmartPublisherSetFPS(publisherHandle, 18);           // The frame rate can be adjusted
    libPublisher.SmartPublisherSetGopInterval(publisherHandle, 18 * 3);
    // libPublisher.SmartPublisherSetSWVideoBitRate(publisherHandle, 1200, 2400); // For software encoding, the max bitrate is usually twice the average bitrate
    libPublisher.SmartPublisherSetSWVideoEncoderSpeed(publisherHandle, 3);
    // libPublisher.SmartPublisherSaveImageFlag(publisherHandle, 1);
}
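setHardwareEncoderKbps(), CalVideoQuality() and CalVbrMaxKBitRate() are demo helpers that derive bitrate and quality settings from the current resolution; they are not shown above. A rough sketch of the first one (the thresholds and kbps values here are illustrative assumptions, not the demo's exact numbers):

// Sketch (assumption): pick an encoder bitrate in kbps from the frame area;
// HEVC typically needs a somewhat lower bitrate than H.264 for similar quality.
private int setHardwareEncoderKbps(boolean isH264, int width, int height) {
    int area = width * height;
    int kbps;
    if (area <= 640 * 480) {
        kbps = 1000;
    } else if (area <= 1280 * 720) {
        kbps = 2000;
    } else {
        kbps = 4000;
    }
    return isH264 ? kbps : kbps * 4 / 5;
}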
After initialization and parameter setting, set the RTMP push URL and call the SmartPublisherStartPublisher() interface to start pushing:
// When pushing and recording at the same time, InitAndSetConfig() needs to be called only once
InitAndSetConfig();

if (publisherHandle == 0) {
    stopScreenCapture();
    return;
}

if (push_type == PUSH_TYPE_RTMP) {
    String publishURL = intent.getStringExtra("PUBLISHURL");
    Log.i(TAG, "publishURL: " + publishURL);

    if (libPublisher.SmartPublisherSetURL(publisherHandle, publishURL) != 0) {
        stopScreenCapture();
        Log.e(TAG, "Failed to set publish stream URL..");
        if (publisherHandle != 0) {
            if (libPublisher != null) {
                libPublisher.SmartPublisherClose(publisherHandle);
                publisherHandle = 0;
            }
        }
        return;
    }
}

// Start the thread that passes data to the SDK
post_data_thread = new Thread(new DataRunnable());
Log.i(TAG, "new post_data_thread..");
is_post_data_thread_alive = true;
post_data_thread.start();
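The push URL reaches the service through the "PUBLISHURL" intent extra read above, so the capture service is presumably started from the activity along these lines (a sketch; the ScreenService class name and the example URL are assumptions):

// Sketch: start the capture/push service and pass the RTMP URL.
Intent intent = new Intent(this, ScreenService.class); // hypothetical service class name
intent.putExtra("PUBLISHURL", "rtmp://your.server.com/live/stream1");
startService(intent); // on Android 8.0+ a foreground service may be required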
Then start the publisher:

int startRet = libPublisher.SmartPublisherStartPublisher(publisherHandle);
if (startRet != 0) {
    isPushingRtmp = false;
    Log.e(TAG, "Failed to start push rtmp stream..");
    return;
}

// When pushing and recording at the same time, the audio capture only needs to start once
CheckInitAudioRecorder();
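MainActivity.mResultCode and MainActivity.mResultData used by setupMediaProjection() come from the standard screen-capture permission request. A minimal sketch of that flow in MainActivity (the request code value is an assumption):

private static final int REQUEST_MEDIA_PROJECTION = 1;
public static int mResultCode;
public static Intent mResultData;

private void requestScreenCapture() {
    MediaProjectionManager mpm =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system dialog asking the user to allow screen capture
    startActivityForResult(mpm.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == Activity.RESULT_OK) {
        // Stash the grant so the service can build its MediaProjection
        mResultCode = resultCode;
        mResultData = data;
    }
    super.onActivityResult(requestCode, resultCode, data);
}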
Once pushing has started, pass the data to the underlying SDK:
public class DataRunnable implements Runnable {

    private final static String TAG = "DataRunnable==> ";

    @Override
    public void run() {
        Log.i(TAG, "post data thread is running..");

        Image last_image = null;
        long last_post_time = System.currentTimeMillis();

        while (is_post_data_thread_alive) {
            boolean is_skip = false;

            /* Older ByteBuffer-based variant, kept commented out in the demo:
            ByteBuffer last_buffer = null;
            synchronized (data_list_lock) {
                if (data_list.isEmpty()) {
                    if ((System.currentTimeMillis() - last_post_time) > frame_added_interval_setting) {
                        if (last_buffer != null) {
                            Log.i(TAG, "Frame filling...");
                        } else {
                            is_skip = true;
                        }
                    } else {
                        is_skip = true;
                    }
                } else {
                    last_buffer = data_list.get(0);
                    data_list.remove(0);
                }
            }
            */

            Image new_image = popImage();
            if (new_image == null) {
                // No new frame: if nothing was posted for a while, repost the last frame
                if ((System.currentTimeMillis() - last_post_time) > frame_added_interval_setting) {
                    if (last_image != null) {
                        Log.i(TAG, "Frame filling...");
                    } else {
                        is_skip = true;
                    }
                } else {
                    is_skip = true;
                }
            } else {
                // Got a new frame: release the previous one and keep the latest
                if (last_image != null) {
                    last_image.close();
                }
                last_image = new_image;
            }

            if (is_skip) {
                // Log.i("OnScreenImage", "is_skip");
                try {
                    Thread.sleep(5);    // sleep 5 ms
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            } else {
                if (last_image != null && publisherHandle != 0
                        && (isPushingRtmp || isRecording || isRTSPPublisherRunning)) {
                    long post_begin_time = System.currentTimeMillis();

                    final Image.Plane[] planes = last_image.getPlanes();
                    if (planes != null && planes.length > 0) {
                        // Pass the RGBA plane along with its row stride (rows may be padded)
                        libPublisher.SmartPublisherOnCaptureVideoRGBAData(publisherHandle,
                                planes[0].getBuffer(), planes[0].getRowStride(),
                                last_image.getWidth(), last_image.getHeight());
                    }

                    last_post_time = System.currentTimeMillis();
                    long post_cost_time = last_post_time - post_begin_time;

                    // Throttle the loop to roughly 10 ms per iteration
                    if (post_cost_time >= 0 && post_cost_time < 10) {
                        try {
                            Thread.sleep(10 - post_cost_time);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }

                    /* Clipped-region variant, kept commented out in the demo:
                    libPublisher.SmartPublisherOnCaptureVideoRGBAData(publisherHandle, last_buffer, row_stride_, width_, height_);

                    int left = 100;
                    int top = 0;
                    int cliped_left = 0;
                    int cliped_top = 0;
                    int cliped_width = width_;
                    int cliped_height = height_;

                    if (scale_rate == SCALE_RATE_HALF) {
                        cliped_left = left / 2;
                        cliped_top = top / 2;
                        // cliped_width = (width_ * 3) / 4;
                        // cliped_height = height_;
                    } else if (scale_rate == SCALE_RATE_TWO_FIFTHS) {
                        cliped_left = left * 2 / 5;
                        cliped_top = top * 2 / 5;
                        // cliped_width = (width_ * 3) / 4;
                        // cliped_height = height_;
                    }

                    // The clipped width and height must be even
                    if (cliped_width % 2 != 0) {
                        cliped_width = cliped_width + 1;
                    }
                    if (cliped_height % 2 != 0) {
                        cliped_height = cliped_height + 1;
                    }

                    if ((cliped_left + cliped_width) > width_) {
                        Log.e(TAG, "invalid cliped region settings, cliped_left: " + cliped_left
                                + " cliped_width:" + cliped_width + " width:" + width_);
                        return;
                    }
                    if ((cliped_top + cliped_height) > height_) {
                        Log.e(TAG, "invalid cliped region settings, cliped_top: " + cliped_top
                                + " cliped_height:" + cliped_height + " height:" + height_);
                        return;
                    }

                    // Log.i(TAG, " clipLeft: " + cliped_left + " clipTop: " + cliped_top
                    //         + " clipWidth: " + cliped_width + " clipHeight: " + cliped_height);

                    libPublisher.SmartPublisherOnCaptureVideoClipedRGBAData(publisherHandle, last_buffer,
                            row_stride_, width_, height_,
                            cliped_left, cliped_top, cliped_width, cliped_height);
                    */

                    // Log.i(TAG, "post data: " + last_post_time + " cost:" + post_cost_time);
                } else {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        if (last_image != null) {
            last_image.close();
            last_image = null;
        }
    }
}
Finally, stop capture and push:
@Override
public void onDestroy() {
    Log.i(TAG, "Service stopped..");

    stopScreenCapture();
    clearAllImages();

    if (is_post_data_thread_alive && post_data_thread != null) {
        Log.i(TAG, "onDestroy close post_data_thread++");
        is_post_data_thread_alive = false;
        try {
            post_data_thread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        post_data_thread = null;
        Log.i(TAG, "onDestroy post_data_thread closed--");
    }

    if (isPushingRtmp || isRecording || isRTSPPublisherRunning) {
        if (audioRecord_ != null) {
            Log.i(TAG, "onDestroy, call StopRecording..");
            audioRecord_.Stop();
            if (audioRecordCallback_ != null) {
                audioRecord_.RemoveCallback(audioRecordCallback_);
                audioRecordCallback_ = null;
            }
            audioRecord_ = null;
        }

        stopPush();
        isPushingRtmp = false;

        stopRecorder();
        isRecording = false;

        stopRtspPublisher();
        isRTSPPublisherRunning = false;

        stopRtspService();
        isRTSPServiceRunning = false;

        if (publisherHandle != 0) {
            if (libPublisher != null) {
                libPublisher.SmartPublisherClose(publisherHandle);
                publisherHandle = 0;
            }
        }
    }

    libPublisher.UnInitRtspServer();

    super.onDestroy();
}
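stopScreenCapture() called in onDestroy() is also not shown in the demo snippets; tearing down the pipeline presumably releases the virtual display and stops the projection, roughly like this:

// Sketch (assumption): release what setupVirtualDisplay()/setupMediaProjection() created
private void stopScreenCapture() {
    if (mVirtualDisplay != null) {
        mVirtualDisplay.release();
        mVirtualDisplay = null;
    }
    if (mMediaProjection != null) {
        mMediaProjection.stop();
        mMediaProjection = null;
    }
}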
The above covers the general workflow of screen capture, encoding, and push on the Android platform. Interested developers can use it as a reference.