background
This paper mainly talks about the technical scheme design of RTMP based on Android platform. The infrastructure diagram is as follows:
Networking Precautions
1. Networking: wireless networking, good AP modules are required to support large concurrent traffic, push end to AP, preferably connected to a cable network;
2. Server deployment: the SRS or NGINX server can be deployed on the same machine as the Windows teacher machine.
3. Teacher: If the teacher has a mobile PAD, it can be directly pushed to the RTMP server and shared.
4. On the student end: Directly pull the RTMP stream from the server and play it.
5. Interaction between teachers and students: If students want to share screen data with other students as a demonstration case, they only need to request the same screen and push the data back to the RTMP server for other students to view.
6. Extended monitoring: If further technical solutions are needed, for example, the teacher side wants to monitor the screen situation of the student side, there are two solutions, such as the student side directly push RTMP, or the student side starts the built-in RTSP service, and the teacher side can watch it at any time when it wants to (or polling and playing).
Android-side integration
How is the push resolution set or scaled?
On Android devices, especially those with high-resolution screens, the original video width and height are very large. Pushing at the original resolution puts great pressure on both encoding and upload bandwidth, so it is generally recommended to scale the video down appropriately, for example to 2/3 of the original width and height.
/**
 * Queries the current display size, scales it down for encoding, and
 * creates the ImageReader / MediaProjectionManager used for capture.
 * Pushing at the full native resolution overloads the encoder and the
 * uplink, so anything wider than 800 px is scaled down.
 */
private void createScreenEnvironment() {
    sreenWindowWidth = mWindowManager.getDefaultDisplay().getWidth();
    screenWindowHeight = mWindowManager.getDefaultDisplay().getHeight();
    Log.i(TAG, "screenWindowWidth: " + sreenWindowWidth + ",screenWindowHeight: "
            + screenWindowHeight);
    if (sreenWindowWidth > 800)
    {
        if (screenResolution == SCREEN_RESOLUTION_STANDARD)
        {
            scale_rate = SCALE_RATE_HALF;
            // Align to 16 so the encoder receives macroblock-friendly dimensions.
            sreenWindowWidth = align(sreenWindowWidth / 2, 16);
            screenWindowHeight = align(screenWindowHeight / 2, 16);
        }
        else if (screenResolution == SCREEN_RESOLUTION_LOW)
        {
            scale_rate = SCALE_RATE_TWO_FIFTHS;
            sreenWindowWidth = align(sreenWindowWidth * 2 / 5, 16);
            // Bug fix: the height was not scaled in LOW mode, which distorted
            // the aspect ratio (the duplicate of this method later in the file
            // scales both dimensions). Scale the height with the same factor.
            screenWindowHeight = align(screenWindowHeight * 2 / 5, 16);
        }
    }
    Log.i(TAG, "After adjust mWindowWidth: " + sreenWindowWidth + ", mWindowHeight: " + screenWindowHeight);
    int pf = mWindowManager.getDefaultDisplay().getPixelFormat();
    Log.i(TAG, "display format:" + pf);
    DisplayMetrics displayMetrics = new DisplayMetrics();
    mWindowManager.getDefaultDisplay().getMetrics(displayMetrics);
    mScreenDensity = displayMetrics.densityDpi;
    // 0x1 == PixelFormat.RGBA_8888; allow up to 6 in-flight images.
    mImageReader = ImageReader.newInstance(sreenWindowWidth,
            screenWindowHeight, 0x1, 6);
    mMediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
}
Copy the code
Horizontal and vertical screen automatic adaptation
The capture width and height are obtained in the current screen orientation. If the orientation changes (for example from portrait to landscape), screen adaptation must be handled so that both the push side and the playback side adapt automatically: the push side needs to restart its encoder with the new dimensions, and the playback side needs to detect the width/height change and continue playing automatically.
/**
 * Handles orientation changes. When a push/record/RTSP session is active,
 * the capture pipeline is torn down and rebuilt so the encoder restarts
 * with the new width/height.
 */
public void onConfigurationChanged(Configuration newConfig) {
    try {
        super.onConfigurationChanged(newConfig);
        int orientation = this.getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            Log.i(TAG, "onConfigurationChanged cur: LANDSCAPE");
        } else if (orientation == Configuration.ORIENTATION_PORTRAIT) {
            Log.i(TAG, "onConfigurationChanged cur: PORTRAIT");
        }
        if (isPushingRtmp || isRecording || isRTSPPublisherRunning) {
            stopScreenCapture();
            clearAllImages();
            createScreenEnvironment();
            setupVirtualDisplay();
        }
    } catch (Exception ex) {
        // Bug fix: the original swallowed the exception silently; log it so
        // orientation-switch failures are at least visible.
        Log.e(TAG, "onConfigurationChanged exception: " + ex.getMessage());
    }
}
Copy the code
Fill the frame strategy
Many people do not understand why frame filling is needed. When capturing the screen, if the screen content does not change, no new frames are produced. A good approach is to keep the last frame and re-send it at a fixed interval, so that the playback side does not stall for several seconds because the frame spacing is too large. Of course, if the server can cache a GOP, the problem is also mitigated.
Abnormal network handling and event callback mechanism
Answer: If RTMP is used, network jitter or other network exceptions occur, a good reconnection mechanism and status feedback mechanism are required.
class EventHandeV2 implements NTSmartEventCallbackV2 {
@Override
public void onNTSmartEventCallbackV2(long handle, int id, long param1, long param2, String param3, String param4, Object param5) {
Log.i(TAG, "EventHandeV2: handle=" + handle + " id:" + id);
String publisher_event = "";
switch (id) {
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_STARTED:
publisher_event = "Start...";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTING:
publisher_event = "In connection with...";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTION_FAILED:
publisher_event = "Connection failed..";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_CONNECTED:
publisher_event = "Connection successful..";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_DISCONNECTED:
publisher_event = "Connection down...";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_STOP:
publisher_event = "Close..";
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_RECORDER_START_NEW_FILE:
publisher_event = "Start a new video file:" + param3;
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_ONE_RECORDER_FILE_FINISHED:
publisher_event = "Generated a video file:" + param3;
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_SEND_DELAY:
publisher_event = "Transmission delay:" + param1 + "Frames." + param2;
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_CAPTURE_IMAGE:
publisher_event = "Snapshot:" + param1 + "Path:" + param3;
if (param1 == 0) {
publisher_event = publisher_event + "Snapshot intercepted successfully..";
} else {
publisher_event = publisher_event + "Failed to capture snapshot...";
}
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUBLISHER_RTSP_URL:
publisher_event = "RTSP service URL:" + param3;
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_RESPONSE_STATUS_CODE:
publisher_event ="RTSP status code received, codeID: " + param1 + ", RTSP URL: " + param3;
break;
case NTSmartEventID.EVENT_DANIULIVE_ERC_PUSH_RTSP_SERVER_NOT_SUPPORT:
publisher_event ="Server does not support RTSP push, push RTSP URL:" + param3;
break;
}
String str = "Current callback status:" + publisher_event;
Log.i(TAG, str);
Message message = newMessage(); message.what = PUBLISHER_EVENT_MSG; message.obj = publisher_event; handler.sendMessage(message); }}Copy the code
Partial screen data collection
Answer: In many scenarios, 3/4 of the screen is used for students to read and 1/4 for operations such as instructions. In such cases, clipping of the captured screen region must be considered:
/**
 * Pushes one frame of clipped RGBA data to the publisher.
 *
 * @param handle       publisher instance handle (returned by SmartPublisherOpen)
 * @param data         RGBA pixel data
 * @param rowStride    stride, in bytes, of one source row
 * @param width        source frame width in pixels
 * @param height       source frame height in pixels
 * @param clipedLeft   left offset of the clip region
 * @param clipedTop    top offset of the clip region
 * @param clipedWidth  width of the clip region; must be even
 * @param clipedHeight height of the clip region; must be even
 *
 * @return 0 if successful
 */
public native int SmartPublisherOnCaptureVideoClipedRGBAData(long handle, ByteBuffer data, int rowStride, int width, int height, int clipedLeft, int clipedTop, int clipedWidth, int clipedHeight);
Copy the code
Text, picture watermarking
In many scenarios, the user with the same screen will display the company logo and certain text information on the push terminal. At this time, we need to consider the watermarking problem of text and picture:
/**
 * Sets a text watermark. Only used when publishing a stream.
 *
 * @param handle       publisher instance handle
 * @param waterText    watermark text
 * @param isAppendTime whether the current time is appended to the text
 * @param fontSize     font size; one of "MEDIUM", "SMALL", "BIG"
 * @param waterPostion watermark position; one of "TOPLEFT", "TOPRIGHT",
 *                     "BOTTOMLEFT", "BOTTOMRIGHT"
 * @param xPading      horizontal distance from the picture edge
 * @param yPading      vertical distance from the picture edge
 *
 * @return 0 if successful
 */
public native int SmartPublisherSetTextWatermark(long handle, String waterText, int isAppendTime, int fontSize, int waterPostion, int xPading, int yPading);
/**
 * Sets the font file used to render the text watermark.
 *
 * @param handle       publisher instance handle
 * @param fontFileName full path to the font file,
 *                     e.g. /system/fonts/DroidSansFallback.ttf
 *
 * @return 0 if successful
 */
public native int SmartPublisherSetTextWatermarkFontFileName(long handle, String fontFileName);
/**
 * Sets a picture (logo) watermark. Only used when publishing a stream;
 * the picture must be in "*.png" format.
 *
 * @param handle       publisher instance handle
 * @param picPath      full path of the picture, e.g. /sdcard/logo.png
 * @param waterPostion watermark position; one of "TOPLEFT", "TOPRIGHT",
 *                     "BOTTOMLEFT", "BOTTOMRIGHT"
 * @param picWidth     picture width
 * @param picHeight    picture height
 * @param xPading      horizontal distance from the picture edge
 * @param yPading      vertical distance from the picture edge
 *
 * @return 0 if successful
 */
public native int SmartPublisherSetPictureWatermark(long handle, String picPath, int waterPostion, int picWidth, int picHeight, int xPading, int yPading);
Copy the code
Access to the screen | data collection
Before collecting push, you need to obtain screen permissions. After you get the screen data, call the SDK interface to complete push or video operation:
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private boolean startScreenCapture(a) {
Log.i(TAG, "startScreenCapture..");
setupMediaProjection();
setupVirtualDisplay();
return true;
}
/**
 * Rounds {@code d} up to the next multiple of {@code a}.
 * {@code a} must be a power of two for the bitmask trick to be valid.
 */
private int align(int d, int a) {
    int mask = a - 1;
    return (d + mask) & ~mask;
}
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void createScreenEnvironment(a) {
sreenWindowWidth = mWindowManager.getDefaultDisplay().getWidth();
screenWindowHeight = mWindowManager.getDefaultDisplay().getHeight();
Log.i(TAG, "screenWindowWidth: " + sreenWindowWidth + ",screenWindowHeight: "
+ screenWindowHeight);
if (sreenWindowWidth > 800)
{
if (screenResolution == SCREEN_RESOLUTION_STANDARD)
{
scale_rate = SCALE_RATE_HALF;
sreenWindowWidth = align(sreenWindowWidth / 2.16);
screenWindowHeight = align(screenWindowHeight / 2.16);
}
else if(screenResolution == SCREEN_RESOLUTION_LOW)
{
scale_rate = SCALE_RATE_TWO_FIFTHS;
sreenWindowWidth = align(sreenWindowWidth * 2 / 5.16);
screenWindowHeight = align(screenWindowHeight * 2 / 5.16);
}
}
Log.i(TAG, "After adjust mWindowWidth: " + sreenWindowWidth + ", mWindowHeight: " + screenWindowHeight);
int pf = mWindowManager.getDefaultDisplay().getPixelFormat();
Log.i(TAG, "display format:" + pf);
DisplayMetrics displayMetrics = new DisplayMetrics();
mWindowManager.getDefaultDisplay().getMetrics(displayMetrics);
mScreenDensity = displayMetrics.densityDpi;
mImageReader = ImageReader.newInstance(sreenWindowWidth,
screenWindowHeight, 0x1.6);
mMediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
}
@SuppressLint("NewApi")
private void setupMediaProjection(a) {
mMediaProjection = mMediaProjectionManager.getMediaProjection(
MainActivity.mResultCode, MainActivity.mResultData);
}
@SuppressLint("NewApi")
private void setupVirtualDisplay(a) {
mVirtualDisplay = mMediaProjection.createVirtualDisplay(
"ScreenCapture", sreenWindowWidth, screenWindowHeight,
mScreenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mImageReader.getSurface(), null.null);
mImageReader.setOnImageAvailableListener(
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = mImageReader.acquireLatestImage();
if(image ! =null) {
processScreenImage(image);
//image.close();}}},null);
}
private void startRecorderScreen(a) {
Log.i(TAG, "start recorder screen..");
if (startScreenCapture()) {
new Thread() {
@Override
public void run(a) {
Log.i(TAG, "start record.."); } }.start(); }}private ByteBuffer deepCopy(ByteBuffer source) {
int sourceP = source.position();
int sourceL = source.limit();
ByteBuffer target = ByteBuffer.allocateDirect(source.remaining());
target.put(source);
target.flip();
source.position(sourceP);
source.limit(sourceL);
return target;
}
/** * Process image data as desired. */
@SuppressLint("NewApi")
private void processScreenImage(Image image) {
if(! isPushingRtmp && ! isRecording &&! isRTSPPublisherRunning) { image.close();return;
}
/* final Image.Plane[] planes = image.getPlanes(); width_ = image.getWidth(); height_ = image.getHeight(); row_stride_ = planes[0].getRowStride(); ByteBuffer buf = deepCopy(planes[0].getBuffer()); * /
// Log.i("OnScreenImage", "new image");
pushImage(image);
}
@SuppressLint("NewApi")
private void stopScreenCapture(a) {
if(mVirtualDisplay ! =null) {
mVirtualDisplay.release();
mVirtualDisplay = null; }}Copy the code
Base initialization
private void InitAndSetConfig(a) {
// At the beginning, you need to set whether to collect audio or video
publisherHandle = libPublisher.SmartPublisherOpen(this.getApplicationContext(),
audio_opt, video_opt, sreenWindowWidth,
screenWindowHeight);
if ( publisherHandle == 0 )
{
return;
}
Log.i(TAG, "publisherHandle=" + publisherHandle);
libPublisher.SetSmartPublisherEventCallbackV2(publisherHandle, new EventHandeV2());
if(videoEncodeType == 1)
{
int h264HWKbps = setHardwareEncoderKbps(true, sreenWindowWidth,
screenWindowHeight);
Log.i(TAG, "h264HWKbps: " + h264HWKbps);
int isSupportH264HWEncoder = libPublisher
.SetSmartPublisherVideoHWEncoder(publisherHandle, h264HWKbps);
if (isSupportH264HWEncoder == 0) {
Log.i(TAG, "Great, it supports h.264 hardware encoder!"); }}else if (videoEncodeType == 2)
{
int hevcHWKbps = setHardwareEncoderKbps(false, sreenWindowWidth,
screenWindowHeight);
Log.i(TAG, "hevcHWKbps: " + hevcHWKbps);
int isSupportHevcHWEncoder = libPublisher
.SetSmartPublisherVideoHevcHWEncoder(publisherHandle, hevcHWKbps);
if (isSupportHevcHWEncoder == 0) {
Log.i(TAG, "Great, it supports hevc hardware encoder!"); }}if(is_sw_vbr_mode)
{
int is_enable_vbr = 1;
int video_quality = CalVideoQuality(sreenWindowWidth,
screenWindowHeight, true);
int vbr_max_bitrate = CalVbrMaxKBitRate(sreenWindowWidth,
screenWindowHeight);
libPublisher.SmartPublisherSetSwVBRMode(publisherHandle, is_enable_vbr, video_quality, vbr_max_bitrate);
}
// For audio, see the SmartPublisher project
/* if (! is_speex) { // set AAC encoder libPublisher.SmartPublisherSetAudioCodecType(publisherHandle, 1); } else { // set Speex encoder libPublisher.SmartPublisherSetAudioCodecType(publisherHandle, 2); libPublisher.SmartPublisherSetSpeexEncoderQuality(publisherHandle, 8); } libPublisher.SmartPublisherSetNoiseSuppression(publisherHandle, is_noise_suppression ? 1:0); libPublisher.SmartPublisherSetAGC(publisherHandle, is_agc ? 1:0); * /
// libPublisher.SmartPublisherSetClippingMode(publisherHandle, 0);
//libPublisher.SmartPublisherSetSWVideoEncoderProfile(publisherHandle, sw_video_encoder_profile);
//libPublisher.SmartPublisherSetSWVideoEncoderSpeed(publisherHandle, sw_video_encoder_speed);
// libPublisher.SetRtmpPublishingType(publisherHandle, 0);
libPublisher.SmartPublisherSetFPS(publisherHandle, 18); // The frame rate can be adjusted
libPublisher.SmartPublisherSetGopInterval(publisherHandle, 18*3);
//libPublisher.SmartPublisherSetSWVideoBitRate(publisherHandle, 1200, 2400); // For soft coding, the maximum bit rate is usually twice the average bit rate
libPublisher.SmartPublisherSetSWVideoEncoderSpeed(publisherHandle, 3);
//libPublisher.SmartPublisherSaveImageFlag(publisherHandle, 1);
}
Copy the code
Start pushing | recording | RTSP service
@SuppressWarnings("deprecation")
@Override
public void onStart(Intent intent, int startId) {
// TODO Auto-generated method stub
super.onStart(intent, startId);
Log.i(TAG, "onStart++");
if (libPublisher == null)
return;
clearAllImages();
screenResolution = intent.getExtras().getInt("SCREENRESOLUTION");
videoEncodeType = intent.getExtras().getInt("VIDEOENCODETYPE");
push_type = intent.getExtras().getInt("PUSHTYPE");
Log.i(TAG, "push_type: " + push_type);
mWindowManager = (WindowManager) getSystemService(Service.WINDOW_SERVICE);
// Window manager
createScreenEnvironment();
startRecorderScreen();
// If you push and record at the same time, you can set it once
InitAndSetConfig();
if ( publisherHandle == 0 )
{
stopScreenCapture();
return;
}
if(push_type == PUSH_TYPE_RTMP)
{
String publishURL = intent.getStringExtra("PUBLISHURL");
Log.i(TAG, "publishURL: " + publishURL);
if(libPublisher.SmartPublisherSetURL(publisherHandle, publishURL) ! =0) {
stopScreenCapture();
Log.e(TAG, "Failed to set publish stream URL..");
if(publisherHandle ! =0) {
if(libPublisher ! =null) {
libPublisher.SmartPublisherClose(publisherHandle);
publisherHandle = 0; }}return; }}// Start the thread passing data
post_data_thread = new Thread(new DataRunnable());
Log.i(TAG, "new post_data_thread..");
is_post_data_thread_alive = true;
post_data_thread.start();
// Video related ++
is_need_local_recorder = intent.getExtras().getBoolean("RECORDER");
if(is_need_local_recorder)
{
ConfigRecorderParam();
int startRet = libPublisher.SmartPublisherStartRecorder(publisherHandle);
if( startRet ! =0 )
{
isRecording = false;
Log.e(TAG, "Failed to start recorder..");
}
else
{
isRecording = true; }}// Video related --
if(push_type == PUSH_TYPE_RTMP)
{
Log.i(TAG, "RTMP Pusher mode..");
// Push stream correlation ++
int startRet = libPublisher.SmartPublisherStartPublisher(publisherHandle);
if(startRet ! =0) {
isPushingRtmp = false;
Log.e(TAG, "Failed to start push rtmp stream..");
return;
}
else
{
isPushingRtmp = true;
}
// Push stream correlation --
}
else if(push_type == PUSH_TYPE_RTSP)
{
Log.i(TAG, "RTSP Internal Server mode..");
rtsp_handle_ = libPublisher.OpenRtspServer(0);
if (rtsp_handle_ == 0) {
Log.e(TAG, "Failed to create RTSP Server instance! Please check SDK validity.");
} else {
int port = 8554;
if(libPublisher.SetRtspServerPort(rtsp_handle_, port) ! =0) {
libPublisher.CloseRtspServer(rtsp_handle_);
rtsp_handle_ = 0;
Log.e(TAG, "Failed to create RTSP server port! Please check for duplicate ports or ports out of range!");
}
//String user_name = "admin";
//String password = "12345";
//libPublisher.SetRtspServerUserNamePassword(rtsp_handle_, user_name, password);
if (libPublisher.StartRtspServer(rtsp_handle_, 0) = =0) {
Log.i(TAG, "RTSP Server started successfully!");
} else {
libPublisher.CloseRtspServer(rtsp_handle_);
rtsp_handle_ = 0;
Log.e(TAG, "Failed to start RTSP Server! Please check whether the set port is occupied!");
return;
}
isRTSPServiceRunning = true;
}
if(isRTSPServiceRunning)
{
Log.i(TAG, "onClick start rtsp publisher..");
String rtsp_stream_name = "stream1";
libPublisher.SetRtspStreamName(publisherHandle, rtsp_stream_name);
libPublisher.ClearRtspStreamServer(publisherHandle);
libPublisher.AddRtspStreamServer(publisherHandle, rtsp_handle_, 0);
if (libPublisher.StartRtspStream(publisherHandle, 0) != 0) {
Log.e(TAG, "Failed to call publish RTSP stream interface!");
return;
}
isRTSPPublisherRunning = true; }}// If you push and record at the same time, Audio should start once
CheckInitAudioRecorder();
Log.i(TAG, "onStart--");
}
private void stopPush(a) {
if(! isPushingRtmp) {return;
}
if(! isRecording && ! isRTSPPublisherRunning) {if(audioRecord_ ! =null) {
Log.i(TAG, "stopPush, call audioRecord_.StopRecording..");
audioRecord_.Stop();
if(audioRecordCallback_ ! =null) {
audioRecord_.RemoveCallback(audioRecordCallback_);
audioRecordCallback_ = null;
}
audioRecord_ = null; }}if(libPublisher ! =null) {
libPublisher.SmartPublisherStopPublisher(publisherHandle);
}
if(! isRecording && ! isRTSPPublisherRunning) {if(publisherHandle ! =0) {
if(libPublisher ! =null) {
libPublisher.SmartPublisherClose(publisherHandle);
publisherHandle = 0; }}}}Copy the code
Stop pushing | recording | RTSP service
private void stopRecorder(a) {
if(! isRecording) {return;
}
if(! isPushingRtmp && ! isRTSPPublisherRunning) {if(audioRecord_ ! =null) {
Log.i(TAG, "stopRecorder, call audioRecord_.StopRecording..");
audioRecord_.Stop();
if(audioRecordCallback_ ! =null) {
audioRecord_.RemoveCallback(audioRecordCallback_);
audioRecordCallback_ = null;
}
audioRecord_ = null; }}if(libPublisher ! =null) {
libPublisher.SmartPublisherStopRecorder(publisherHandle);
}
if(! isPushingRtmp && ! isRTSPPublisherRunning) {if(publisherHandle ! =0) {
if(libPublisher ! =null) {
libPublisher.SmartPublisherClose(publisherHandle);
publisherHandle = 0; }}}}// Stop publishing RTSP streams
private void stopRtspPublisher(a) {
if(! isRTSPPublisherRunning) {return;
}
if(! isPushingRtmp && ! isRecording) {if(audioRecord_ ! =null) {
Log.i(TAG, "stopRtspPublisher, call audioRecord_.StopRecording..");
audioRecord_.Stop();
if(audioRecordCallback_ ! =null) {
audioRecord_.RemoveCallback(audioRecordCallback_);
audioRecordCallback_ = null;
}
audioRecord_ = null; }}if(libPublisher ! =null) {
libPublisher.StopRtspStream(publisherHandle);
}
if(! isPushingRtmp && ! isRecording) {if(publisherHandle ! =0) {
if(libPublisher ! =null) {
libPublisher.SmartPublisherClose(publisherHandle);
publisherHandle = 0; }}}}// Stop RTSP service
private void stopRtspService(a) {
if(! isRTSPServiceRunning) {return;
}
if(libPublisher ! =null&& rtsp_handle_ ! =0) {
libPublisher.StopRtspServer(rtsp_handle_);
libPublisher.CloseRtspServer(rtsp_handle_);
rtsp_handle_ = 0; }}Copy the code
Interested developers can refer to it.