### Introduction
In the last article we decoded the video; this time we will play the decoded data, that is, draw it on screen.
### Video playback
#### Create a custom SurfaceView
We use a SurfaceView because video playback needs to refresh the screen quickly and in real time.
```java
public class VideoView extends SurfaceView {

    public VideoView(Context context) {
        this(context, null, 0);
    }

    public VideoView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public VideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    private void init() {
        // Draw pixels in the RGBA_8888 format (the richest color format)
        SurfaceHolder holder = getHolder();
        holder.setFormat(PixelFormat.RGBA_8888);
    }
}
```
Here we define a custom SurfaceView and set its output format to RGBA_8888, the format with the richest colors.
Then add it to the root layout:
```xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <com.nan.ffmpeg.view.VideoView
        android:id="@+id/sv_video"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <Button
        android:id="@+id/btn_play"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="Play" />

</FrameLayout>
```
#### Create a playback controller class
```java
public class VideoPlayer {

    static {
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("ffmpeg-lib");
    }

    public native void render(String input, Surface surface);
}
```
#### Native method implementation
#include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h" #include "libavcodec/avcodec.h" #include <android/native_window_jni.h> #include <android/native_window.h> #include "libyuv.h" JNIEXPORT void JNICALL Java_com_nan_ffmpeg_utils_VideoPlayer_render(JNIEnv *env, jobject instance, Const char *input = (*env)->GetStringUTFChars(env, input_, 0); //1. Register all components, such as initializing some global variables, initializing network, etc. Av_register_all (); //avcodec_register_all(); AVFormatContext *pFormatCtx = avformat_alloc_context(); If (avformat_open_INPUT (&pFormatCtx, INPUT, NULL, NULL)! = 0) {LOGE("%s", "can't open input video file "); return; } // the second parameter is a dictionary, which indicates what information you need to obtain. If (avformat_find_stream_info(pFormatCtx, NULL) < 0) {LOGE("%s", "can't get video file info "); return; Int v_stream_idx = -1;} // Get the index position of the video stream // iterate over all types of streams (audio stream, video stream, subtitle stream) int i = 0; //number of streams for (; i < pFormatCtx->nb_streams; If (pFormatCtx->streams[I]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {v_stream_idx = I; break; }} the if (v_stream_idx = = 1) {LOGE (" % s ", "can't find the video stream \ n"); return; AVCodecContext *pCodecCtx = streamctx ->streams[v_stream_idx]->codec; AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id); If (pCodec == NULL) {LOGE("%s", "can't find the decoder, or the video is encrypted \n"); return; } //5. If (avCodec_open2 (pCodecCtx, pCodec, NULL) < 0) {LOGE("%s", "decoder cannot open \n"); return; AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket)); av_malloc(sizeof(AVPacket)); AVFrame *yuv_frame = AV_frame_alloc (); AVFrame *yuv_frame = av_frame_alloc(); AVFrame *rgb_frame = av_frame_alloc(); int got_picture, ret; int frame_count = 0; // form ANativeWindow *pWindow = ANativeWindow_fromSurface(env, surface); // Draw buffer ANativeWindow_Buffer out_buffer; While (av_read_frame(pFormatCtx, Packet) >= 0) {if (packet->stream_index == v_stream_idx) {//7. Decode a frame of video compression data, get video pixel data RET = AVCODEC_decode_videO2 (pCodecCtx, YUv_frame, & GOt_picture, packet); If (ret < 0) {LOGE("%s", "decoded error "); return; If (got_picture) {if (got_picture) {if (got_picture) {if (got_picture) { ANativeWindow_setBuffersGeometry(pWindow, pCodecCtx->width, pCodecCtx->height, WINDOW_FORMAT_RGBA_8888); ANativeWindow_lock(pWindow, &out_buffer, NULL); // Fix buffer // initialize buffer // set attributes, pixel format, width and height //rgb_frame buffer is Window buffer, same, Avpicture_fill ((AVPicture *) rGB_frame, out_buffer. Bits, AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height); // Convert YUV data to RGBA 8888 data //FFmpeg can convert, but there are problems, So we use the library to do libyuv / / https://chromium.googlesource.com/external/libyuv / / parameters are the size of the data, corresponding to a line / / I420ToARGB (yuv_frame - > data [0]. 
yuv_frame->linesize[0], // yuv_frame->data[1], yuv_frame->linesize[1], // yuv_frame->data[2], yuv_frame->linesize[2], // rgb_frame->data[0], rgb_frame->linesize[0], // pCodecCtx->width, pCodecCtx->height); I420ToARGB(yuv_frame->data[0], yuv_frame->linesize[0], yuv_frame->data[2], yuv_frame->linesize[2], yuv_frame->data[1], yuv_frame->linesize[1], rgb_frame->data[0], rgb_frame->linesize[0], pCodecCtx->width, pCodecCtx->height); //3, unlock window ANativeWindow_unlockAndPost(pWindow); frame_count++; LOGI(" frame %d ", frame_count); } // Free av_free_packet(packet); } av_frame_free(&yuv_frame); avcodec_close(pCodecCtx); avformat_free_context(pFormatCtx); (*env)->ReleaseStringUTFChars(env, input_, input); }Copy the code
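One note on the snippet above: the `LOGI`/`LOGE` macros are not defined in it. They are assumed to come from a shared header; a typical definition over the NDK log library (the tag string here is an arbitrary choice) looks like this:

```c
#include <android/log.h>

// Assumed log macros built on the NDK log library; "VideoPlayer" is an arbitrary tag.
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "VideoPlayer", FORMAT, ##__VA_ARGS__)
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "VideoPlayer", FORMAT, ##__VA_ARGS__)
```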
Note that we use the libyuv library to convert the YUV data to RGB data. It can be downloaded from https://chromium.googlesource.com/external/libyuv.
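For reference, here is a minimal, self-contained sketch of the same conversion outside the player. It assumes a packed I420 buffer with even width and height; `i420_to_argb` is a hypothetical helper name, not part of libyuv:

```c
#include <stdint.h>
#include "libyuv.h"

// Hypothetical helper: convert one packed I420 frame to ARGB with libyuv.
// `src` holds the Y, U and V planes one after another; `dst_argb` must
// hold width * height * 4 bytes.
static void i420_to_argb(const uint8_t *src, uint8_t *dst_argb,
                         int width, int height) {
    const uint8_t *y = src;
    const uint8_t *u = y + width * height;             // U plane follows Y
    const uint8_t *v = u + (width / 2) * (height / 2); // V plane follows U
    I420ToARGB(y, width,             // Y plane and its stride
               u, width / 2,         // U plane and its stride
               v, width / 2,         // V plane and its stride
               dst_argb, width * 4,  // destination and its stride in bytes
               width, height);
}
```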
Modify the last line of libyuv's Android.mk so that it outputs a dynamic library instead of a static one:
```makefile
include $(BUILD_SHARED_LIBRARY)
```
Create your own jni directory, copy all of libyuv's files into it, and run the following command to compile it:
```bash
# run from the directory that contains jni/
ndk-build
```
The precompiled .so library it outputs is then used in Android Studio. The CMake script needs these additions:
```cmake
add_library(yuv SHARED IMPORTED)
set_target_properties(yuv PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/libs/${ANDROID_ABI}/libyuv.so)
```
If necessary, also set the include path for the header files:
```cmake
# configure the header include path
include_directories(${path_project}/app/src/main/cpp/include/yuv)
```
Finally, include the corresponding header file where you use it:
```c
#include "libyuv.h"
```
Another thing to note: we are going to use the window's native drawing, so we need to include the window-related headers:
```c
#include <android/native_window_jni.h>
#include <android/native_window.h>
```
These headers come from the NDK's android library, which we find in CMake the same way as the log library:
```cmake
# find the android library
find_library(android-lib android)
```
Remember to link to your own library:
```cmake
target_link_libraries(
        ffmpeg-lib
        ${log-lib}
        ${android-lib}
        avutil swresample avcodec avformat swscale postproc avfilter avdevice
        yuv)
```
#### The native window drawing process
Drawing requires a Surface object.

The native drawing steps (see the sketch after this list):

- Lock the window.
- Initialize the buffer: set its size and pixel format, then fill it with pixel data.
- Unlock the window and post the buffer to draw it on screen.
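A minimal sketch of that cycle, assuming `window` was obtained with `ANativeWindow_fromSurface()` and `pixels` holds one RGBA_8888 frame of the given size:

```c
#include <android/native_window.h>
#include <stdint.h>
#include <string.h>

// Sketch of one draw cycle on an ANativeWindow.
static void draw_frame(ANativeWindow *window, const uint8_t *pixels,
                       int width, int height) {
    ANativeWindow_Buffer buffer;
    // Set the buffer's size and pixel format before locking
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    // 1. Lock the window; buffer.bits now points at drawable memory
    if (ANativeWindow_lock(window, &buffer, NULL) < 0) {
        return;
    }
    // 2. Fill the buffer line by line (buffer.stride is in pixels, 4 bytes each)
    uint8_t *dst = (uint8_t *) buffer.bits;
    for (int y = 0; y < height; y++) {
        memcpy(dst + (size_t) y * buffer.stride * 4,
               pixels + (size_t) y * width * 4,
               (size_t) width * 4);
    }
    // 3. Unlock the window and post the buffer to the screen
    ANativeWindow_unlockAndPost(window);
}
```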
#### Test
```java
@Override
public void onClick(View v) {
    String inputVideo = Environment.getExternalStorageDirectory().getAbsolutePath()
            + File.separatorChar + "input.mp4";
    switch (v.getId()) {
        case R.id.btn_play:
            sv_video = (SurfaceView) findViewById(R.id.sv_video);
            mPlayer.render(inputVideo, sv_video.getHolder().getSurface());
            break;
    }
}
```

Note that `render()` runs a blocking decode loop, so in a real app it should be called from a background thread rather than directly inside `onClick()`.
If you found this article helpful, you are welcome to follow my official WeChat account:
Everyone is welcome to join my group to discuss all kinds of technical and non-technical topics. If you are interested, add me on WeChat (huannan88) and I will bring you into the group.