1 Download the official FFmpeg source code

For example: ffmpeg.org/releases/ff…
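A minimal fetch sketch, assuming the 2.6.9 release used throughout this article and the standard ffmpeg.org releases path (verify the exact URL on the releases page):

Downloads/ffmpeg# wget https://ffmpeg.org/releases/ffmpeg-2.6.9.tar.gz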

2 Download the NDK and decompress it

For example: dl.google.com/android/rep…
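A hedged example of fetching the r14b package unpacked below; the repository URL is assumed from Google's standard NDK download location and should be checked against the official NDK downloads page:

Downloads# wget https://dl.google.com/android/repository/android-ndk-r14b-linux-x86_64.zip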

Downloads# unzip android-ndk-r14b-linux-x86_64.zip

3 Decompress the FFmpeg source package

Downloads/ffmpeg# tar -zxvf ffmpeg-2.6.9.tar.gz

4 Modify the FFmpeg configure script

Open the configure file in the source root directory:

Downloads/ffmpeg/ffmpeg-2.6.9# subl configure

Change the shared-library naming rules as follows, so that the build produces names Android's loader accepts (for example libavcodec-56.so instead of libavcodec.so.56):

#SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
#LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
#SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
#SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB)"$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'

5 Write the armv7-a build script

Create a build_v7a.sh file in the FFmpeg source root. The script below builds the armv7-a shared libraries; scripts for other ABIs can be written along the same lines.

#!/bin/bash
make clean
export NDK=/home/lch/Downloads/android-ndk-r14b
export SYSROOT=$NDK/platforms/android-19/arch-arm/
export TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64
export CPU=armv7-a
export PREFIX=$(pwd)/android/$CPU
export ADDI_CFLAGS="-marm -march=armv7-a"
export ADDI_LDFLAGS="-marm -march=armv7-a"

./configure --target-os=linux \
--prefix=$PREFIX --arch=arm \
--disable-doc \
--enable-shared \
--disable-static \
--disable-yasm \
--disable-symver \
--enable-gpl \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--enable-cross-compile \
--sysroot=$SYSROOT \
--extra-cflags="-Os -fpic $ADDI_CFLAGS" \
--extra-ldflags="$ADDI_LDFLAGS"

make clean
make
make install


6 Compile

chmod +x build_v7a.sh
./build_v7a.sh

7 Compilation Result
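If the build succeeds, the headers and shared libraries are installed under the PREFIX set in the script (android/armv7-a inside the source tree). A quick sanity check, with the expected library names taken from the CMakeLists.txt used later:

Downloads/ffmpeg/ffmpeg-2.6.9# ls android/armv7-a/include android/armv7-a/lib
# expect header directories such as libavcodec/, libavformat/ and libswscale/, and shared libraries
# named libavcodec-56.so, libavformat-56.so, libavutil-54.so, libswscale-3.so, ... plus libxxx.so symlinks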

8 Test FFmpeg on Android

8.1 Create a jni folder under app/src/main and copy the android/armv7-a/include folder from the FFmpeg build output into it.

8.2 Create a jniLibs/armeabi-v7a folder under app/src/main and copy the real .so files from android/armv7-a/lib (the versioned files such as libavcodec-56.so, not the libxxx.so symlinks) into jniLibs/armeabi-v7a. A sketch of both copy steps follows.
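The commands below are only an illustration; FFMPEG_OUT and PROJECT are assumed paths (PROJECT matches the path used in CMakeLists.txt further down) and must be adjusted to your machine.

FFMPEG_OUT=~/Downloads/ffmpeg/ffmpeg-2.6.9/android/armv7-a
PROJECT=/home/lch/test/jni

# 8.1: headers -> app/src/main/jni/include
mkdir -p $PROJECT/app/src/main/jni
cp -r $FFMPEG_OUT/include $PROJECT/app/src/main/jni/

# 8.2: versioned .so files (real files, not symlinks) -> app/src/main/jniLibs/armeabi-v7a
mkdir -p $PROJECT/app/src/main/jniLibs/armeabi-v7a
cp $FFMPEG_OUT/lib/*-*.so $PROJECT/app/src/main/jniLibs/armeabi-v7a/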

8.3 Writing native methods

public class LchJni {
    static {
        System.loadLibrary("lch_decode");
    }
    public static native void decode(String src, String dest);
}

Generate the JNI header file and move the generated com_lch_jnitest_LchJni.h into app/src/main/jni:

jni/app/src/main/java/com/lch/jnitest# javac -h . LchJni.java 
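javac -h writes the header into the current directory, so it still has to be moved into the jni folder. A hedged example, assuming the project layout above:

jni/app/src/main/java/com/lch/jnitest# mv com_lch_jnitest_LchJni.h ../../../../jni/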

8.4 Create lchdecode.c in app/src/main/jni with the following code:

//
// Created by root on 20-8-15.
//

#include "com_lch_jnitest_LchJni.h"

#include <android/log.h>

#include "libavcodec/avcodec.h"
// Package format
#include "libavformat/avformat.h"
// Scaling and pixel format conversion
#include "libswscale/swscale.h"

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "lchjni", FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "lchjni", FORMAT, ##__VA_ARGS__);


JNIEXPORT void JNICALL Java_com_lch_jnitest_LchJni_decode
(JNIEnv *env, jclass jclz, jstring input_jstr, jstring output_jstr){
    LOGE("%s"."jni decode ffmpeg");

    const char* input_cstr = (*env) -> GetStringUTFChars(env, input_jstr, NULL);
    const char* output_cstr = (*env) -> GetStringUTFChars(env, output_jstr, NULL);

    //1. Register all components
    av_register_all();

    // Encapsulate the format context
    AVFormatContext* pFormatCtx = avformat_alloc_context();
    //2. Open the input video file; returns 0 on success. The third parameter NULL means the format is detected automatically
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "Failed to open input video file");
        return;
    }

    //3. Obtain the video file information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s"."Failed to get video file information");
        return;
    }

    // Find the location of the video stream
    // Walk through all types of streams (video, audio and possibly subtitles) to find the location of the video stream
    int video_stream_index = -1;
    int i = 0;
    for(; i < pFormatCtx -> nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec-> codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }

    //4. Get the stream's codec context and find its decoder
    AVCodecContext* pCodecCtx = pFormatCtx->streams[video_stream_index]->codec;
    // pCodecCtx->codec cannot be used to obtain the decoder; look it up by codec_id instead
    AVCodec* pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s"."Failed to find decoder");
        return;
    }

    //5. Open the decoder
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("%s"."Failed to open decoder");
        return;
    }

    // Encode data
    AVPacket* pPacket = (AVPacket*)av_malloc(sizeof(AVPacket));

    // Pixel data (decoded data)
    AVFrame* pFrame = av_frame_alloc();
    AVFrame* pYuvFrame = av_frame_alloc();

    FILE* fp_yuv = fopen(output_cstr, "wb");

    // Memory can only be allocated if AVFrame pixel format and screen size are specified
    // The buffer allocates memory
    uint8_t* out_buffer = (uint8_t*)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    // Initialize the buffer
    avpicture_fill((AVPicture*)pYuvFrame, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    //srcW: width of source image
    //srcH: height of the source image
    //srcFormat: the pixel format of the source image
    //dstW: target image width
    //dstH: Target image height
    //dstFormat: The pixel format of the target image
    //flags: Sets the algorithm used for image stretching
    struct SwsContext* pSwsCtx = sws_getContext(
            pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
            SWS_BILINEAR, NULL, NULL, NULL);

    int got_frame, len, frameCount = 0;
    //6. Read compressed video data AVPacket frame by frame from input file
    while(av_read_frame(pFormatCtx, pPacket) >= 0) {
        if (pPacket->stream_index == video_stream_index) {
            //7. Decode one frame of compressed data: AVPacket --> AVFrame. got_frame is non-zero when a complete frame was produced
            len = avcodec_decode_video2(pCodecCtx, pFrame, &got_frame, pPacket);

            if (len < 0) {
                LOGE("%s"."Decoding failed");
                return;
            }
            // got_frame is non-zero when a complete frame was decoded
            if (got_frame) {
                //AVFrame ---> YUV420P
                //srcSlice[], dst[]: input and output data planes
                //srcStride[], dstStride[]: size of one line of input/output data; the conversion runs line by line
                //srcSliceY: first row to process, starting at 0
                //srcSliceH: height of the input slice
                sws_scale(pSwsCtx,
                          (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                          pYuvFrame->data, pYuvFrame->linesize);

                // width * height is the number of pixels; each pixel has one Y sample, and U and V each have a quarter as many
                int yuv_size = pCodecCtx->width * pCodecCtx->height;
                // Write the Y plane
                fwrite(pYuvFrame->data[0], 1, yuv_size, fp_yuv);
                // Write the U plane
                fwrite(pYuvFrame->data[1], 1, yuv_size / 4, fp_yuv);
                // Write the V plane
                fwrite(pYuvFrame->data[2], 1, yuv_size / 4, fp_yuv);

                LOGI("Decoded frame %d", frameCount++);
            }
        }
        // Free the packet's data after every read, not only for video packets
        av_free_packet(pPacket);
    }

    fclose(fp_yuv);
    av_free(out_buffer);
    sws_freeContext(pSwsCtx);
    av_free(pPacket);
    av_frame_free(&pFrame);
    av_frame_free(&pYuvFrame);
    // The codec context belongs to the stream, so close it rather than freeing it separately
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    (*env) -> ReleaseStringUTFChars(env, input_jstr, input_cstr);
    (*env) -> ReleaseStringUTFChars(env, output_jstr, output_cstr);
}

8.5 Create CMakeLists.txt in the app directory

cmake_minimum_required(VERSION 3.4.1)

set(path_project /home/lch/test/jni)

add_library(lch_decode SHARED src/main/jni/lchdecode.c)

add_library(avcodec SHARED IMPORTED)
set_target_properties(avcodec PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libavcodec-56.so IMPORTED_NO_SONAME 1)

add_library(avdevice SHARED IMPORTED)
set_target_properties(avdevice PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libavdevice-56.so IMPORTED_NO_SONAME 1)

add_library(avfilter SHARED IMPORTED)
set_target_properties(avfilter PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libavfilter-5.so IMPORTED_NO_SONAME 1)

add_library(avformat SHARED IMPORTED)
set_target_properties(avformat PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libavformat-56.so IMPORTED_NO_SONAME 1)

add_library(avutil SHARED IMPORTED)
set_target_properties(avutil PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libavutil-54.so IMPORTED_NO_SONAME 1)

add_library(postproc SHARED IMPORTED)
set_target_properties(postproc PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libpostproc-53.so)

add_library(swresample SHARED IMPORTED)
set_target_properties(swresample PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libswresample-1.so IMPORTED_NO_SONAME 1)

add_library(swscale SHARED IMPORTED)
set_target_properties(swscale PROPERTIES IMPORTED_LOCATION
        ${path_project}/app/src/main/jniLibs/${ANDROID_ABI}/libswscale-3.so IMPORTED_NO_SONAME 1)

include_directories(src/main/jni/include)

find_library(log-lib log)

target_link_libraries(lch_decode
        avutil avcodec avdevice swresample swscale avfilter avformat postproc
        ${log-lib})

8.6 Configure app/build.gradle as follows

apply plugin: 'com.android.application'

android {
    compileSdkVersion 29
    buildToolsVersion "29.0.3"

    defaultConfig {
        applicationId "com.lch.jnitest"
        minSdkVersion 15
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags ""
            }
        }
        ndk {
            abiFilters 'armeabi-v7a'
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    sourceSets {
        main {
            jniLibs.srcDirs = ['src/main/jniLibs']
        }
    }

    externalNativeBuild {
        cmake {
            path file('CMakeLists.txt')
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.0.2'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test.ext:junit:1.1.0'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
}

Add the following permissions to AndroidManifest.xml (on Android 6.0 and above the storage permissions must also be granted at runtime before the test below will work):

<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS" />

Test the decoding

String input = new File(Environment.getExternalStorageDirectory(), "input.mp4").getAbsolutePath();
String output = new File(Environment.getExternalStorageDirectory(), "output.avi").getAbsolutePath();
LchJni.decode(input, output);
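One way to provide the test file is to push a sample video over adb before running the app (assuming external storage is mounted at /sdcard on the device):

adb push input.mp4 /sdcard/input.mp4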

8.7 Test Results
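The decode progress appears in logcat under the lchjni tag, and the raw YUV output can be pulled back and inspected on the desktop with ffplay. A hedged check; the size passed to ffplay must match the actual resolution of input.mp4 (640x360 below is only a placeholder):

adb logcat -s lchjni
adb pull /sdcard/output.avi output.yuv
ffplay -f rawvideo -pixel_format yuv420p -video_size 640x360 output.yuv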