Preface
This series summarizes and analyzes the ijkplayer source code, following the player's overall flow from the Java layer through the JNI layer down to the C layer. The overall structure is as follows.
Major structure
IjkMediaPlayer, FFPlayer, and IJKFF_Pipeline are created during native_setup initialization, and VideoState is created in stream_open during prepareAsync. These structures are used throughout the entire playback process.
IjkMediaPlayer
The player object on the native layer, bound one-to-one to the Java-layer player. It serves as the entry point from Java into C.
struct IjkMediaPlayer {
    volatile int ref_count;        // reference count
    pthread_mutex_t mutex;
    FFPlayer *ffplayer;            // the internal player (ff_ffplay.c)
    int (*msg_loop)(void*);        // message loop callback supplied by the JNI layer
    SDL_Thread *msg_thread;        // thread that runs msg_loop
    SDL_Thread _msg_thread;
    int mp_state;                  // current state of the player state machine
    char *data_source;             // playback URL
    void *weak_thiz;               // global weak reference to the Java-layer player
    int restart;
    int restart_from_beginning;
    int seek_req;
};
FFPlayer
The concrete, internal player, covering decoding, output, and so on. Once the code enters ff_ffplay.c, everything works with FFPlayer. It holds the VideoState.
typedef struct FFPlayer {
    VideoState *is;                    // all playback state
    /* extra fields */
    SDL_Aout *aout;                    // audio output
    SDL_Vout *vout;                    // video output
    struct IJKFF_Pipeline *pipeline;   // platform decoder/output pipeline
    struct IJKFF_Pipenode *node_vdec;  // video decoder node
    MessageQueue msg_queue;            // messages delivered back to the Java layer
} FFPlayer;
VideoState
VideoState, created in stream_open, represents all states during playback.
typedef struct VideoState {
    Clock audclk;            // audio clock
    Clock vidclk;            // video clock
    Clock extclk;            // external clock
    FrameQueue pictq;        // decoded video frames
    FrameQueue subpq;        // decoded subtitle frames
    FrameQueue sampq;        // decoded audio samples
    Decoder auddec;          // audio decoder
    Decoder viddec;          // video decoder
    Decoder subdec;          // subtitle decoder
    PacketQueue audioq;      // demuxed audio packets
    PacketQueue subtitleq;   // demuxed subtitle packets
    PacketQueue videoq;      // demuxed video packets
    int seek_req;            // a seek has been requested
    double frame_timer;      // time at which the last shown frame was displayed
    int abort_request;       // stop everything
    int force_refresh;       // force the display to refresh
    int paused;
} VideoState;
IJKFF_Pipeline
IJKFF_Pipeline encapsulates the video decoder and the audio output behind function pointers, so the platform (here, Android) supplies the concrete implementations.
// ffpipeline_android.c
typedef struct IJKFF_Pipeline_Opaque {
    FFPlayer *ffp;
    SDL_mutex *surface_mutex;
    jobject jsurface;
    volatile bool is_surface_need_reconfigure;
    bool (*mediacodec_select_callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc);
    void *mediacodec_select_callback_opaque;
    SDL_Vout *weak_vout;
    float left_volume;
    float right_volume;
} IJKFF_Pipeline_Opaque;
// ff_ffpipeline.h
typedef struct IJKFF_Pipeline_Opaque IJKFF_Pipeline_Opaque;
typedef struct IJKFF_Pipeline IJKFF_Pipeline;
struct IJKFF_Pipeline {
    SDL_Class *opaque_class;
    IJKFF_Pipeline_Opaque *opaque;
    void (*func_destroy) (IJKFF_Pipeline *pipeline);
    IJKFF_Pipenode *(*func_open_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    SDL_Aout *(*func_open_audio_output) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    IJKFF_Pipenode *(*func_init_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    int (*func_config_video_decoder) (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
};
IJKFF_Pipeline *ffpipeline_alloc(SDL_Class *opaque_class, size_t opaque_size);
void ffpipeline_free(IJKFF_Pipeline *pipeline);
void ffpipeline_free_p(IJKFF_Pipeline **pipeline);
// Open the video decoder
IJKFF_Pipenode *ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
// Open the audio output
SDL_Aout *ffpipeline_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
// Initialize IJKFF_Pipenode asynchronously
IJKFF_Pipenode* ffpipeline_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
// Configure the video decoder asynchronously
int ffpipeline_config_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
Initialization Process
native_setup
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
MPTRACE("%s\n", __func__);
IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);
jni_set_media_player(env, thiz, mp);
ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
LABEL_RETURN:
ijkmp_dec_ref_p(&mp);
}
// ijkmp_android_create
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
// Initialize the IjkMediaPlayer structure
IjkMediaPlayer *mp = ijkmp_create(msg_loop);
if (!mp) goto fail;
// Create the SDL_Vout (video output) for an Android surface
mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
if (!mp->ffplayer->vout) goto fail;
// Create the IJKFF_Pipeline
mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
if (!mp->ffplayer->pipeline) goto fail;
// Bind the vout to the pipeline
ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
return mp;
return mp;
fail:
ijkmp_dec_ref_p(&mp);
return NULL;
}
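For context, ijkmp_create (called at the top of ijkmp_android_create) roughly does the following; this is a simplified sketch rather than the full source:
// Simplified sketch of ijkmp_create: allocate the player struct, create the
// FFPlayer, remember the JNI-layer message loop callback, take a reference.
IjkMediaPlayer *ijkmp_create(int (*msg_loop)(void*))
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
    if (!mp)
        goto fail;
    mp->ffplayer = ffp_create();   // allocates FFPlayer and its msg_queue
    if (!mp->ffplayer)
        goto fail;
    mp->msg_loop = msg_loop;       // message_loop passed down from the JNI layer
    ijkmp_inc_ref(mp);
    pthread_mutex_init(&mp->mutex, NULL);
    return mp;
fail:
    ijkmp_destroy_p(&mp);
    return NULL;
}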
IjkMediaPlayer_setDataSource
int ijkmp_set_data_source(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);
MPTRACE("ijkmp_set_data_source(url=\"%s\")\n", url);
pthread_mutex_lock(&mp->mutex);
int retval = ijkmp_set_data_source_l(mp, url);
pthread_mutex_unlock(&mp->mutex);
MPTRACE("ijkmp_set_data_source(url=\"%s\")=%d\n", url, retval);
return retval;
}
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
freep((void**)&mp->data_source);
mp->data_source = strdup(url);
if (!mp->data_source) return EIJK_OUT_OF_MEMORY;
ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
return 0;
}
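The MPST_RET_IF_EQ lines above act as a state-machine guard: setDataSource is only legal in the IDLE state, so any other current state makes the call return early with an error. A sketch of the macro (assuming the error code used is EIJK_INVALID_STATE):
// Sketch of the state guard: bail out with an error when the player's
// current state matches one of the states the operation is not allowed in.
#define MPST_RET_IF_EQ_INT(real, expected, errcode) \
    do { if ((real) == (expected)) return (errcode); } while (0)

#define MPST_RET_IF_EQ(real, expected) \
    MPST_RET_IF_EQ_INT(real, expected, EIJK_INVALID_STATE)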
IjkMediaPlayer_prepareAsync
Call flow: IjkMediaPlayer_prepareAsync -> ijkmp_prepare_async_l -> ffp_prepare_async_l -> stream_open.
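Along the way, ijkmp_prepare_async_l is also where the message-loop thread is started before the data source is handed to FFPlayer. A trimmed sketch (state checks and error handling omitted; details may differ slightly from the current source):
// Trimmed sketch of ijkmp_prepare_async_l: switch to ASYNC_PREPARING,
// start the message queue and its consumer thread, then let FFPlayer prepare.
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
    msg_queue_start(&mp->ffplayer->msg_queue);
    ijkmp_inc_ref(mp);   // released when the message loop thread exits
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }
    return 0;
}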
stream_open
This function does a lot at once: it allocates the VideoState and sets up everything playback needs (frame/packet queues, clocks, and the refresh and read threads).
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
assert(!ffp->is);
VideoState *is;
is = av_mallocz(sizeof(VideoState));
if (!is) return NULL;
is->filename = av_strdup(filename);
if (!is->filename) goto fail;
is->iformat = iformat;
is->ytop = 0;
is->xleft = 0;
#if defined(__ANDROID__)
if (ffp->soundtouch_enable) {
is->handle = ijk_soundtouch_create();
}
#endif
/* start video display */
if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
goto fail;
if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
goto fail;
if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
goto fail;
if (packet_queue_init(&is->videoq) < 0 ||
packet_queue_init(&is->audioq) < 0 ||
packet_queue_init(&is->subtitleq) < 0)
goto fail;
if (!(is->continue_read_thread = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
goto fail;
}
if (!(is->video_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
if (!(is->audio_accurate_seek_cond = SDL_CreateCond())) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
ffp->enable_accurate_seek = 0;
}
init_clock(&is->vidclk, &is->videoq.serial);
init_clock(&is->audclk, &is->audioq.serial);
init_clock(&is->extclk, &is->extclk.serial);
is->audio_clock_serial = -1;
if (ffp->startup_volume < 0)
av_log(NULL, AV_LOG_WARNING, "-volume=%d < 0, setting to 0\n", ffp->startup_volume);
if (ffp->startup_volume > 100)
av_log(NULL, AV_LOG_WARNING, "-volume=%d > 100, setting to 100\n", ffp->startup_volume);
ffp->startup_volume = av_clip(ffp->startup_volume, 0, 100);
ffp->startup_volume = av_clip(SDL_MIX_MAXVOLUME * ffp->startup_volume / 100, 0, SDL_MIX_MAXVOLUME);
is->audio_volume = ffp->startup_volume;
is->muted = 0;
is->av_sync_type = ffp->av_sync_type;
is->play_mutex = SDL_CreateMutex();
is->accurate_seek_mutex = SDL_CreateMutex();
ffp->is = is;
is->pause_req = !ffp->start_on_prepared;
// Create the video refresh (render) thread
is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
if (!is->video_refresh_tid) {
av_freep(&ffp->is);
return NULL;
}
is->initialized_decoder = 0;
// Create the read (demux) thread
is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
if (!is->read_tid) {
av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError());
goto fail;
}
if (ffp->async_init_decoder && !ffp->video_disable && ffp->video_mime_type && strlen(ffp->video_mime_type) > 0
&& ffp->mediacodec_default_name && strlen(ffp->mediacodec_default_name) > 0) {
if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2) {
decoder_init(&is->viddec, NULL, &is->videoq, is->continue_read_thread);
ffp->node_vdec = ffpipeline_init_video_decoder(ffp->pipeline, ffp);
}
}
is->initialized_decoder = 1;
return is;
fail:
is->initialized_decoder = 1;
is->abort_request = true;
if (is->video_refresh_tid)
SDL_WaitThread(is->video_refresh_tid, NULL);
stream_close(ffp);
return NULL;
}
- video_refresh_thread is mainly responsible for audio/video synchronization and for driving video rendering and display (see the sketch below).
- read_thread is responsible for opening the stream, creating the audio and video decoding threads, and reading packets.
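As a feel for the first of the two, here is a simplified sketch of video_refresh_thread: the loop sleeps for the remaining display time and lets video_refresh() pick the next frame from pictq according to the master clock (the real synchronization logic lives inside video_refresh).
// Simplified sketch of the refresh loop (REFRESH_RATE is the default poll
// interval); not the full ijkplayer implementation.
static int video_refresh_thread(void *arg)
{
    FFPlayer *ffp = arg;
    VideoState *is = ffp->is;
    double remaining_time = 0.0;
    while (!is->abort_request) {
        if (remaining_time > 0.0)
            av_usleep((int64_t)(remaining_time * 1000000.0));
        remaining_time = REFRESH_RATE;
        if (!is->paused || is->force_refresh)
            video_refresh(ffp, &remaining_time);
    }
    return 0;
}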
message_loop
IjkMediaPlayer creates a message thread during prepareAsync and runs a loop on it, similar in spirit to the Looper/message-queue mechanism on Android.
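A simplified sketch of the inner loop on the JNI side (here called message_loop_n): it blocks on ijkmp_get_msg() and forwards each AVMessage to Java via post_event. The real message_loop also attaches/detaches the thread to the JVM and handles many more message types; this is a trimmed illustration, not the full source.
// Simplified sketch of the message pump: block on the queue and translate
// each native AVMessage into a callback on the Java player.
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
    while (1) {
        AVMessage msg;
        int retval = ijkmp_get_msg(mp, &msg, 1);   // blocks until a message arrives
        if (retval < 0)
            break;                                  // queue aborted: player is shutting down
        switch (msg.what) {
        case FFP_MSG_PREPARED:
            post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
            break;
        case FFP_MSG_COMPLETED:
            post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
            break;
        // ... other FFP_MSG_* values are mapped to Java-layer events the same way
        default:
            break;
        }
        msg_free_res(&msg);
    }
}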