1. In testing, I found that playing video with the IJK player differs from playing it with the system player (MediaPlayer): IJK playback feels slightly sluggish, not as smooth as the system player.

The cause turned out to be simple: I had set the frame-drop value (framedrop) to 5. After changing it to 1, the two players felt about the same. Dropping 5 frames in a row is visible to the human eye; at 30 fps, for instance, five consecutive dropped frames skip roughly 170 ms of motion.

2. Playing a 4K (30 fps) video on a Qualcomm Snapdragon 660 did not work properly: the decoder actually produced about 20 frames per second, but only about 4 frames per second were displayed, causing a stuttering picture and audio/video desync.

It turned out that many frames decoded slowly on this Qualcomm device, so the video fell behind the audio. The hard-decode frame-drop logic then judged the video to be slower than the audio on almost every frame and kept dropping frames, which showed up as stuttering. The numbers are roughly consistent with this: with framedrop set to 5, the logic shown below drops up to five late frames for every one it displays, and 20 decoded fps / 6 ≈ 3–4 displayed fps.

Frame-dropping principle

The first thing to work out is where frames should be dropped, and which frames to drop.

Frames can be dropped either before decoding or after decoding.

To drop a frame before decoding, you must first check its type: B or P frames can be discarded on their own, but if you need to discard an I frame you must discard the whole GOP, otherwise the frames that depend on it will decode into a corrupted picture. (A minimal sketch of the pre-decode approach follows below.)

Dropping after decoding does not depend on frame type, because the frame is already decoded data such as YUV; you simply compare its PTS against the master clock to decide whether audio and video are out of sync, and drop it if so.
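FFmpeg exposes the pre-decode case directly through the skip_frame field of AVCodecContext, so you do not have to parse frame types yourself. A minimal sketch, assuming dec_ctx is an already-opened decoder context (the helper name and the severe_lag flag are illustrative, not from IJK):

#include <libavcodec/avcodec.h>

// Pre-decode frame dropping: ask the decoder itself to skip frames by type.
// "set_predecode_drop" and "severe_lag" are hypothetical names for illustration.
static void set_predecode_drop(AVCodecContext *dec_ctx, int severe_lag)
{
    if (severe_lag)
        dec_ctx->skip_frame = AVDISCARD_NONKEY; // keep only I frames: whole GOPs are skipped
    else
        dec_ctx->skip_frame = AVDISCARD_BIDIR;  // skip B frames, which nothing else references
    // Set back to AVDISCARD_DEFAULT once playback has caught up.
}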

1. Frame-drop design in FFplay

Let's look at FFplay's design first: FFplay drops video frames after they have been decoded.

In the video_thread decoder thread, the get_video_frame function decodes a packet into an AVFrame and then decides whether that frame should be dropped.

static int get_video_frame(VideoState *is, AVFrame *frame)
{
    int got_picture;

    // Decode one AVFrame
    if ((got_picture = decoder_decode_frame(&is->viddec, frame, NULL)) < 0)
        return -1;

    if (got_picture) {
        double dpts = NAN;

        if (frame->pts != AV_NOPTS_VALUE)
            dpts = av_q2d(is->video_st->time_base) * frame->pts;

        frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);

        // framedrop > 0, or framedrop enabled and the master clock is not the video clock
        if (framedrop > 0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
            if (frame->pts != AV_NOPTS_VALUE) {
                // diff < 0: the video frame is behind the master (audio) clock
                double diff = dpts - get_master_clock(is);
                // AV_NOSYNC_THRESHOLD: if the error is too large, no correction (and no drop) is made
                if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                    diff - is->frame_last_filter_delay < 0 &&
                    is->viddec.pkt_serial == is->vidclk.serial &&
                    is->videoq.nb_packets) {
                    is->frame_drops_early++;
                    av_frame_unref(frame);
                    got_picture = 0;
                }
            }
        }
    }
    return got_picture;
}
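The drop condition packs a lot into one if statement. Below is the same test pulled out into a standalone helper, a sketch for readability only (the function and parameter names are mine, not ffplay's), with a worked example in the closing comment:

#include <math.h>
#include <stdbool.h>

#define AV_NOSYNC_THRESHOLD 10.0 // same threshold ffplay uses, in seconds

// A sketch of ffplay's early-drop test with the VideoState fields passed in
// explicitly. Returns true if the freshly decoded frame should be discarded.
static bool should_drop_early(double dpts, double master_clock,
                              double last_filter_delay,
                              int pkt_serial, int clk_serial, int queued_packets)
{
    double diff = dpts - master_clock;          // < 0: frame is behind the master clock
    return !isnan(diff) &&
           fabs(diff) < AV_NOSYNC_THRESHOLD &&  // too far off? then resync instead of dropping
           diff - last_filter_delay < 0 &&      // still late after accounting for filter delay
           pkt_serial == clk_serial &&          // frame belongs to the current playback (no seek in between)
           queued_packets > 0;                  // more input is waiting, so we can afford to skip
}

// Example: a frame stamped 1.000 s while the audio clock reads 1.040 s gives
// diff = -0.040 s, so the frame is 40 ms late and gets dropped.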

2. Frame-drop design in IJK

In IJK, frames are likewise dropped after they have been decoded; the implementation splits into a hard-decode (MediaCodec) path and a soft-decode path.

2.1 Hard-decode frame-drop design

In ffpipenode_android_mediacodec_vdec.c, the func_run_sync function carries the entire hard-decode logic: it pulls decoded buffers out of MediaCodec, applies the frame-drop check, and queues the surviving frames for display.

static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv                *env           = NULL;
    IJKFF_Pipenode_Opaque *opaque        = node->opaque;
    FFPlayer              *ffp           = opaque->ffp;
    VideoState            *is            = ffp->is;
    Decoder               *d             = &is->viddec;
    PacketQueue           *q             = d->queue;
    int                    ret           = 0;
    int                    dequeue_count = 0;
    AVFrame               *frame         = NULL;
    int                    got_frame     = 0;
    AVRational             tb            = is->video_st->time_base;
    AVRational             frame_rate    = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double                 duration;
    double                 pts;

    // No MediaCodec instance: fall back to the soft-decode video thread
    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    frame = av_frame_alloc();
    if (!frame)
        goto fail;

    // Start the input thread (enqueue_thread_func) that feeds packets into MediaCodec
    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    // Loop: pull decoded frames from MediaCodec's output buffers
    while (!q->abort_request) {
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        got_frame = 0;
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
        if (opaque->acodec_first_dequeue_output_request) {
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        // Error while pulling data
        if (ret != 0) {
            ret = -1;
            if (got_frame && frame->opaque) {
                // Release the buffer with render = false: MediaCodec discards this frame
                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            goto fail;
        }
        if (got_frame) {
            duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            // framedrop > 0, or framedrop enabled and the master clock is not the video clock
            if (ffp->framedrop > 0 || (ffp->framedrop && ffp_get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
                ffp->stat.decode_frame_count++; // count decoded frames
                if (frame->pts != AV_NOPTS_VALUE) {
                    double dpts = pts;
                    // diff < 0: the video frame is behind the master (audio) clock
                    double diff = dpts - ffp_get_master_clock(is);
                    // AV_NOSYNC_THRESHOLD: if the error is too large, no correction (and no drop) is made
                    if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                        diff - is->frame_last_filter_delay < 0 &&
                        is->viddec.pkt_serial == is->vidclk.serial &&
                        is->videoq.nb_packets) {
                        is->continuous_frame_drops_early++; // consecutive early drops, initially 0
                        if (is->continuous_frame_drops_early > ffp->framedrop) {
                            // Too many consecutive drops: reset the counter and let this frame through
                            is->continuous_frame_drops_early = 0;
                        } else {
                            ffp->stat.drop_frame_count++;
                            ffp->stat.drop_frame_rate = (float)(ffp->stat.drop_frame_count) / (float)(ffp->stat.decode_frame_count);
                            if (frame->opaque) {
                                // Release with render = false: MediaCodec discards the frame without displaying it
                                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
                            }
                            av_frame_unref(frame); // release the frame
                            continue;
                        }
                    }
                }
            }
            // Queue the surviving frame into the decoded-picture queue for display
            ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
            if (ret) {
                // Enqueue failed: release with render = false so MediaCodec discards the frame
                if (frame->opaque)
                    SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            av_frame_unref(frame);
        }
    }

fail:
    av_frame_free(&frame);
    opaque->abort = true;
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    SDL_AMediaCodecFake_abort(opaque->acodec);
    if (opaque->n_buf_out) {
        free(opaque->amc_buf_out);
        opaque->n_buf_out = 0;
        opaque->amc_buf_out = NULL;
        opaque->off_buf_out = 0;
        opaque->last_queued_pts = AV_NOPTS_VALUE;
    }
    if (opaque->acodec) {
        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
        SDL_LockMutex(opaque->acodec_mutex);
        SDL_UnlockMutex(opaque->acodec_mutex);
    }
    SDL_AMediaCodec_stop(opaque->acodec);
    SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0 // if hard decoding fails, fall back to soft decoding
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}
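Compared with FFplay, the notable addition is the continuous_frame_drops_early counter: IJK caps the number of frames dropped in a row at the framedrop value, then forces one frame through. A minimal sketch of just that throttle, with a hypothetical struct standing in for the real FFPlayer/VideoState fields:

// A sketch of IJK's consecutive-drop throttle. "DropThrottle" is a
// hypothetical stand-in for the FFPlayer/VideoState state; "framedrop" is
// the player option from the opening anecdote (tuned from 5 down to 1).
typedef struct DropThrottle {
    int framedrop;         // max consecutive drops allowed
    int consecutive_drops; // IJK's continuous_frame_drops_early
} DropThrottle;

// Called once per late frame; returns 1 to drop it, 0 to display it anyway.
static int throttle_drop(DropThrottle *t)
{
    if (++t->consecutive_drops > t->framedrop) {
        t->consecutive_drops = 0; // force this frame through to the display
        return 0;
    }
    return 1;
}

// With framedrop = 5, up to five late frames in a row are discarded before
// one is forced onto the screen; with framedrop = 1, every other late frame
// is shown, which is why the smaller value felt smoother in the tests above.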

2.2 Soft-decode frame-drop design

static int get_video_frame(FFPlayer *ffp, AVFrame *frame)
{
    VideoState *is = ffp->is;
    int got_picture;

    ffp_video_statistic_l(ffp);
    int64_t startTime = av_gettime_relative(); // used only by the debug timing below

    // Decode one AVFrame
    if ((got_picture = decoder_decode_frame(ffp, &is->viddec, frame, NULL)) < 0)
        return -1;

    /*
    if (frame->key_frame) {
        int64_t endTime = av_gettime_relative();
        int usetime = endTime - startTime;
        ALOGE("zmlruan>>>>>>usetime:%d", usetime);
    }
    */

    if (got_picture) {
        double dpts = NAN;

        if (frame->pts != AV_NOPTS_VALUE)
            dpts = av_q2d(is->video_st->time_base) * frame->pts;

        frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);

        // framedrop > 0, or framedrop enabled and the master clock is not the video clock
        if (ffp->framedrop > 0 || (ffp->framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
            ffp->stat.decode_frame_count++; // count decoded frames
            if (frame->pts != AV_NOPTS_VALUE) {
                // diff < 0: the video frame is behind the master (audio) clock
                double diff = dpts - get_master_clock(is);
                // AV_NOSYNC_THRESHOLD: if the error is too large, no correction (and no drop) is made
                if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                    diff - is->frame_last_filter_delay < 0 &&
                    is->viddec.pkt_serial == is->vidclk.serial &&
                    is->videoq.nb_packets) {
                    is->frame_drops_early++;
                    is->continuous_frame_drops_early++;
                    if (is->continuous_frame_drops_early > ffp->framedrop) {
                        // Too many consecutive drops: reset the counter and let this frame through
                        is->continuous_frame_drops_early = 0;
                    } else {
                        ffp->stat.drop_frame_count++;
                        ffp->stat.drop_frame_rate = (float)(ffp->stat.drop_frame_count) / (float)(ffp->stat.decode_frame_count);
                        av_frame_unref(frame);
                        got_picture = 0; // report that no frame was obtained: it was dropped
                    }
                }
            }
        }
    }
    return got_picture;
}

That is IJK's frame-drop logic: like FFplay, it drops frames after decoding when the video lags the master clock, but it also counts consecutive drops (continuous_frame_drops_early) and forces a frame through once the count exceeds framedrop, so the picture keeps updating even when decoding cannot keep up.