Work on the project has been slow recently, so I tidied up some earlier work: using FFmpeg to decode a live stream. An earlier project was a smart-home control and monitoring system. The monitoring side had multiple cameras; each one had to be displayed, and its preview image had to appear in several different positions in sync. We had previously used VLC for decoding and display, but with that many preview sources, each view decoded its stream separately. One problem was the performance cost; the other was that every time a source was dragged onto a controller for preview, it was decoded yet again. Clearly, that approach did not meet the requirements.

Ideally, each source is decoded exactly once, and the same decoded data is rendered directly in multiple places. Here is what I did.

Because of limitations in the screen-recording software, the demo cannot be shown in motion; these three screenshots show a single stream being decoded once and displayed in several different places.

This builds on someone else's code, to which I added thread optimizations. I wrapped everything in a single class, so calling it is very simple. A source-code download link is at the end.
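For example, here is a minimal usage sketch. The view-controller context and the RTSP address are placeholder assumptions; initWithFrame: and Video: are the class's actual entry points, shown below.

// Minimal usage sketch, e.g. inside a view controller.
// The stream address is a placeholder.
#import "CQMovieView.h"

- (void)viewDidLoad {
    [super viewDidLoad];
    CQMovieView *movieView = [[CQMovieView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:movieView];
    // Start decoding; takes a local file path or a network stream URL
    [movieView Video:@"rtsp://192.168.1.100:554/stream1"];
}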


@implementation CQMovieView
{
    AVFormatContext             *FormatCtx;          // demuxer context
    AVCodecContext              *codecCtx;           // decoder context
    AVFrame                     *avframe;            // decoded (YUV) frame
    AVStream                    *stream;             // the video stream
    AVPacket                    packet;              // current compressed packet
    AVPicture                   picture;             // RGB conversion buffer
    
    int                         videoStram;          // index of the video stream
    double                      fps;
    BOOL                        isReleaseResources;
    dispatch_queue_t            queue;               // serial decode queue
    UIImageView                 *imageView;          // where frames are rendered
}

- (instancetype)initWithFrame:(CGRect)frame
{
    // Nothing extra to set up; super already applies the frame
    self = [super initWithFrame:frame];
    return self;
}

- (void)Video:(NSString *)moviePath
{
    // Serial queue so decoding never blocks the main thread
    queue = dispatch_queue_create("LABEL", DISPATCH_QUEUE_SERIAL);
    
    self.cruutenPath = [moviePath copy];
    
    imageView = [[UIImageView alloc] initWithFrame:self.bounds];
    [self addSubview:imageView];
    
    // Open and decode off the main thread
    dispatch_async(queue, ^{
        [self initializeResources:[moviePath UTF8String]];
    });
}

- (void)displayNextFrame:(NSTimer *)timer {
    NSTimeInterval startTime = [NSDate timeIntervalSinceReferenceDate];
    
    // Stop the timer once no more frames can be decoded
    if (![self stepFrame]) {
        [timer invalidate];
        return;
    }
    imageView.image = self.currentImage;
    
    // Smooth the measured (instantaneous) frame rate
    float frameTime = 1.0 / ([NSDate timeIntervalSinceReferenceDate] - startTime);
    if (_lastFrameTime < 0) {
        _lastFrameTime = frameTime;
    } else {
        _lastFrameTime = LERP(frameTime, _lastFrameTime, 0.8);
    }
}

- (BOOL)initializeResources:(const char *)filePath {
    
    isReleaseResources = NO;
    AVCodec *pCodec;
    // Register all codecs and initialize networking
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    
    // Open the video file or stream
    if (avformat_open_input(&FormatCtx, filePath, NULL, NULL) != 0) {
        NSLog(@"Failed to open the file");
        return NO;
    }
    
    // Read stream information
    if (avformat_find_stream_info(FormatCtx, NULL) < 0) {
        NSLog(@"Failed to find stream info");
        return NO;
    }
    
    // Find the first (best) video stream
    if ((videoStram = av_find_best_stream(FormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) < 0) {
        NSLog(@"No video stream found");
        return NO;
    }
    
    // Get the codec context of the video stream
    stream = FormatCtx->streams[videoStram];
    codecCtx = stream->codec;
#if DEBUG
    // Dump detailed information about the video stream
    av_dump_format(FormatCtx, videoStram, filePath, 0);
#endif
    if (stream->avg_frame_rate.den && stream->avg_frame_rate.num) {
        fps = av_q2d(stream->avg_frame_rate);
    } else {
        fps = 30;
    }
    // Find the decoder
    pCodec = avcodec_find_decoder(codecCtx->codec_id);
    if (pCodec == NULL) {
        NSLog(@"No decoder found");
        return NO;
    }
    // Open the decoder
    if (avcodec_open2(codecCtx, pCodec, NULL) < 0) {
        NSLog(@"Failed to open the decoder");
        return NO;
    }
    
    // Allocate the video frame
    avframe = av_frame_alloc();
    _outputWidth = codecCtx->width;
    _outputHeight = codecCtx->height;
    
    // Drive display from the main thread at the stream's frame rate
    dispatch_async(dispatch_get_main_queue(), ^{
        [self seekTime:0.0];
        [NSTimer scheduledTimerWithTimeInterval: 1 / fps
                                         target:self
                                       selector:@selector(displayNextFrame:)
                                       userInfo:nil
                                        repeats:YES];
    });
    
    return YES;
}
- (void)seekTime:(double)seconds {
    // Convert seconds into the stream's time_base units:
    // timestamp = seconds * (den / num)
    AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
    int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
    avformat_seek_file(FormatCtx, videoStram, 0, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
    avcodec_flush_buffers(codecCtx);
}


- (BOOL)stepFrame
{
    // Read packets until one full video frame has been decoded
    int frameFinished = 0;
    while (!frameFinished && av_read_frame(FormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStram) {
            avcodec_decode_video2(codecCtx, avframe, &frameFinished, &packet);
        }
    }
    
    // No more frames: end of stream, release the resources
    if (frameFinished == 0 && isReleaseResources == NO) {
        [self releaseResources];
    }
    return frameFinished != 0;
}

- (void)replaceTheResources:(NSString *)moviePath {
    // Tear down the old source before switching to a new one
    if (!isReleaseResources) {
        [self releaseResources];
    }
    self.cruutenPath = [moviePath copy];
    [self initializeResources:[moviePath UTF8String]];
}

- (void)redialPaly
{
    // Restart playback from the current source path
    [self initializeResources:[self.cruutenPath UTF8String]];
}
#pragma mark - Property accessor overrides
-(void)setOutputWidth:(int)newValue {
    if (_outputWidth == newValue)return;
    _outputWidth = newValue;
}

-(void)setOutputHeight:(int)newValue {
    if (_outputHeight == newValue) return;
    _outputHeight = newValue;
}

- (UIImage *)currentImage {
    if (!avframe->data[0]) return nil;
    return [self imageFromAVPicture];
}

-(double)duration {
    return (double)FormatCtx->duration / AV_TIME_BASE;
}

- (double)currentTime {
    AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
    return packet.pts * (double)timeBase.num / timeBase.den;
}

- (int)sourceWidth {
    return codecCtx->width;
}
- (int)sourceHeight {
    return codecCtx->height;
}

- (double)fps {
    return fps;
}

#pragma mark - Internal methods
- (UIImage *)imageFromAVPicture
{
    // (Re)allocate the RGB buffer for the converted frame
    avpicture_free(&picture);
    avpicture_alloc(&picture, AV_PIX_FMT_RGB24, _outputWidth, _outputHeight);
    // Convert the decoded frame to RGB24 at the output size, using the
    // frame's actual pixel format rather than assuming YUV420P
    struct SwsContext *imgConverCtx = sws_getContext(avframe->width,
                                                     avframe->height,
                                                     (enum AVPixelFormat)avframe->format,
                                                     _outputWidth,
                                                     _outputHeight,
                                                     AV_PIX_FMT_RGB24,
                                                     SWS_FAST_BILINEAR,
                                                     NULL,
                                                     NULL,
                                                     NULL);
    if (imgConverCtx == NULL) return nil;
    sws_scale(imgConverCtx,
              avframe->data,
              avframe->linesize,
              0,
              avframe->height,
              picture.data,
              picture.linesize);
    sws_freeContext(imgConverCtx);
    // Wrap the RGB24 buffer in a CGImage, then a UIImage
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CFDataRef data = CFDataCreate(kCFAllocatorDefault, picture.data[0], picture.linesize[0] * _outputHeight);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef cgImage = CGImageCreate(_outputWidth, _outputHeight, 8, 24, picture.linesize[0], colorSpace, bitmapInfo, provider, NULL, NO, kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    CFRelease(data);
    return image;
}


#pragma mark --------------------------
#pragma mark - Releasing resources
- (void)releaseResources {
    NSLog(@"Releasing resources");
    isReleaseResources = YES;
    // Free the RGB picture buffer
    avpicture_free(&picture);
    // Free the last packet
    av_packet_unref(&packet);
    // Free the YUV frame (pairs with av_frame_alloc)
    av_frame_free(&avframe);
    // Close the decoder
    if (codecCtx) avcodec_close(codecCtx);
    // Close the file
    if (FormatCtx) avformat_close_input(&FormatCtx);
    avformat_network_deinit();
}

@end
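To get the "decode once, display everywhere" effect from the introduction, the single decoder's output image can simply be assigned to several image views: UIImage is immutable, so every view shares the same pixel data and no re-decoding happens. The helper below is my own illustration of that idea (CQMultiDisplay and pushFrame: are hypothetical names, not part of CQMovieView):

#import <UIKit/UIKit.h>

// Hypothetical helper: fan one decoded frame out to many views.
@interface CQMultiDisplay : NSObject
@property (nonatomic, strong) NSMutableArray<UIImageView *> *views;
- (void)pushFrame:(UIImage *)image;
@end

@implementation CQMultiDisplay
- (instancetype)init {
    if (self = [super init]) {
        _views = [NSMutableArray array];
    }
    return self;
}

// Call once per decoded frame, e.g. from displayNextFrame: with
// self.currentImage, instead of setting a single imageView.
- (void)pushFrame:(UIImage *)image {
    for (UIImageView *view in self.views) {
        view.image = image; // all views share one immutable UIImage
    }
}
@end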


The complete source code file is available from the download link.