GPUImageRawDataInput:

#import "RawDataViewController.h"
#import "GPUImage.h"

@interface RawDataViewController ()

@property (nonatomic, strong) GPUImageRawDataInput *rawDataInput;
@property (nonatomic, strong) GPUImageRawDataOutput *rawDataOutput;

@property (nonatomic, strong) GPUImageBrightnessFilter *filter;
@property (nonatomic, strong) GPUImageView *filterView;

@end


@implementation RawDataViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    self.filterView = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 100, self.view.frame.size.width, 300)];
    [self.view addSubview:self.filterView];
    
    
    // 1. UIImage -> CGImage -> CFDataRef -> UInt8 * data
    UIImage *image = [UIImage imageNamed:@"img1.jpg"];
    CGImageRef newImageSource = [image CGImage];
    CFDataRef dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource));
    GLubyte* imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);
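    // CFDataGetBytePtr returns a pointer into the CFData's buffer, so dataFromImageDataProvider
    // must stay alive until the bytes have been uploaded (it is released at the end of this method)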
    
    // 2. UInt8 * data -> GPUImageRawDataInput
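    // Pixel format assumption: the CGImage's backing data is treated as RGBA here;
    // if the colors come out swapped, GPUPixelFormatBGRA is the usual alternative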
    self.rawDataInput = [[GPUImageRawDataInput alloc] initWithBytes:imageData size:image.size pixelFormat:GPUPixelFormatRGBA];
    
    self.filter = [[GPUImageBrightnessFilter alloc] init];
    self.filter.brightness = 0.1;
    

    [self.rawDataInput addTarget:self.filter];
    // 3. Output to GPUImageView
    [self.filter addTarget:self.filterView];

    
    // 4. Output to raw data output
    self.rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:image.size resultsInBGRAFormat:YES];
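    // resultsInBGRAFormat:YES means rawBytesForImage returns BGRA bytes, which matches the
    // kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little bitmap info used below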
    [self.filter addTarget:self.rawDataOutput];
    
    // Important: call useNextFrameForImageCapture before processData so the filter holds on to its framebuffer
    [self.filter useNextFrameForImageCapture];
    [self.rawDataInput processData];
    
    
    // 5. read data from GPUImageRawDataOutput
    [self.rawDataOutput lockFramebufferForReading];
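    // rawBytesForImage returns a pointer into the output framebuffer; it is only valid
    // between lockFramebufferForReading and unlockFramebufferAfterReading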
    
    GLubyte *outputBytes = [self.rawDataOutput rawBytesForImage];
    NSInteger bytesPerRow = [self.rawDataOutput bytesPerRowInOutput];

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, outputBytes, bytesPerRow * image.size.height, NULL);
    CGImageRef cgImage = CGImageCreate(image.size.width, image.size.height, 8, 32, bytesPerRow, rgbColorSpace, kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Little, provider, NULL, true, kCGRenderingIntentDefault);
    
    [self.rawDataOutput unlockFramebufferAfterReading];

    // Break on this line and view outImage
    UIImage *outImage = [UIImage imageWithCGImage:cgImage];
    NSLog(@"%@", outImage);

    // Release the Core Graphics / Core Foundation objects created above (ARC does not manage them)
    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(rgbColorSpace);
    CFRelease(dataFromImageDataProvider);
}

@end


Usage scenarios

This example can be used in the following scenarios:

  • Use FFmpeg to decode a frame of the video stream into RGBA data;
  • Pass the data to GPUImageRawDataInput -> add a filter -> output to GPUImageRawDataOutput;
  • Fetch the filtered RGBA data from GPUImageRawDataOutput and hand it to FFmpeg to encode into a new video file (a sketch of this per-frame loop follows the list).
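
Below is a rough per-frame sketch of that loop. It assumes hypothetical decodeNextFrameRGBA() / encodeBGRAFrame() helpers standing in for the actual FFmpeg decode and encode calls, plus a frameSize variable holding the video dimensions; the GPUImage calls (updateDataFromBytes:size:, processData, newFrameAvailableBlock) are the part shown above:

__weak GPUImageRawDataOutput *weakOutput = self.rawDataOutput;
[self.rawDataOutput setNewFrameAvailableBlock:^{
    // Read the filtered BGRA bytes while the output framebuffer is locked
    [weakOutput lockFramebufferForReading];
    GLubyte *filteredBytes = [weakOutput rawBytesForImage];
    NSUInteger bytesPerRow = [weakOutput bytesPerRowInOutput];
    encodeBGRAFrame(filteredBytes, bytesPerRow);   // hypothetical FFmpeg encode helper
    [weakOutput unlockFramebufferAfterReading];
}];

// For every frame decoded by FFmpeg: re-upload the RGBA bytes and push them through the chain
GLubyte *frameBytes = decodeNextFrameRGBA();       // hypothetical FFmpeg decode helper
[self.rawDataInput updateDataFromBytes:frameBytes size:frameSize];
[self.rawDataInput processData];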

Note:

  • This whole flow can also be completed with GPUImage alone, without FFmpeg; filtering the raw video stream through OpenGL like this is mainly useful when cross-platform codecs and filters are needed.
  • To view the UIImage from a breakpoint in Xcode: pause on the NSLog line, select outImage in the variables view, and use Quick Look (the eye icon or the space bar) to preview the image.