GPUImage Source Code Reading (15)

GPUImageRawDataOutput

GPUImageRawDataOutput implements the GPUImageInput protocol; it converts the framebuffer it receives into raw pixel data that can be read on the CPU.
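
To see where this class sits in a pipeline, here is a minimal usage sketch. The `videoCamera`, the capture preset, and the 480x640 size are assumptions for illustration; only the GPUImageRawDataOutput call is from the class discussed in this article.

```
#import "GPUImage.h"

GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

// Ask for raw bytes matching the portrait frame size, delivered as BGRA
GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(480.0, 640.0) resultsInBGRAFormat:YES];
[videoCamera addTarget:rawDataOutput];
[videoCamera startCameraCapture];
```

The per-frame pattern for locking, reading, and unlocking the framebuffer is shown at the end of this article.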

  • Constructor. The constructor's main job is to build the GL program.

```
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
```

  • At initialization you specify the texture size and whether the resulting data should be delivered in BGRA format. Whether the kGPUImageColorSwizzlingFragmentShaderString shader (which swaps the red and blue channels) or the plain pass-through shader is used depends on the combination of the outputBGRA flag and fast-texture-upload support, as the branch in the code below shows. The swizzling shader itself is listed after the constructor.

```
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    self.enabled = YES;
    lockNextFramebuffer = NO;
    outputBGRA = resultsInBGRAFormat;
    imageSize = newImageSize;
    hasReadFromTheCurrentFrame = NO;
    _rawBytesForImage = NULL;
    inputRotation = kGPUImageNoRotation;

    [GPUImageContext useImageProcessingContext];
    // When a channel swizzle is needed (BGRA output without fast texture upload, or
    // RGBA output with it), use the kGPUImageColorSwizzlingFragmentShaderString shader
    if ( (outputBGRA && ![GPUImageContext supportsFastTextureUpload]) || (!outputBGRA && [GPUImageContext supportsFastTextureUpload]) )
    {
        dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
    }
    // Otherwise use the kGPUImagePassthroughFragmentShaderString shader
    else
    {
        dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
    }

    if (!dataProgram.initialized)
    {
        [dataProgram addAttribute:@"position"];
        [dataProgram addAttribute:@"inputTextureCoordinate"];

        if (![dataProgram link])
        {
            NSString *progLog = [dataProgram programLog];
            NSLog(@"Program link log: %@", progLog);
            NSString *fragLog = [dataProgram fragmentShaderLog];
            NSLog(@"Fragment shader compile log: %@", fragLog);
            NSString *vertLog = [dataProgram vertexShaderLog];
            NSLog(@"Vertex shader compile log: %@", vertLog);
            dataProgram = nil;
            NSAssert(NO, @"Filter shader link failed");
        }
    }
    // Look up the attribute and uniform locations
    dataPositionAttribute = [dataProgram attributeIndex:@"position"];
    dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"];
    dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"];

    return self;
}
```
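
For reference, the swizzling shader chosen in the branch above differs from the pass-through shader only in its last line: it samples the input texture with a `.bgra` swizzle so that the red and blue channels are exchanged on output. The listing below is a sketch of that shader as defined in GPUImageRawDataOutput.m (reproduced from memory, so treat the exact text as illustrative rather than verbatim):

```
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);
```
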
  • Other methods.

```
// Data access

// Get the pixel color vector at a given location
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;

// Number of bytes per row of output
- (NSUInteger)bytesPerRowInOutput;

// Set the texture size
- (void)setImageSize:(CGSize)newImageSize;

// Lock and unlock the framebuffer
- (void)lockFramebufferForReading;
- (void)unlockFramebufferAfterReading;
```

  • The implementations are as follows:

```
// Get the pixel color vector at a given location
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
{
    // Interpret the raw bytes as an array of GPUByteColorVector
    GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage;

    // Clamp the coordinates to 0 <= x < width, 0 <= y < height;
    // the y coordinate is flipped because the buffer is stored upside down
    CGPoint locationToPickFrom = CGPointZero;
    locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));
    locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));

    // For BGRA output, swap the red and blue channels before returning
    if (outputBGRA)
    {
        GPUByteColorVector flippedColor = imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
        GLubyte temporaryRed = flippedColor.red;

        flippedColor.red = flippedColor.blue;
        flippedColor.blue = temporaryRed;

        return flippedColor;
    }
    else
    {
        // Return the pixel color vector at the requested position
        return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
    }
}

// Number of bytes per row of output
- (NSUInteger)bytesPerRowInOutput;
{
    return [retainedFramebuffer bytesPerRow];
}

// Set the output texture size
- (void)setImageSize:(CGSize)newImageSize {
    imageSize = newImageSize;
    if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))
    {
        free(_rawBytesForImage);
        _rawBytesForImage = NULL;
    }
}

// Lock the framebuffer
- (void)lockFramebufferForReading;
{
    lockNextFramebuffer = YES;
}

// Unlock the framebuffer
- (void)unlockFramebufferAfterReading;
{
    [retainedFramebuffer unlockAfterReading];
    [retainedFramebuffer unlock];
    retainedFramebuffer = nil;
}

// Get the RGBA data
- (GLubyte *)rawBytesForImage;
{
    if ( (_rawBytesForImage == NULL) && (![GPUImageContext supportsFastTextureUpload]) )
    {
        // Allocate a buffer to hold the pixels read back from the framebuffer
        _rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte));
        hasReadFromTheCurrentFrame = NO;
    }

    if (hasReadFromTheCurrentFrame)
    {
        return _rawBytesForImage;
    }
    else
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            // Note: the fast texture caches speed up 640x480 frame reads from 9.6 ms to 3.1 ms on iPhone 4S
            // Activate the image-processing GL context
            [GPUImageContext useImageProcessingContext];
            // Render into the framebuffer
            [self renderAtInternalSize];

            if ([GPUImageContext supportsFastTextureUpload])
            {
                // Wait for rendering to finish, then read straight from the texture cache
                glFinish();
                _rawBytesForImage = [outputFramebuffer byteBuffer];
            }
            else
            {
                // Read the pixels back in RGBA format
                glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);
                // GL_EXT_read_format_bgra
                // glReadPixels(0, 0, imageSize.width, imageSize.height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, _rawBytesForImage);
            }

            hasReadFromTheCurrentFrame = YES;
        });

        return _rawBytesForImage;
    }
}
```
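
Putting these pieces together, a typical consumer continues the sketch from the top of this article: it registers a newFrameAvailableBlock (a block property declared on GPUImageRawDataOutput), locks the framebuffer before touching the bytes, walks the rows using bytesPerRowInOutput because rows may be padded, and unlocks afterwards. The weak-reference bookkeeping and the hard-coded 480x640 size are assumptions carried over from the earlier sketch.

```
__weak GPUImageRawDataOutput *weakOutput = rawDataOutput;
[rawDataOutput setNewFrameAvailableBlock:^{
    GPUImageRawDataOutput *strongOutput = weakOutput;

    [strongOutput lockFramebufferForReading];

    GLubyte *outputBytes = [strongOutput rawBytesForImage];
    NSUInteger bytesPerRow = [strongOutput bytesPerRowInOutput];

    // Rows may be padded, so advance by bytesPerRow rather than by width * 4
    NSUInteger nonBlackPixels = 0;
    for (NSUInteger y = 0; y < 640; y++)
    {
        GLubyte *row = outputBytes + y * bytesPerRow;
        for (NSUInteger x = 0; x < 480; x++)
        {
            if (row[x * 4] > 0 || row[x * 4 + 1] > 0 || row[x * 4 + 2] > 0)
            {
                nonBlackPixels++;
            }
        }
    }
    NSLog(@"Non-black pixels in this frame: %lu", (unsigned long)nonBlackPixels);

    // Or sample a single pixel directly
    GPUByteColorVector center = [strongOutput colorAtLocation:CGPointMake(240.0, 320.0)];
    NSLog(@"Center pixel: %d, %d, %d, %d", center.red, center.green, center.blue, center.alpha);

    [strongOutput unlockFramebufferAfterReading];
}];
```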
