/// The AVCaptureSession used to capture from the camera
// The underlying AVCaptureSession object
@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
/// This enables the capture session preset to be changed on the fly
// Controls the quality/size of the video output, e.g. AVCaptureSessionPreset640x480
@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
/// This sets the frame rate of the camera (iOS 5 and above only)
/**
Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
*/
// Frame rate of the video capture
@property (readwrite) int32_t frameRate;
/// Easy way to tell which cameras are present on device
// Whether a front-facing / back-facing camera is present on the device
@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
// Real-time log output to the console
@property(readwrite, nonatomic) BOOL runBenchmark;
/// Use this property to manage camera settings. Focus point, exposure point, etc.
// The camera device in use, for adjusting parameters such as focus
@property(readonly) AVCaptureDevice *inputCamera;
/// This determines the rotation applied to the output image, based on the source material
// Orientation of the output image
@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
// Horizontal mirroring of the front/rear cameras
@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
// The GPUImageVideoCameraDelegate delegate
@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
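// Illustrative sketch (not part of the original file) of configuring these
// properties; `videoCamera` is an assumed, already-created GPUImageVideoCamera:
//
//   videoCamera.captureSessionPreset = AVCaptureSessionPreset1280x720; // change preset on the fly
//   videoCamera.frameRate = 30;                                        // 0 or below restores the preset default
//   videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
//   videoCamera.horizontallyMirrorFrontFacingCamera = YES;
//   videoCamera.delegate = self;                                       // receive raw sample buffers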
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
if (!(self = [super init]))
{
return nil;
}
// Create the audio/video processing queues: camera at DISPATCH_QUEUE_PRIORITY_HIGH, audio at DISPATCH_QUEUE_PRIORITY_LOW
cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);
// Create the frame-rendering semaphore (initial value 1)
frameRenderingSemaphore = dispatch_semaphore_create(1);
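// For context, a sketch of how this semaphore is used later in the
// AVCaptureVideoDataOutputSampleBufferDelegate callback to drop frames when
// rendering falls behind (illustrative, following this class's pattern):
//
//   if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
//   {
//       return; // still rendering the previous frame; drop this one
//   }
//   CFRetain(sampleBuffer);
//   runAsynchronouslyOnVideoProcessingQueue(^{
//       [self processVideoSampleBuffer:sampleBuffer];
//       CFRelease(sampleBuffer);
//       dispatch_semaphore_signal(frameRenderingSemaphore);
//   });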
// Initialize instance variables
_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
_runBenchmark = NO;
capturePaused = NO;
outputRotation = kGPUImageNoRotation;
internalRotation = kGPUImageNoRotation;
captureAsYUV = YES;
_preferredConversion = kColorConversion709;
// Grab the back-facing or front-facing camera, depending on the requested position
_inputCamera = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == cameraPosition)
{
_inputCamera = device;
}
}
// If no matching camera was found, fail initialization
if (!_inputCamera) {
return nil;
}
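// Note: -devicesWithMediaType: (used above) is deprecated as of iOS 10. An
// equivalent lookup with the discovery-session API would look roughly like
// this (sketch, not part of the original code):
//
//   AVCaptureDeviceDiscoverySession *discovery = [AVCaptureDeviceDiscoverySession
//       discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
//                             mediaType:AVMediaTypeVideo
//                              position:cameraPosition];
//   _inputCamera = discovery.devices.firstObject;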
// Create the capture session
_captureSession = [[AVCaptureSession alloc] init];
[_captureSession beginConfiguration];
// Create and add the video input
NSError *error = nil;
videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
if ([_captureSession canAddInput:videoInput])
{
[_captureSession addInput:videoInput];
}
// Create and add the video frame output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setAlwaysDiscardsLateVideoFrames:NO];
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
// Configure the YUV processing path
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
{
BOOL supportsFullYUVRange = NO;
NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
for (NSNumber *currentPixelFormat in supportedPixelFormats)
{
if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
{
supportsFullYUVRange = YES;
}
}
// Full-range YUV is supported
if (supportsFullYUVRange)
{
// Use the kCVPixelFormatType_420YpCbCr8BiPlanarFullRange format
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
isFullYUVRange = YES;
}
else
{
// Use the kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange format
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
isFullYUVRange = NO;
}
}
else
{
// Fall back to the kCVPixelFormatType_32BGRA format
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
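// The format chosen above is what each delivered CMSampleBuffer will carry;
// a downstream handler can verify it like this (illustrative sketch):
//
//   CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
//   OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
//   // pixelFormat matches the videoSettings set above: BiPlanarFullRange,
//   // BiPlanarVideoRange, or kCVPixelFormatType_32BGRA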
// Create the GL program and fetch the attribute/uniform locations
runSynchronouslyOnVideoProcessingQueue(^{
if (captureAsYUV)
{
[GPUImageContext useImageProcessingContext];
// if ([GPUImageContext deviceSupportsRedTextures])
// {
// yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
// }
// else
// {
if (isFullYUVRange)
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
}
else
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
}
// }
if (!yuvConversionProgram.initialized)
{
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
}
});
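// The attribute/uniform locations cached above are used on every frame when
// the Y and CbCr planes are converted to RGB. A sketch of that per-frame
// wiring (illustrative; the texture units and texture variable names here are
// assumptions, not part of this excerpt):
//
//   [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
//   glActiveTexture(GL_TEXTURE4);
//   glBindTexture(GL_TEXTURE_2D, luminanceTexture);
//   glUniform1i(yuvConversionLuminanceTextureUniform, 4);
//   glActiveTexture(GL_TEXTURE5);
//   glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
//   glUniform1i(yuvConversionChrominanceTextureUniform, 5);
//   glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);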
// Set self as the AVCaptureVideoDataOutputSampleBufferDelegate
[videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
// Add the video output to the session
if ([_captureSession canAddOutput:videoOutput])
{
[_captureSession addOutput:videoOutput];
}
else
{
NSLog(@"Couldn't add video output");
return nil;
}
// Apply the session preset (video quality/size)
_captureSessionPreset = sessionPreset;
[_captureSession setSessionPreset:_captureSessionPreset];
// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
// AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//
// if (conn.supportsVideoMinFrameDuration)
// conn.videoMinFrameDuration = CMTimeMake(1,60);
// if (conn.supportsVideoMaxFrameDuration)
// conn.videoMaxFrameDuration = CMTimeMake(1,60);
// Commit the configuration
[_captureSession commitConfiguration];
return self;
}
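// End-to-end usage sketch (illustrative; `filterView` is an assumed
// GPUImageView already in the view hierarchy):
//
//   GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc]
//       initWithSessionPreset:AVCaptureSessionPreset640x480
//              cameraPosition:AVCaptureDevicePositionBack];
//   videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
//   [videoCamera addTarget:filterView];
//   [videoCamera startCameraCapture];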
// Add/remove the audio inputs and outputs
/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
*/
- (BOOL)addAudioInputsAndOutputs;
/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
were removed, or NO if they hadn't been added.
*/
- (BOOL)removeAudioInputsAndOutputs;
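// Usage sketch: adding the audio I/O up front avoids the momentary session
// freeze when an audioEncodingTarget is attached mid-capture (`movieWriter` is
// an assumed GPUImageMovieWriter, not part of this file):
//
//   [videoCamera addAudioInputsAndOutputs];
//   // ... later, when recording actually starts:
//   videoCamera.audioEncodingTarget = movieWriter;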
// Remove all inputs and outputs
/** Tear down the capture session
*/
- (void)removeInputsAndOutputs;
/// @name Manage the camera video stream
/** Start camera capturing
*/
- (void)startCameraCapture;
/** Stop camera capturing
*/
- (void)stopCameraCapture;
/** Pause camera capturing
*/
- (void)pauseCameraCapture;
/** Resume camera capturing
*/
- (void)resumeCameraCapture;
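// Lifecycle sketch (illustrative): pause/resume gate frame delivery without
// tearing down the AVCaptureSession, while start/stop run the session itself:
//
//   [videoCamera startCameraCapture];  // session starts running
//   [videoCamera pauseCameraCapture];  // frames dropped; session keeps running
//   [videoCamera resumeCameraCapture]; // frames flow again
//   [videoCamera stopCameraCapture];   // session stops running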
// Process video data
/** Process a video sample
@param sampleBuffer Buffer to process
*/
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
// Process audio data
/** Process an audio sample
@param sampleBuffer Buffer to process
*/
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
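// Both methods are fed from the AVCaptureVideoDataOutputSampleBufferDelegate
// callback registered in the initializer, which routes by output. Simplified
// sketch (the real callback also applies the frame-dropping semaphore shown
// earlier):
//
//   - (void)captureOutput:(AVCaptureOutput *)captureOutput
//      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
//      fromConnection:(AVCaptureConnection *)connection
//   {
//       if (captureOutput == audioOutput)
//           [self processAudioSampleBuffer:sampleBuffer];
//       else
//           [self processVideoSampleBuffer:sampleBuffer];
//   }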
// Query camera-related parameters
/** Get the position (front, rear) of the source camera
*/
- (AVCaptureDevicePosition)cameraPosition;
/** Get the AVCaptureConnection of the source camera
*/
- (AVCaptureConnection *)videoCaptureConnection;
// Switch between cameras
/** This flips between the front and rear cameras
*/
- (void)rotateCamera;
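// Usage sketch: guard the flip with the class-level presence checks declared
// at the bottom of this header:
//
//   if ([GPUImageVideoCamera isFrontFacingCameraPresent])
//   {
//       [videoCamera rotateCamera]; // swap between front and rear inputs
//   }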
// Get the average frame duration
/// @name Benchmarking
/** When benchmarking is enabled, this keeps a running average of the per-frame time spent on upload, processing, and final recording or display
*/
- (CGFloat)averageFrameDurationDuringCapture;
// Reset the benchmark statistics
- (void)resetBenchmarkAverage;
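// Benchmarking sketch: enable runBenchmark before starting capture, then read
// the running average (reported in milliseconds) while frames flow:
//
//   videoCamera.runBenchmark = YES;
//   [videoCamera startCameraCapture];
//   // ... after some frames have been processed:
//   NSLog(@"Average frame time: %f ms", [videoCamera averageFrameDurationDuringCapture]);
//   [videoCamera resetBenchmarkAverage];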
// Class-level checks for front/back camera presence
+ (BOOL)isBackFacingCameraPresent;
+ (BOOL)isFrontFacingCameraPresent;