GPUImage Source Code Reading (13)
GPUImageMovieWriter does not have many methods, but each one is fairly long and the internal processing is relatively complex. Only the most common methods are covered here; if you need to record video, they are worth reading carefully.
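Before looking at the source, here is a minimal sketch of how GPUImageMovieWriter is typically wired into a filter chain. The videoCamera and filter objects are assumed to exist elsewhere, and the file path is illustrative:

    // Hypothetical setup: videoCamera is a GPUImageVideoCamera, filter is any GPUImageOutput subclass
    NSURL *movieURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"movie.m4v"]];
    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];

    [filter addTarget:movieWriter];                 // rendered frames flow into the writer
    videoCamera.audioEncodingTarget = movieWriter;  // microphone audio ends up in processAudioBuffer:
    [movieWriter startRecording];

    // ... record for a while ...

    [filter removeTarget:movieWriter];
    videoCamera.audioEncodingTarget = nil;
    [movieWriter finishRecording];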
// Initialize the audio parameters: encoding format, channel count, sample rate, and bit rate
- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
    _hasAudioTrack = newValue;

    if (_hasAudioTrack)
    {
        if (_shouldPassthroughAudio)
        {
            // Do not set any settings so audio will be the same as passthrough
            audioOutputSettings = nil;
        }
        else if (audioOutputSettings == nil)
        {
            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];
            double preferredHardwareSampleRate;

            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])
            {
                preferredHardwareSampleRate = [sharedAudioSession sampleRate];
            }
            else
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#pragma clang diagnostic pop
            }

            AudioChannelLayout acl; // Initialize the audio channel layout
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; // Single-channel (mono) mode

            // Configure the audio parameters
            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   //[NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
        }

        self.assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriter addInput:assetWriterAudioInput];
        assetWriterAudioInput.expectsMediaDataInRealTime = NO;
    }
    else
    {
        // Remove audio track if it exists
    }
}
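Callers can override these defaults by passing their own dictionary to setHasAudioTrack:audioSettings: before recording starts. A small sketch; the stereo AAC values below are illustrative, not taken from the original source:

    // Hypothetical custom settings: stereo AAC at 44.1 kHz / 128 kbps
    AudioChannelLayout stereoLayout;
    bzero(&stereoLayout, sizeof(stereoLayout));
    stereoLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    NSDictionary *customAudioSettings = @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey : @2,
        AVSampleRateKey       : @44100.0,
        AVEncoderBitRateKey   : @128000,   // illustrative bit rate
        AVChannelLayoutKey    : [NSData dataWithBytes:&stereoLayout length:sizeof(stereoLayout)]
    };
    [movieWriter setHasAudioTrack:YES audioSettings:customAudioSettings];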
The video configuration method:
// Configure the video input source settings
- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
{
    isRecording = NO;

    self.enabled = YES;
    NSError *error = nil;
    self.assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
    if (error != nil)
    {
        NSLog(@"Error: %@", error);
        if (failureBlock)
        {
            failureBlock(error);
        }
        else
        {
            if (self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
            {
                [self.delegate movieRecordingFailedWithError:error];
            }
        }
    }

    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
    // (For comparison, AVCaptureMovieFileOutput's fragment interval defaults to 10 s and can be disabled with:
    //  _captureMovieFileOutput.movieFragmentInterval = kCMTimeInvalid)
    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);
    // Use default output settings if none are specified
    // Set the output video's width and height
    if (outputSettings == nil)
    {
        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
        outputSettings = settings;
    }
    // Custom output settings specified
    else
    {
        NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
        NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
        NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];

        NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");

        if ([outputSettings objectForKey:@"EncodingLiveVideo"]) {
            NSMutableDictionary *tmp = [outputSettings mutableCopy];
            [tmp removeObjectForKey:@"EncodingLiveVideo"];
            outputSettings = tmp;
        }
    }
    /*
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];
    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
    [compressionProperties setObject:[NSNumber numberWithInt:2000000] forKey:AVVideoAverageBitRateKey];
    [compressionProperties setObject:[NSNumber numberWithInt:16] forKey:AVVideoMaxKeyFrameIntervalKey];
    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];

    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
    */
    float bitRate = 87500.0f * 8.0f;
    int frameInterval = 24;
    NSDictionary *compressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithFloat:bitRate], AVVideoAverageBitRateKey,
                                         [NSNumber numberWithInt:frameInterval * 2], AVVideoMaxKeyFrameIntervalKey,
                                         nil];
    NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                                              [NSNumber numberWithInteger:videoSize.width], AVVideoWidthKey,
                                              [NSNumber numberWithInteger:videoSize.height], AVVideoHeightKey,
                                              compressionSettings, AVVideoCompressionPropertiesKey,
                                              nil];

    self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
    assetWriterVideoInput.expectsMediaDataInRealTime = YES;

    // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                           nil];
    //    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
    //                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
    //                                                           nil];

    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    // Add the video input to the writer
    [assetWriter addInput:assetWriterVideoInput];
}
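In practice, custom settings like these are supplied through the initializer initWithMovieURL:size:fileType:outputSettings:, which presumably feeds this method in the refactored version shown here. A hedged sketch; the bit-rate and key-frame values are illustrative:

    // Hypothetical custom output settings; must include codec, width, and height to pass the NSAssert above
    NSMutableDictionary *customOutputSettings = [@{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @720,
        AVVideoHeightKey : @1280,
        AVVideoCompressionPropertiesKey : @{
            AVVideoAverageBitRateKey      : @2000000,   // illustrative bit rate
            AVVideoMaxKeyFrameIntervalKey : @48
        }
    } mutableCopy];

    GPUImageMovieWriter *writer = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
                                                                           size:CGSizeMake(720.0, 1280.0)
                                                                       fileType:AVFileTypeQuickTimeMovie
                                                                 outputSettings:customOutputSettings];

Note that in the listing above, the hard-coded videoCompressionSettings dictionary is what ultimately reaches the AVAssetWriterInput, so custom settings only survive as far as the NSAssert validation.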
// Process an incoming audio sample buffer
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
    if (!isRecording)
    {
        return;
    }
    @autoreleasepool {
        //    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))
        // Only process when the writer actually has an audio track
        if (_hasAudioTrack)
        {
            CFRetain(audioBuffer);

            // The presentation timestamp of the current sample
            CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);

            //        if (CMTIME_IS_INVALID(startTime))
            //        {
            //            runSynchronouslyOnContextQueue(_movieWriterContext, ^{
            //                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
            //                {
            //                    [assetWriter startWriting];
            //                }
            //                [assetWriter startSessionAtSourceTime:currentSampleTime];
            //                startTime = currentSampleTime;
            //            });
            //        }

            // When encoding live video, drop this frame if the audio input cannot accept more media data
            if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)
            {
                NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
                if (_shouldInvalidateAudioSampleWhenDone)
                {
                    CMSampleBufferInvalidate(audioBuffer);
                }
                CFRelease(audioBuffer);
                return;
            }
            previousAudioTime = currentSampleTime;

            // If the consumer wants to do something with the audio samples before writing, let them.
            // Hand the audio samples to the callback
            if (self.audioProcessingCallback) {
                // Need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.
                CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);
                CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);
                AudioBufferList audioBufferList;

                CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,
                                                                        NULL,
                                                                        &audioBufferList,
                                                                        sizeof(audioBufferList),
                                                                        NULL,
                                                                        NULL,
                                                                        kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                                        &buffer
                                                                        );

                // Passing a live pointer to the audio buffers; try to process them in-place or we might have syncing issues.
                for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
                    SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
                    self.audioProcessingCallback(&samples, numSamplesInBuffer);
                }
            }
// NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
// 写入音频block
void(^write)() = ^() {
// while( ! assetWriterAudioInput.readyForMoreMediaData && ! _encodingLiveVideo && ! audioEncodingIsFinished ) {
// NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];
// NSLog(@"audio waiting...");
// [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
// }
if (!assetWriterAudioInput.readyForMoreMediaData)
{
NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
else if(assetWriter.status == AVAssetWriterStatusWriting)
{
if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])
NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
else
{
//NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
}
// 标记不被使用
if (_shouldInvalidateAudioSampleWhenDone)
{
CMSampleBufferInvalidate(audioBuffer);
}
CFRelease(audioBuffer);
};
            //        runAsynchronouslyOnContextQueue(_movieWriterContext, write);
            // When encoding live video, write asynchronously on the context queue
            if (_encodingLiveVideo)
            {
                runAsynchronouslyOnContextQueue(_movieWriterContext, write);
            }
            else
            {
                // Otherwise, write synchronously
                write();
            }
        }
    }
}
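As the in-code comments note, audioProcessingCallback hands a live pointer to the PCM data to the consumer, so any processing should happen in place. A minimal sketch that attenuates the signal, assuming 16-bit samples as the source code does:

    // Illustrative callback: halve the volume of every sample before it is written
    movieWriter.audioProcessingCallback = ^(SInt16 **samplesRef, CMItemCount numSamplesInBuffer) {
        SInt16 *samples = *samplesRef;
        for (CMItemCount i = 0; i < numSamplesInBuffer; i++) {
            samples[i] = samples[i] / 2; // modify in place, as the comment above recommends
        }
    };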