- (void)merge {
    // Source video path
    _videoPath = @"/Users/liter/Desktop/IMG_2546.m4v";
    // Output video path
    _path = @"/Users/liter/Desktop/123.m4v";
    // Set up the reader
    [self setupAssetReader];
    // Set up the writer
    [self setupAssetWriter];
    // Read the audio track of another video
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:@"/Users/liter/Desktop/IMG_2545.m4v"] options:nil];
    AVAssetReader *readerTwo = [[AVAssetReader alloc] initWithAsset:asset error:nil];
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Decode the extra audio track to 16-bit little-endian integer linear PCM
    NSDictionary *dic = @{AVFormatIDKey : @(kAudioFormatLinearPCM),
                          AVLinearPCMIsBigEndianKey : @NO,
                          AVLinearPCMIsFloatKey : @NO,
                          AVLinearPCMBitDepthKey : @(16)};
    AVAssetReaderTrackOutput *trackOutputTwo = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioTrack outputSettings:dic];
    [readerTwo addOutput:trackOutputTwo];
    // Audio settings: format (AAC here), channel count, sample rate, and bit rate
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100], AVSampleRateKey,
                                   [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
                                   nil];
    // Create the writer input for the extra audio track
    AVAssetWriterInput *audioInputTwo = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    // Indicates whether the input should tailor its processing for a real-time data source
    audioInputTwo.expectsMediaDataInRealTime = YES;
    // Add the extra audio input to the writer
    [_assetWriter addInput:audioInputTwo];
    // Start both readers and begin the writing session
    [readerTwo startReading];
    [_reader startReading];
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    NSLog(@"Start");
    while ([_reader status] == AVAssetReaderStatusReading) {
        CMSampleBufferRef videoBuffer = [_videoReaderOutput copyNextSampleBuffer];
        CMSampleBufferRef audioBuffer = [_audioReaderOutput copyNextSampleBuffer];
        CMSampleBufferRef audioBufferTwo = [trackOutputTwo copyNextSampleBuffer];
        // Convert the video sample buffer into an image
        UIImage *image = [self imageFromSampleBuffer:videoBuffer];
        if (image == nil) {
            // Release anything copied this iteration before skipping the frame
            if (videoBuffer) CFRelease(videoBuffer);
            if (audioBuffer) CFRelease(audioBuffer);
            if (audioBufferTwo) CFRelease(audioBufferTwo);
            continue;
        }
        // The frame can be processed here, e.g. applying a filter
        // UIImage *newImage = [self addFiliterFrom:image filiterImage:_filiterImage];
        // Convert the image back into a pixel buffer that can be appended
        CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[image CGImage] size:CGSizeMake(image.size.width, image.size.height)];
        if (buffer) {
            // Wait until every writer input is ready to accept more data
            while (!_videoInput.isReadyForMoreMediaData || !_audioInput.isReadyForMoreMediaData || !audioInputTwo.isReadyForMoreMediaData) {
                usleep(1);
            }
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(videoBuffer);
            if (audioBuffer) {
                [_audioInput appendSampleBuffer:audioBuffer];
            }
            // Appending a NULL sample buffer raises an exception, so check the extra track separately
            if (audioBufferTwo) {
                [audioInputTwo appendSampleBuffer:audioBufferTwo];
            }
            if (![_adaptor appendPixelBuffer:buffer withPresentationTime:startTime]) {
                NSLog(@"FAIL");
            }
            CFRelease(buffer);
        }
        // copyNextSampleBuffer returns retained buffers, so release them here
        if (videoBuffer) CFRelease(videoBuffer);
        if (audioBuffer) CFRelease(audioBuffer);
        if (audioBufferTwo) CFRelease(audioBufferTwo);
    }
    // [readerTwo cancelReading];
    // __weak JBAssetReader *weakSelf = self;
    // Mark the inputs as finished before closing the writer
    [_videoInput markAsFinished];
    [_audioInput markAsFinished];
    [audioInputTwo markAsFinished];
    [_assetWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Finished");
        // if (weakSelf.delegate) {
        //     [weakSelf.delegate videoPath:_path];
        // }
    }];
    NSLog(@"...");
}
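
// The helper -imageFromSampleBuffer: called in the loop above is not shown in this listing.
// A minimal sketch, assuming the reader output delivers kCVPixelFormatType_32BGRA frames
// as configured in -setupAssetReader (the real implementation may differ):
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (sampleBuffer == NULL) {
        return nil;
    }
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return nil;
    }
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // BGRA matches kCGBitmapByteOrder32Little with premultiplied-first alpha
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    return image;
}
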
- (void)setupAssetReader {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:_videoPath] options:nil];
    _reader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
    // Video track output, decoded as 32-bit BGRA pixel buffers
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    OSType pixelFormatType = kCVPixelFormatType_32BGRA;
    NSMutableDictionary *options = [NSMutableDictionary dictionary];
    [options setObject:@(pixelFormatType) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    _videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];
    [_reader addOutput:_videoReaderOutput];
    // Audio track output, decoded as 16-bit little-endian integer linear PCM
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    NSDictionary *dic = @{AVFormatIDKey : @(kAudioFormatLinearPCM),
                          AVLinearPCMIsBigEndianKey : @NO,
                          AVLinearPCMIsFloatKey : @NO,
                          AVLinearPCMBitDepthKey : @(16)};
    _audioReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioTrack outputSettings:dic];
    [_reader addOutput:_audioReaderOutput];
}
- (void)setupAssetWriter {
    // Remove any previous output file before creating the writer
    [[NSFileManager defaultManager] removeItemAtPath:_path error:nil];
    _assetWriter = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:_path] fileType:AVFileTypeMPEG4 error:nil];
    NSParameterAssert(_assetWriter);
    // Video settings: codec, resolution, etc.
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInteger:480], AVVideoWidthKey,
                                   [NSNumber numberWithInteger:480], AVVideoHeightKey,
                                   nil];
    _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    _videoInput.expectsMediaDataInRealTime = YES;
    NSParameterAssert(_videoInput);
    NSParameterAssert([_assetWriter canAddInput:_videoInput]);
    // Add the video input to the writer
    [_assetWriter addInput:_videoInput];
    // Pixel buffer adaptor so processed frames can be appended as BGRA pixel buffers
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
                                                           nil];
    _adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput
                                                                                sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    // Audio settings: format (AAC here), channel count, sample rate, and bit rate
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100], AVSampleRateKey,
                                   [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
                                   nil];
    // Create the audio writer input
    _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    // Indicates whether the input should tailor its processing for a real-time data source
    _audioInput.expectsMediaDataInRealTime = YES;
    // Add the audio input to the writer
    [_assetWriter addInput:_audioInput];
}
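
// The helper -pixelBufferFromCGImage:size: used in -merge is likewise not shown here.
// A minimal sketch, assuming it renders a CGImage into a newly created BGRA pixel buffer
// that the adaptor can append; the caller releases the returned buffer, as -merge does
// with CFRelease (the real implementation may differ):
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32BGRA,
                                          (__bridge CFDictionaryRef)options, &pixelBuffer);
    if (status != kCVReturnSuccess || pixelBuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *data = CVPixelBufferGetBaseAddress(pixelBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(data, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Draw the frame into the pixel buffer's backing memory
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return pixelBuffer;
}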