• A video-buffer reader modeled on GPUImageMovie from GPUImage, supporting only 32-bit BGRA output

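The original post includes only the .m file. Below is a plausible reconstruction of YDVideoReaderOutput.h, inferred from the @synthesize list and call sites in the implementation and from GPUImageMovie.h, which this class mirrors; treat it as a sketch, not the author's actual header.

    //
    //  YDVideoReaderOutput.h (reconstructed sketch, not from the original post)
    //
    #import "GPUImageOutput.h"
    #import <AVFoundation/AVFoundation.h>

    @class GPUImageMovieWriter;

    @interface YDVideoReaderOutput : GPUImageOutput

    @property (readwrite, retain) NSURL *url;
    @property (readwrite, retain) AVAsset *asset;
    @property (readwrite, retain) AVPlayerItem *playerItem;
    @property (readwrite, nonatomic) BOOL runBenchmark;
    @property (readwrite, nonatomic) BOOL playAtActualSpeed;
    @property (readwrite, nonatomic) BOOL shouldRepeat;
    @property (readonly, nonatomic) float progress;
    @property (readonly, nonatomic) AVAssetReader *assetReader;
    @property (readonly, nonatomic) BOOL audioEncodingIsFinished;
    @property (readonly, nonatomic) BOOL videoEncodingIsFinished;

    /// Called on the video processing queue for every decoded BGRA32 frame.
    @property (nonatomic, copy) void (^didReadVideoBuffer)(CVPixelBufferRef pixelBuffer, CMTime sampleTime);

    - (id)initWithURL:(NSURL *)url;
    - (id)initWithAsset:(AVAsset *)asset;
    - (id)initWithPlayerItem:(AVPlayerItem *)playerItem;

    - (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
    - (void)startProcessing;
    - (void)endProcessing;
    - (void)cancelProcessing;

    @end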

    //
    //  YDVideoReaderOutput.m
    //  SportsBar
    //
    //  Created by mac on 2020/8/3.
    //  Copyright © 2020 yuedong. All rights reserved.
    //
    
    #import "YDVideoReaderOutput.h"
    #import "GPUImageMovieWriter.h"
    #import "GPUImageFilter.h"
    #import "GPUImageColorConversion.h"
    
    
    @interface YDVideoReaderOutput () <AVPlayerItemOutputPullDelegate>
    {
        BOOL audioEncodingIsFinished, videoEncodingIsFinished;
        GPUImageMovieWriter *synchronizedMovieWriter;
        AVAssetReader *reader;
        AVPlayerItemVideoOutput *playerItemOutput;
        CADisplayLink *displayLink;
        CMTime previousFrameTime, processingFrameTime;
        CFAbsoluteTime previousActualFrameTime;
        BOOL keepLooping;
    
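        // Leftover YUV-conversion state inherited from GPUImageMovie; unused in
        // this BGRA-only variant but kept to match the original's structure.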
        GLuint luminanceTexture, chrominanceTexture;
    
        GLProgram *yuvConversionProgram;
        GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
        GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
        GLint yuvConversionMatrixUniform;
        const GLfloat *_preferredConversion;
        
        BOOL isFullYUVRange;
    
        int imageBufferWidth, imageBufferHeight;
        
        dispatch_queue_t videoProcessingQueue;
    }
    
    - (void)processAsset;
    
    @end
    
    @implementation YDVideoReaderOutput
    
    @synthesize url = _url;
    @synthesize asset = _asset;
    @synthesize runBenchmark = _runBenchmark;
    @synthesize playAtActualSpeed = _playAtActualSpeed;
    @synthesize shouldRepeat = _shouldRepeat;
    
    #pragma mark -
    #pragma mark Initialization and teardown
    
    // Queue-specific key used to detect whether we are already on the processing
    // queue. A static key pointer is reliable here; matching two separate string
    // literals by address, as the original code did, is not guaranteed to work.
    static void *kYDVideoProcessingQueueKey = &kYDVideoProcessingQueueKey;
    
    - (void)runYDSynchronouslyOnVideoProcessingQueue:(void (^)(void))block
    {
        if (dispatch_get_specific(kYDVideoProcessingQueueKey))
        {
            block();
        }
        else
        {
            dispatch_sync(videoProcessingQueue, block);
        }
    }
    /// Use this for reading frames from a file (frame delivery rate will not match real playback speed)
    - (id)initWithURL:(NSURL *)url;
    {
        if (!(self = [self init]))
        {
            return nil;
        }
    
        self.url = url;
        self.asset = nil;
    
        return self;
    }
    
    - (id)initWithAsset:(AVAsset *)asset;
    {
        if (!(self = [self init]))
        {
            return nil;
        }
    
        self.url = nil;
        self.asset = asset;
    
        return self;
    }
    /// Use this to grab frames while the item is playing (frame delivery matches the playback rate)
    - (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
    {
        if (!(self = [self init]))
        {
            return nil;
        }
        self.url = nil;
        self.asset = nil;
        self.playerItem = playerItem;
    
        return self;
    }
    
    - (instancetype)init {
        self = [super init];
        if (self) {
            videoProcessingQueue = dispatch_queue_create("com.yd.GPUImage.read.video.queue", DISPATCH_QUEUE_SERIAL);
            dispatch_queue_set_specific(videoProcessingQueue, kYDVideoProcessingQueueKey, (__bridge void *)self, NULL);
        }
        return self;
    }
    
    
    - (void)dealloc
    {
        [playerItemOutput setDelegate:nil queue:nil];
    }
    
    #pragma mark -
    #pragma mark Movie processing
    
    - (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
    {
        synchronizedMovieWriter = movieWriter;
        movieWriter.encodingLiveVideo = NO;
    }
    
    - (void)startProcessing
    {
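        // Three entry points: a live AVPlayerItem (frames pulled during playback),
        // a preloaded AVAsset, or a URL whose tracks are first loaded asynchronously.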
        if( self.playerItem ) {
            [self processPlayerItem];
            return;
        }
        if (self.url == nil)
        {
            [self processAsset];
            return;
        }
        
        if (_shouldRepeat) keepLooping = YES;
        
        previousFrameTime = kCMTimeZero;
        previousActualFrameTime = CFAbsoluteTimeGetCurrent();
      
        NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
        AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
        
        YDVideoReaderOutput __block *blockSelf = self;
        
        [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                NSError *error = nil;
                AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
                if (tracksStatus != AVKeyValueStatusLoaded)
                {
                    return;
                }
                blockSelf.asset = inputAsset;
                [blockSelf processAsset];
                blockSelf = nil;
            });
        }];
    }
    
    - (AVAssetReader*)createAssetReader
    {
        NSError *error = nil;
        AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
    
        NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
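        // The fast-texture YUV path from GPUImageMovie is left disabled below;
        // this class deliberately decodes to BGRA32 only.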
    //    if ([GPUImageContext supportsFastTextureUpload]) {
    //        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    //        isFullYUVRange = YES;
    //    }
    //    else {
            [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
            isFullYUVRange = NO;
    //    }
        
        // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
        AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
        readerVideoTrackOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerVideoTrackOutput];
    
        NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
        BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
        AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
    
        if (shouldRecordAudioTrack)
        {
    #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
    #else
    #warning Missing OSX implementation
    #endif
            
            // This might need to be extended to handle movies with more than one audio track
            AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
            readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
            readerAudioTrackOutput.alwaysCopiesSampleData = NO;
            [assetReader addOutput:readerAudioTrackOutput];
        }
    
        return assetReader;
    }
    
    - (void)processAsset
    {
        reader = [self createAssetReader];
    
        AVAssetReaderOutput *readerVideoTrackOutput = nil;
        AVAssetReaderOutput *readerAudioTrackOutput = nil;
    
        audioEncodingIsFinished = YES;
        for( AVAssetReaderOutput *output in reader.outputs ) {
            if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
                audioEncodingIsFinished = NO;
                readerAudioTrackOutput = output;
            }
            else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
                readerVideoTrackOutput = output;
            }
        }
    
        if ([reader startReading] == NO)
        {
                NSLog(@"Error reading from file at URL: %@", self.url);
            return;
        }
    
        __unsafe_unretained YDVideoReaderOutput *weakSelf = self;
    
        if (synchronizedMovieWriter != nil)
        {
            [synchronizedMovieWriter setVideoInputReadyCallback:^{
                BOOL success = [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
    #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                return success;
    #endif
            }];
    
            [synchronizedMovieWriter setAudioInputReadyCallback:^{
                BOOL success = [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
    #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
                return success;
    #endif
            }];
            
            [synchronizedMovieWriter enableSynchronizationCallbacks];
    
        }
        else
        {
            while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
            {
                [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

                if ((readerAudioTrackOutput) && (!audioEncodingIsFinished))
                {
                    [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
                }
            }

            if (reader.status == AVAssetReaderStatusCompleted) {
                [reader cancelReading];

                if (keepLooping) {
                    reader = nil;
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self startProcessing];
                    });
                } else {
                    [weakSelf endProcessing];
                }
            }
        }
    }
    
    - (void)processPlayerItem
    {
        typeof(self) __weak weakself = self;
        [self runYDSynchronouslyOnVideoProcessingQueue:^{
            typeof(weakself) __strong strongself = weakself;
            strongself->displayLink = [CADisplayLink displayLinkWithTarget:strongself selector:@selector(displayLinkCallback:)];
            // Schedule on the main run loop: this block runs on a GCD queue, whose
            // run loop is never spun, so a display link added there would never fire.
            [strongself->displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
            [strongself->displayLink setPaused:YES];
    
            NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
            strongself->playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
            [strongself->playerItemOutput setDelegate:self queue:strongself->videoProcessingQueue];
    
            [strongself->_playerItem addOutput:strongself->playerItemOutput];
            [strongself->playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
        }];
    }
    
    - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
    {
        // Restart display link.
        [displayLink setPaused:NO];
    }
    
    - (void)displayLinkCallback:(CADisplayLink *)sender
    {
        // Map the next vsync's host time to item time and pull the frame
        // that will be on screen at that moment.
        CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
        CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
        [self processPixelBufferAtTime:outputItemTime];
    }
    
    
    - (void)processPixelBufferAtTime:(CMTime)outputItemTime {
        if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
            __unsafe_unretained YDVideoReaderOutput *weakSelf = self;
            CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
            if (pixelBuffer) {
                [self runYDSynchronouslyOnVideoProcessingQueue:^{
                    [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                    CFRelease(pixelBuffer); // balances copyPixelBufferForItemTime
                }];
            }
        }
    }
    
    - (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
    {
        if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
        {
            CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
            if (sampleBufferRef)
            {
                //NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
                if (_playAtActualSpeed)
                {
                    // Do this outside of the video processing queue to not slow that down while waiting
                    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
                    CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
                    CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
                    
                    CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
                    CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
                    
                    if (frameTimeDifference > actualTimeDifference)
                    {
                        usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
                    }
                    
                    previousFrameTime = currentSampleTime;
                    previousActualFrameTime = CFAbsoluteTimeGetCurrent();
                }
    
                __unsafe_unretained YDVideoReaderOutput *weakSelf = self;
                [self runYDSynchronouslyOnVideoProcessingQueue:^{
                    [weakSelf processMovieFrame:sampleBufferRef];
                    CMSampleBufferInvalidate(sampleBufferRef);
                    CFRelease(sampleBufferRef);
                }];
    
                return YES;
            }
            else
            {
                if (!keepLooping) {
                    videoEncodingIsFinished = YES;
                    if( videoEncodingIsFinished && audioEncodingIsFinished )
                        [self endProcessing];
                }
            }
        }
        else if (synchronizedMovieWriter != nil)
        {
            if (reader.status == AVAssetReaderStatusCompleted)
            {
                [self endProcessing];
            }
        }
        return NO;
    }
    
    - (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
    {
        if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
        {
            CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
            if (audioSampleBufferRef)
            {
                //NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
                [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
                CFRelease(audioSampleBufferRef);
                return YES;
            }
            else
            {
                if (!keepLooping) {
                    audioEncodingIsFinished = YES;
                    if( videoEncodingIsFinished && audioEncodingIsFinished )
                        [self endProcessing];
                }
            }
        }
        else if (synchronizedMovieWriter != nil)
        {
            if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
                reader.status == AVAssetReaderStatusCancelled)
            {
                [self endProcessing];
            }
        }
        return NO;
    }
    
    - (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
    {
        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
        CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
        processingFrameTime = currentSampleTime;
        [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
    }
    
    - (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
    {
        if (self.didReadVideoBuffer) {
            self.didReadVideoBuffer(movieFrame, currentSampleTime);
        }
    }
    
    - (float)progress
    {
        if ( AVAssetReaderStatusReading == reader.status )
        {
            float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
            float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
            return current / duration;
        }
        else if ( AVAssetReaderStatusCompleted == reader.status )
        {
            return 1.f;
        }
        else
        {
            return 0.f;
        }
    }
    
    
    
    - (void)endProcessing;
    {
        keepLooping = NO;
        [displayLink setPaused:YES];
    
        for (id<GPUImageInput> currentTarget in targets)
        {
            [currentTarget endProcessing];
        }
        
        if (synchronizedMovieWriter != nil)
        {
            [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
            [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
        }
        
        if (self.playerItem && (displayLink != nil))
        {
            [displayLink invalidate]; // remove from all run loops
            displayLink = nil;
        }
    }
    
    - (void)cancelProcessing
    {
        if (reader) {
            [reader cancelReading];
        }
        [self endProcessing];
    }
    
    
    - (AVAssetReader*)assetReader {
        return reader;
    }
    
    - (BOOL)audioEncodingIsFinished {
        return audioEncodingIsFinished;
    }
    
    - (BOOL)videoEncodingIsFinished {
        return videoEncodingIsFinished;
    }
    
    @end

Create a player and attach an output to its item: while the AVPlayerItem plays, video frames can be pulled through that AVPlayerItemVideoOutput.
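A minimal usage sketch of the player-item path (hedged: `videoURL` and the retaining `readerOutput` property are illustrative assumptions, not from the original post):

    AVPlayerItem *item = [AVPlayerItem playerItemWithURL:videoURL];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:item];

    self.readerOutput = [[YDVideoReaderOutput alloc] initWithPlayerItem:item];
    self.readerOutput.didReadVideoBuffer = ^(CVPixelBufferRef pixelBuffer, CMTime sampleTime) {
        // Invoked on the video processing queue with a BGRA32 buffer.
        // The buffer is released as soon as this block returns, so copy it
        // (or CVPixelBufferRetain it) if it must outlive this scope.
        NSLog(@"frame %zux%zu at %.3fs",
              CVPixelBufferGetWidth(pixelBuffer),
              CVPixelBufferGetHeight(pixelBuffer),
              CMTimeGetSeconds(sampleTime));
    };
    [self.readerOutput startProcessing]; // attaches the AVPlayerItemVideoOutput to the item
    [player play];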

  • Original post: https://www.cnblogs.com/yuxiaoyiyou/p/13431595.html