#import "PGVideoMovie.h" #import "GPUImageFilter.h" GLfloat kColorConversion601Default[] = { 1.164, 1.164, 1.164, 0.0, -0.392, 2.017, 1.596, -0.813, 0.0, }; GLfloat kColorConversion601FullRangeDefault[] = { 1.0, 1.0, 1.0, 0.0, -0.343, 1.765, 1.4, -0.711, 0.0, }; GLfloat kColorConversion709Default[] = { 1, 1, 1, 0, -.21482, 2.12798, 1.28033, -.38059, 0, }; GLfloat *kColorConversion601 = kColorConversion601Default; GLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault; GLfloat *kColorConversion709 = kColorConversion709Default; NSString *const kYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING ( varying highp vec2 texCoord; uniform sampler2D luminanceTexture; uniform sampler2D chrominanceTexture; uniform mediump mat3 colorConversionMatrix; void main() { mediump vec3 yuv; lowp vec3 rgb; yuv.x = texture2D(luminanceTexture, texCoord).r; yuv.yz = texture2D(chrominanceTexture, texCoord).rg - vec2(0.5, 0.5); rgb = colorConversionMatrix * yuv; gl_FragColor = vec4(rgb, 1); } ); NSString *const kYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING ( varying highp vec2 texCoord; uniform sampler2D luminanceTexture; uniform sampler2D chrominanceTexture; uniform mediump mat3 colorConversionMatrix; void main() { mediump vec3 yuv; lowp vec3 rgb; yuv.x = texture2D(luminanceTexture, texCoord).r; yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5); rgb = colorConversionMatrix * yuv; gl_FragColor = vec4(rgb, 1); } ); NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING ( varying highp vec2 texCoord; uniform sampler2D luminanceTexture; uniform sampler2D chrominanceTexture; uniform mediump mat3 colorConversionMatrix; void main() { mediump vec3 yuv; lowp vec3 rgb; yuv.x = texture2D(luminanceTexture, texCoord).r - (16.0/255.0); yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5); rgb = colorConversionMatrix * yuv; gl_FragColor = vec4(rgb, 1); } ); @interface PGVideoMovie () { AVAssetReader *reader; AVPlayerItemVideoOutput *playerItemOutput; CADisplayLink *displayLink; CMTime previousFrameTime, processingFrameTime; CFAbsoluteTime previousActualFrameTime; BOOL keepLooping; GLuint luminanceTexture, chrominanceTexture; GLProgram *yuvConversionProgram; GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute; GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform; GLint yuvConversionMatrixUniform; const GLfloat *_preferredConversion; BOOL isFullYUVRange; int imageBufferWidth, imageBufferHeight; } - (void)processAsset; @end @implementation PGVideoMovie { bool videoEncodingIsFinished; } @synthesize asset = _asset; @synthesize shouldRepeat = _shouldRepeat; #pragma mark - #pragma mark Initialization and teardown - (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset { AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; CGSize size = [videoTrack naturalSize]; CGAffineTransform txf = [videoTrack preferredTransform]; if (size.width == txf.tx && size.height == txf.ty) return UIInterfaceOrientationLandscapeRight; else if (txf.tx == 0 && txf.ty == 0) return UIInterfaceOrientationLandscapeLeft; else if (txf.tx == 0 && txf.ty == size.width) return UIInterfaceOrientationPortraitUpsideDown; else return UIInterfaceOrientationPortrait; } - (instancetype)initWithAsset:(AVAsset *)asset { if (!(self = [super init])) { return nil; } [self yuvConversionSetup]; self.asset = asset; return self; } - 
// Fragment shaders for YUV -> RGB conversion. The RG variant samples chroma
// from .rg (GL_RG_EXT textures); the LA variants sample it from .ra
// (GL_LUMINANCE_ALPHA textures), and the video-range LA variant additionally
// removes the 16/255 luma footroom. The varying must be named
// "textureCoordinate" to match kGPUImageVertexShaderString, or the program
// will fail to link.
NSString *const kYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1.0);
 }
);

NSString *const kYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1.0);
 }
);

NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1.0);
 }
);

@interface PGVideoMovie () <AVPlayerItemOutputPullDelegate>
{
    AVAssetReader *reader;
    AVPlayerItemVideoOutput *playerItemOutput;
    CADisplayLink *displayLink;
    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;
    BOOL keepLooping;

    GLuint luminanceTexture, chrominanceTexture;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;
}

- (void)processAsset;

@end

@implementation PGVideoMovie
{
    BOOL videoEncodingIsFinished;
}

@synthesize asset = _asset;
@synthesize shouldRepeat = _shouldRepeat;

#pragma mark -
#pragma mark Initialization and teardown

- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];

    if (size.width == txf.tx && size.height == txf.ty)
        return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIInterfaceOrientationPortrait;
}

- (instancetype)initWithAsset:(AVAsset *)asset
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];
    self.asset = asset;
    return self;
}

- (instancetype)initWithPlayerItem:(AVPlayerItem *)playerItem
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];
    self.playerItem = playerItem;
    return self;
}

- (void)yuvConversionSetup
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;

            // Pick the fragment shader that matches the chroma texture layout
            // created in -processMovieFrame:withSampleTime:. RG textures carry
            // Cb/Cr in .rg; LUMINANCE_ALPHA textures carry them in .ra, so an
            // RG shader paired with LA textures would read the wrong channels.
            NSString *fragmentShaderString = ([GPUImageContext deviceSupportsRedTextures]
                                              ? kYUVVideoRangeConversionForRGFragmentShaderString
                                              : kYUVFullRangeConversionForLAFragmentShaderString);
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:fragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}

- (void)dealloc
{
    [playerItemOutput setDelegate:nil queue:nil];

    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate];
        displayLink = nil;
    }
}

#pragma mark -
#pragma mark Movie processing

//- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
//{
//    synchronizedMovieWriter = movieWriter;
//    movieWriter.encodingLiveVideo = NO;
//}

- (void)startProcessing
{
    if (_shouldRepeat)
    {
        keepLooping = YES;
    }

    if (self.playerItem != nil)
    {
        [self processPlayerItem];
    }
    else
    {
        [self processAsset];
    }
}

- (AVAssetReader *)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload])
    {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else
    {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    return assetReader;
}
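/*
 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange delivers each frame as two
 planes: plane 0 holds 8-bit luma (Y) at full resolution, and plane 1 holds
 interleaved 8-bit Cb/Cr at half resolution in both dimensions (4:2:0).
 The fast texture-upload path in -processMovieFrame:withSampleTime: below maps
 plane 0 and plane 1 to two separate GL textures, which is why the chrominance
 texture there is created at bufferWidth/2 x bufferHeight/2.
 */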
- (void)processAsset
{
    reader = [self createAssetReader];

    AVAssetReaderOutput *readerVideoTrackOutput = nil;
    for (AVAssetReaderOutput *output in reader.outputs)
    {
        if ([output.mediaType isEqualToString:AVMediaTypeVideo])
        {
            readerVideoTrackOutput = output;
        }
    }

    if (![reader startReading])
    {
        return;
    }

    __unsafe_unretained PGVideoMovie *weakSelf = self;

    while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
    {
        [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
    }

    if (reader.status == AVAssetReaderStatusCompleted)
    {
        [reader cancelReading];

        if (keepLooping)
        {
            reader = nil;
            [self startProcessing];
        }
        else
        {
            [weakSelf endProcessing];
        }
    }
}

- (void)processPlayerItem
{
    dispatch_sync(dispatch_get_main_queue(), ^{
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
        [displayLink setPaused:YES];
    });

    runSynchronouslyOnVideoProcessingQueue(^{
        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];

        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
        if ([GPUImageContext supportsFastTextureUpload])
        {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        else
        {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }

        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
        [playerItemOutput setDelegate:self queue:videoProcessingQueue];

        [_playerItem addOutput:playerItemOutput];
        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
    });
}

- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    [displayLink setPaused:NO];
}

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    // Ask the item output for the frame that should be on screen at the next vsync.
    CFTimeInterval nextVSync = [sender timestamp] + [sender duration];
    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
    [self processPixelBufferAtTime:outputItemTime];
}

- (void)processPixelBufferAtTime:(CMTime)outputItemTime
{
    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime])
    {
        __unsafe_unretained PGVideoMovie *weakSelf = self;
        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if (pixelBuffer != NULL)
        {
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                CFRelease(pixelBuffer);
            });
        }
    }
}

- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput
{
    if (reader.status == AVAssetReaderStatusReading && !videoEncodingIsFinished)
    {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
            CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
            CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

            CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
            CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

            // Throttle to the asset's native frame rate: sleep off any time by
            // which decoding outpaced presentation.
            if (frameTimeDifference > actualTimeDifference)
            {
                usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
            }

            previousFrameTime = currentSampleTime;
            previousActualFrameTime = CFAbsoluteTimeGetCurrent();

            __unsafe_unretained PGVideoMovie *weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else
        {
            if (!keepLooping)
            {
                videoEncodingIsFinished = YES;
                [self endProcessing];
            }
        }
    }

    return NO;
}
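/*
 Frame pacing above: an AVAssetReader hands out frames as fast as they can be
 decoded, so -readNextVideoFrameFromOutput: sleeps off the difference between
 the presentation-time delta and the wall-clock delta. For example, with a
 30 fps asset (frame delta ~0.0333 s) where processing the previous frame took
 0.005 s of wall time, the thread sleeps roughly
 1000000 * (0.0333 - 0.005) ≈ 28300 microseconds before handing the next frame
 downstream.
 */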
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer
{
    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}

- (CGFloat)progress
{
    if (AVAssetReaderStatusReading == reader.status)
    {
        // CMTimeGetSeconds performs the value/timescale division and copes
        // with invalid times.
        Float64 current = CMTimeGetSeconds(processingFrameTime);
        Float64 duration = CMTimeGetSeconds(self.asset.duration);
        return (CGFloat)(current / duration);
    }
    else if (AVAssetReaderStatusCompleted == reader.status)
    {
        return 1.0f;
    }
    else
    {
        return 0.0f;
    }
}
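/*
 -processMovieFrame:withSampleTime: below picks the conversion matrix from the
 pixel buffer's kCVImageBufferYCbCrMatrixKey attachment:

     attachment                            matrix
     ------------------------------------  ------------------------------------
     ITU_R_601_4, video range              kColorConversion601
     ITU_R_601_4, full range               kColorConversion601FullRange
     anything else (e.g. ITU_R_709_2)      kColorConversion709
     no attachment                         kColorConversion601 / 601FullRange

 isFullYUVRange reflects whether the asset reader or player item output was
 asked for full-range biplanar YUV.
 */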
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int)CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int)CVPixelBufferGetWidth(movieFrame);

    // Choose the YUV -> RGB matrix from the buffer's colorimetry attachment.
    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare((CFStringRef)colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            _preferredConversion = isFullYUVRange ? kColorConversion601FullRange : kColorConversion601;
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        _preferredConversion = isFullYUVRange ? kColorConversion601FullRange : kColorConversion601;
    }

    [GPUImageContext useImageProcessingContext];

    if ([GPUImageContext supportsFastTextureUpload])
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // biplanar YUV buffer
        {
            CVPixelBufferLockBaseAddress(movieFrame, 0);

            // Refresh the cached size when either dimension changes (the
            // original && test missed width-only or height-only changes).
            if ((imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight))
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;

            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane (half resolution, Cb/Cr interleaved)
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth / 2, bufferHeight / 2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth / 2, bufferHeight / 2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            [self convertYUVToRGBOutput];

            for (id currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];

                AVAsset *asset = self.asset;
                if (asset == nil)
                {
                    asset = self.playerItem.asset;
                }
                if (asset != nil)
                {
                    UIInterfaceOrientation orientation = [self orientationForTrack:asset];
                    if (orientation == UIInterfaceOrientationPortrait)
                    {
                        [currentTarget setInputRotation:kGPUImageRotateRight atIndex:targetTextureIndex];
                    }
                    else if (orientation == UIInterfaceOrientationLandscapeRight)
                    {
                        [currentTarget setInputRotation:kGPUImageRotate180 atIndex:targetTextureIndex];
                    }
                    else if (orientation == UIInterfaceOrientationPortraitUpsideDown)
                    {
                        [currentTarget setInputRotation:kGPUImageRotateLeft atIndex:targetTextureIndex];
                    }
                }
            }

            [outputFramebuffer unlock];

            for (id currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
    }
    else
    {
        // Slow path: upload the BGRA pixel buffer with glTexImage2D.
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        glTexImage2D(GL_TEXTURE_2D, 0, self.outputTextureOptions.internalFormat, bufferWidth, bufferHeight, 0, self.outputTextureOptions.format, self.outputTextureOptions.type, CVPixelBufferGetBaseAddress(movieFrame));

        for (id currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }
}

- (void)endProcessing
{
    keepLooping = NO;
    [displayLink setPaused:YES];

    for (id currentTarget in targets)
    {
        [currentTarget endProcessing];
    }

    if (displayLink != nil)
    {
        [displayLink invalidate];
        displayLink = nil;
    }
}

- (void)cancelProcessing
{
    if (reader)
    {
        [reader cancelReading];
    }
    [self endProcessing];
}
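/*
 -convertYUVToRGBOutput below draws a full-screen quad (a 4-vertex triangle
 strip) that samples the two YUV plane textures and writes the converted RGB
 into outputFramebuffer. The glUniform1i values (4 and 5) must stay in sync
 with the glActiveTexture(GL_TEXTURE4/GL_TEXTURE5) units the planes were bound
 to during upload.
 */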
- (void)convertYUVToRGBOutput
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, GL_FALSE, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, GL_FALSE, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

- (AVAssetReader *)assetReader
{
    return reader;
}

- (BOOL)videoEncodingIsFinished
{
    return videoEncodingIsFinished;
}

@end
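/*
 Usage sketch (hypothetical names; assumes a GPUImage-style filter chain where
 PGVideoMovie acts as the source and `filter`/`view` are existing GPUImage
 objects):

     AVAsset *asset = [AVAsset assetWithURL:movieURL];
     PGVideoMovie *movie = [[PGVideoMovie alloc] initWithAsset:asset];
     movie.shouldRepeat = YES;

     [movie addTarget:filter];   // e.g. a GPUImageFilter subclass
     [filter addTarget:view];    // e.g. a GPUImageView

     [movie startProcessing];

 For an AVPlayer-driven source, use -initWithPlayerItem: instead; frames are
 then pulled on each display-link tick through AVPlayerItemVideoOutput rather
 than read in a tight loop from AVAssetReader.
 */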