#import "PGVideoMovie.h"
|
|
#import "GPUImageFilter.h"
|
|
|
|
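// YUV (YCbCr) -> RGB conversion matrices, laid out column-major for glUniformMatrix3fv:
// BT.601 video range, BT.601 full range, and BT.709.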
GLfloat kColorConversion601Default[] = {
    1.164,  1.164,  1.164,
    0.0,   -0.392,  2.017,
    1.596, -0.813,  0.0,
};

GLfloat kColorConversion601FullRangeDefault[] = {
    1.0,    1.0,    1.0,
    0.0,   -0.343,  1.765,
    1.4,   -0.711,  0.0,
};

GLfloat kColorConversion709Default[] = {
    1.164,  1.164,  1.164,
    0.0,   -0.213,  2.112,
    1.793, -0.533,  0.0,
};

GLfloat *kColorConversion601 = kColorConversion601Default;
GLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault;
GLfloat *kColorConversion709 = kColorConversion709Default;

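// Fragment shaders that sample the separate luminance and chrominance planes of a biplanar
// YUV buffer and convert them to RGB with the matrix chosen above. The RG variant reads the
// chroma plane as a two-channel (red/green) texture; the LA variants read it as luminance/alpha.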
NSString *const kYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, texCoord).r;
     yuv.yz = texture2D(chrominanceTexture, texCoord).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, texCoord).r;
     yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;
 
 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, texCoord).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

@interface PGVideoMovie () <AVPlayerItemOutputPullDelegate>
{
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;
//    GPUImageMovieWriter *synchronizedMovieWriter;
    AVAssetReader *reader;
    AVPlayerItemVideoOutput *playerItemOutput;
    CADisplayLink *displayLink;
    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;
    BOOL keepLooping;

    GLuint luminanceTexture, chrominanceTexture;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;
}

- (void)processAsset;

@end

@implementation PGVideoMovie

@synthesize asset = _asset;
@synthesize shouldRepeat = _shouldRepeat;

#pragma mark -
#pragma mark Initialization and teardown

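// Typical usage (a sketch only, assuming PGVideoMovie is driven like GPUImageMovie,
// i.e. as a GPUImageOutput feeding one or more targets; addTarget: comes from the superclass):
//
//     PGVideoMovie *movie = [[PGVideoMovie alloc] initWithAsset:asset];
//     [movie addTarget:someFilterOrView];
//     [movie startProcessing];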
- (instancetype)initWithAsset:(AVAsset *)asset;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.asset = asset;

    return self;
}

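// Compiles and links the YUV -> RGB conversion program and caches its attribute and
// uniform locations for convertYUVToRGBOutput.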
- (void)yuvConversionSetup;
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kYUVFullRangeConversionForLAFragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}

- (void)dealloc
{
    [playerItemOutput setDelegate:nil queue:nil];

    // Moved into endProcessing
    //if (self.playerItem && (displayLink != nil))
    //{
    //    [displayLink invalidate]; // remove from all run loops
    //    displayLink = nil;
    //}
}

#pragma mark -
#pragma mark Movie processing

//- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
//{
//    synchronizedMovieWriter = movieWriter;
//    movieWriter.encodingLiveVideo = NO;
//}

- (void)startProcessing
{
    if (_shouldRepeat) self->keepLooping = true;

    [self processAsset];
}

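// Builds an AVAssetReader for the asset's first video track, requesting biplanar
// full-range YUV output when fast texture upload is available and 32-bit BGRA otherwise.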
- (AVAssetReader*)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload]) {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

//    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
//    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
//    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
//
//    if (shouldRecordAudioTrack)
//    {
//        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
//
//        // This might need to be extended to handle movies with more than one audio track
//        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
//        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
//        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
//        [assetReader addOutput:readerAudioTrackOutput];
//    }

    return assetReader;
}

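// Pulls samples from the asset reader until it stops reading (or looping is cancelled),
// forwarding video frames (and audio samples, when an audio output exists) to the read
// methods below. When shouldRepeat/keepLooping is set, restarts processing on completion.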
- (void)processAsset
{
    reader = [self createAssetReader];

    AVAssetReaderOutput *readerVideoTrackOutput = nil;
    AVAssetReaderOutput *readerAudioTrackOutput = nil;

    audioEncodingIsFinished = YES;
    for( AVAssetReaderOutput *output in reader.outputs ) {
        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
            audioEncodingIsFinished = NO;
            readerAudioTrackOutput = output;
        }
        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
            readerVideoTrackOutput = output;
        }
    }

    if ([reader startReading] == NO) {
        return;
    }

    __unsafe_unretained PGVideoMovie *weakSelf = self;

//    if (synchronizedMovieWriter != nil)
//    {
//        [synchronizedMovieWriter setVideoInputReadyCallback:^{
//            BOOL success = [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
//            return success;
//        }];
//
//        [synchronizedMovieWriter setAudioInputReadyCallback:^{
//            BOOL success = [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
//            return success;
//        }];
//
//        [synchronizedMovieWriter enableSynchronizationCallbacks];
//    }
//    else
//    {
        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
        {
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

            if ((readerAudioTrackOutput) && (!audioEncodingIsFinished))
            {
                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
            }
        }

        if (reader.status == AVAssetReaderStatusCompleted) {
            [reader cancelReading];

            if (keepLooping) {
                reader = nil;
                [self startProcessing];
            } else {
                [weakSelf endProcessing];
            }
        }
//    }
}

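// AVPlayerItemOutputPullDelegate: resume the display link when the player item output
// has new media data to vend.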
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    [displayLink setPaused:false];
}

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
    [self processPixelBufferAtTime:outputItemTime];
}

- (void)processPixelBufferAtTime:(CMTime)outputItemTime
{
    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime])
    {
        __unsafe_unretained PGVideoMovie *weakSelf = self;
        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if (pixelBuffer != NULL)
        {
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                CFRelease(pixelBuffer);
            });
        }
    }
}

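// Copies the next video sample, sleeps for the gap between media time and wall-clock time
// so playback runs at roughly real-time speed, then processes the frame on the video
// processing queue. Returns YES when a frame was read and processed.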
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && !videoEncodingIsFinished)
    {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
            CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
            CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

            CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
            CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

            if (frameTimeDifference > actualTimeDifference)
            {
                usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
            }

            previousFrameTime = currentSampleTime;
            previousActualFrameTime = CFAbsoluteTimeGetCurrent();

            __unsafe_unretained PGVideoMovie *weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else
        {
            if (!keepLooping) {
                videoEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                    [self endProcessing];
            }
        }
    }
//    else if (synchronizedMovieWriter != nil)
//    {
//        if (reader.status == AVAssetReaderStatusCompleted)
//        {
//            [self endProcessing];
//        }
//    }
    return NO;
}

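// Copies and immediately releases the next audio sample; the audio data is not forwarded
// anywhere here because the synchronized movie-writer path is commented out. Returns YES
// while audio samples are still available.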
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && !audioEncodingIsFinished)
    {
        CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
        if (audioSampleBufferRef)
        {
            CFRelease(audioSampleBufferRef);
            return YES;
        }
        else
        {
            if (!keepLooping) {
                audioEncodingIsFinished = YES;
                if (videoEncodingIsFinished && audioEncodingIsFinished)
                    [self endProcessing];
            }
        }
    }
//    else if (synchronizedMovieWriter != nil)
//    {
//        if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
//            reader.status == AVAssetReaderStatusCancelled)
//        {
//            [self endProcessing];
//        }
//    }
    return NO;
}

- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}

- (CGFloat)progress
{
    if (AVAssetReaderStatusReading == reader.status)
    {
        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
        return current / duration;
    }
    else if (AVAssetReaderStatusCompleted == reader.status)
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}

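// Uploads one decoded frame to OpenGL ES. With fast texture upload the two YUV planes are
// mapped into textures through the Core Video texture cache and converted to RGB on the GPU;
// otherwise the BGRA pixel data is uploaded with glTexImage2D. Targets are then notified
// that a new frame is ready at the given sample time.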
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);

    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }

    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    [GPUImageContext useImageProcessingContext];

    if ([GPUImageContext supportsFastTextureUpload])
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

        if (CVPixelBufferGetPlaneCount(movieFrame) > 0)
        {
            CVPixelBufferLockBaseAddress(movieFrame,0);

            if ((imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight))
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);

            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);

            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            [self convertYUVToRGBOutput];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            }

            [outputFramebuffer unlock];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
    }
    else
    {
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

        glTexImage2D(GL_TEXTURE_2D, 0, self.outputTextureOptions.internalFormat, bufferWidth, bufferHeight, 0, self.outputTextureOptions.format, self.outputTextureOptions.type, CVPixelBufferGetBaseAddress(movieFrame));

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }
        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }
}

- (void)endProcessing
{
    keepLooping = NO;

    [displayLink setPaused:YES];

    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }

    if (displayLink != nil)
    {
        [displayLink invalidate];
        displayLink = nil;
    }
}

- (void)cancelProcessing
{
    if (reader) {
        [reader cancelReading];
    }
    [self endProcessing];
}

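// Draws a full-screen quad that samples the luminance and chrominance textures and writes
// the RGB result into outputFramebuffer using _preferredConversion.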
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

- (AVAssetReader*)assetReader {
    return reader;
}

- (BOOL)audioEncodingIsFinished {
    return audioEncodingIsFinished;
}

- (BOOL)videoEncodingIsFinished {
    return videoEncodingIsFinished;
}

@end