#import "PGVideoMovie.h"
|
|
#import "GPUImageFilter.h"
|
|
#import "LegacyComponentsInternal.h"
|
|
|
|
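
// YUV -> RGB conversion matrices, laid out column-major as expected by
// glUniformMatrix3fv(..., GL_FALSE, ...). The two BT.601 variants cover
// video-range and full-range luma; the BT.709 variant uses full-range
// coefficients.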
GLfloat kColorConversion601Default[] = {
    1.164,  1.164,  1.164,
    0.0,   -0.392,  2.017,
    1.596, -0.813,  0.0,
};

GLfloat kColorConversion601FullRangeDefault[] = {
    1.0,    1.0,    1.0,
    0.0,   -0.343,  1.765,
    1.4,   -0.711,  0.0,
};

GLfloat kColorConversion709Default[] = {
    1.0,     1.0,     1.0,
    0.0,    -0.1873,  1.8556,
    1.5748, -0.4681,  0.0,
};

GLfloat *kColorConversion601 = kColorConversion601Default;
GLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault;
GLfloat *kColorConversion709 = kColorConversion709Default;
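
// Fragment shader for biplanar YUV where chroma is sampled from a GL_RG
// texture (.rg channels). Note it applies no 16/255 luma offset, unlike the
// video-range luminance-alpha shader below.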
NSString *const kYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, texCoord).r;
     yuv.yz = texture2D(chrominanceTexture, texCoord).rg - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);
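
// Full-range variant for devices without RG texture support: chroma is
// uploaded as GL_LUMINANCE_ALPHA, so Cb/Cr are read from the .r and .a channels.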
NSString *const kYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, texCoord).r;
     yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);
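
// Video-range luminance-alpha variant: subtracts the 16/255 luma offset
// before applying the conversion matrix.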
NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying highp vec2 texCoord;

 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 uniform mediump mat3 colorConversionMatrix;

 void main()
 {
     mediump vec3 yuv;
     lowp vec3 rgb;

     yuv.x = texture2D(luminanceTexture, texCoord).r - (16.0/255.0);
     yuv.yz = texture2D(chrominanceTexture, texCoord).ra - vec2(0.5, 0.5);
     rgb = colorConversionMatrix * yuv;

     gl_FragColor = vec4(rgb, 1);
 }
);
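
// Private state: the display link / video output pair that drives frame pulls,
// and the GL program, attributes, uniforms, and textures used for the
// YUV -> RGB conversion pass.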
@interface PGVideoMovie () <AVPlayerItemOutputPullDelegate>
{
    AVPlayerItemVideoOutput *playerItemOutput;
    CADisplayLink *displayLink;
    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;

    GLuint luminanceTexture, chrominanceTexture;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;
}
@end

@implementation PGVideoMovie
{
    bool _shouldReprocessCurrentFrame;
}

#pragma mark -
#pragma mark Initialization and teardown
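
// Derives a GPUImageRotationMode from the first video track's preferredTransform.
// Assumes the asset contains at least one video track (objectAtIndex:0 would
// throw otherwise).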
- (GPUImageRotationMode)rotationForTrack:(AVAsset *)asset {
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGAffineTransform t = [videoTrack preferredTransform];

    if (t.a == -1 && t.d == -1) {
        return kGPUImageRotate180;
    } else if (t.a == 1 && t.d == 1) {
        return kGPUImageNoRotation;
    } else if (t.b == -1 && t.c == 1) {
        return kGPUImageRotateLeft;
    } else if (t.a == -1 && t.d == 1) {
        return kGPUImageFlipHorizonal;
    } else if (t.a == 1 && t.d == -1) {
        return kGPUImageRotate180FlipHorizontal;
    } else {
        if (t.c == 1) {
            return kGPUImageRotateRightFlipVertical;
        } else {
            return kGPUImageRotateRight;
        }
    }
}

- (instancetype)initWithPlayerItem:(AVPlayerItem *)playerItem
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.playerItem = playerItem;

    return self;
}
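
// One-time setup of the YUV -> RGB shader program on the shared video
// processing context. The RG video-range shader applies no luma offset; the
// conversion matrix itself is re-selected per frame in
// -processMovieFrame:withSampleTime: based on the buffer's color attachments.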
- (void)yuvConversionSetup
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kYUVVideoRangeConversionForRGFragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}

- (void)dealloc
{
    [playerItemOutput setDelegate:nil queue:nil];

    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate];
        displayLink = nil;
    }
}

#pragma mark -
#pragma mark Movie processing
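
// Sets up a paused CADisplayLink on the main run loop and attaches an
// AVPlayerItemVideoOutput requesting full-range NV12 buffers (BGRA when fast
// texture upload is unavailable), with BT.709 color properties on iOS 10+.
// Note: the dispatch_sync below deadlocks if this is called on the main thread.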
- (void)startProcessing {
    dispatch_sync(dispatch_get_main_queue(), ^{
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
        [displayLink setPaused:true];
    });

    runSynchronouslyOnVideoProcessingQueue(^{
        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
        if ([GPUImageContext supportsFastTextureUpload]) {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        } else {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        if (iosMajorVersion() >= 10) {
            NSDictionary *hdVideoProperties = @{
                AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
                AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2,
            };
            [pixBuffAttributes setObject:hdVideoProperties forKey:AVVideoColorPropertiesKey];
            playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithOutputSettings:pixBuffAttributes];
        } else {
            playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
        }
        [playerItemOutput setDelegate:self queue:videoProcessingQueue];

        [_playerItem addOutput:playerItemOutput];
        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
    });
}
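
// AVPlayerItemOutputPullDelegate callback: new media is about to become
// available, so unpause the display link to resume pulling frames.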
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    dispatch_async(dispatch_get_main_queue(), ^{
        [displayLink setPaused:false];
    });
}
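
// Display link callback: asks the output for the item time that corresponds
// to the next vsync and processes the matching pixel buffer.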
- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
    [self processPixelBufferAtTime:outputItemTime];
}
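
// Synchronously reprocesses the most recent frame even if the output reports
// no new pixel buffer (e.g. while playback is paused).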
- (void)process {
    _shouldReprocessCurrentFrame = true;
    [self displayLinkCallback:displayLink];
}
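
// Pulls a pixel buffer for the given item time (or re-pulls the current one
// when a reprocess was requested) and runs it through the filter chain. The
// __unsafe_unretained self reference is safe because the processing block
// executes synchronously.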
- (void)processPixelBufferAtTime:(CMTime)outputItemTime
{
    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime] || _shouldReprocessCurrentFrame)
    {
        _shouldReprocessCurrentFrame = false;

        __unsafe_unretained PGVideoMovie *weakSelf = self;
        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if (pixelBuffer != NULL)
        {
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                CFRelease(pixelBuffer);
            });
        }
    }
}
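
// Marks the current frame dirty so the next display-link tick reprocesses it
// even if no new pixel buffer is available.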
- (void)reprocessCurrent {
    _shouldReprocessCurrentFrame = true;
}

- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer
{
    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}
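
// Core per-frame work: picks the conversion matrix from the buffer's YCbCr
// matrix attachment, uploads the Y and CbCr planes as GL textures via the
// Core Video texture cache (fast path), converts to RGB, and forwards the
// result to all targets. The slow path uploads BGRA data with glTexImage2D.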
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int)CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int)CVPixelBufferGetWidth(movieFrame);

    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }

    [GPUImageContext useImageProcessingContext];
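
    // Fast path: map the two 4:2:0 planes into GL textures through the
    // Core Video texture cache, as GL_RED/GL_RG where supported and
    // GL_LUMINANCE/GL_LUMINANCE_ALPHA otherwise.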
    if ([GPUImageContext supportsFastTextureUpload])
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

        if (CVPixelBufferGetPlaneCount(movieFrame) > 0)
        {
            CVPixelBufferLockBaseAddress(movieFrame, 0);

            // Refresh the cached buffer dimensions if either one changed.
            if ((imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight))
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);

            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            if ([GPUImageContext deviceSupportsRedTextures])
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            else
            {
                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            }
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);

            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
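
            // Convert the two planes into one RGB framebuffer and hand it to
            // every target, together with the track's rotation.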
            [self convertYUVToRGBOutput];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];

                AVAsset *asset = self.playerItem.asset;
                if (asset != nil) {
                    GPUImageRotationMode rotation = [self rotationForTrack:asset];
                    [currentTarget setInputRotation:rotation atIndex:targetTextureIndex];
                }
            }

            [outputFramebuffer unlock];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
    }
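
    // Slow path (no fast texture upload): the buffer was requested as BGRA,
    // so its base address can be uploaded directly with glTexImage2D.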
    else
    {
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
        glTexImage2D(GL_TEXTURE_2D, 0, self.outputTextureOptions.internalFormat, bufferWidth, bufferHeight, 0, self.outputTextureOptions.format, self.outputTextureOptions.type, CVPixelBufferGetBaseAddress(movieFrame));

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }
}
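
// Stops the display link on the main queue and notifies every target that
// processing has ended.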
- (void)endProcessing
{
    dispatch_async(dispatch_get_main_queue(), ^{
        [displayLink setPaused:false];
        if (displayLink != nil)
        {
            [displayLink invalidate];
            displayLink = nil;
        }
    });

    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }
}

- (void)cancelProcessing
{
    [self endProcessing];
}
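
// Renders the luminance/chrominance textures into an RGB framebuffer using
// the conversion program; texture units 4 and 5 match the glActiveTexture
// calls made when the planes were uploaded.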
- (void)convertYUVToRGBOutput
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

@end