Video editor fixes

This commit is contained in:
Ilya Laktyushin
2020-05-25 20:21:02 +03:00
parent 0eb6b5783f
commit 5f634e5818
5 changed files with 33 additions and 9 deletions

View File

@@ -45,6 +45,7 @@
- (void)setCIImage:(CIImage *)ciImage;
- (void)processAnimated:(bool)animated completion:(void (^)(void))completion;
- (void)reprocess;
- (void)createResultImageWithCompletion:(void (^)(UIImage *image))completion;
- (UIImage *)currentResultImage;

View File

@@ -320,6 +320,12 @@
} synchronous:synchronous];
}
/// Requests that the current video frame be pushed through the filter chain
/// again (used after adjustments change while playback is paused).
/// No-op when the active input is not a video (e.g. a still image).
- (void)reprocess {
    if (![_currentInput isKindOfClass:[PGVideoMovie class]]) {
        return;
    }

    PGVideoMovie *movieInput = (PGVideoMovie *)_currentInput;
    [movieInput reprocessCurrent];
}
- (void)updateProcessChain {
[GPUImageFramebuffer setMark:self.forVideo];

View File

@@ -23,4 +23,6 @@
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
- (void)reprocessCurrent;
@end

View File

@@ -116,6 +116,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRIN
@implementation PGVideoMovie
{
bool videoEncodingIsFinished;
bool _shouldReprocessCurrentFrame;
}
@synthesize asset = _asset;
@@ -341,8 +342,10 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRIN
- (void)processPixelBufferAtTime:(CMTime)outputItemTime
{
if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime])
if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime] || _shouldReprocessCurrentFrame)
{
_shouldReprocessCurrentFrame = false;
__unsafe_unretained PGVideoMovie *weakSelf = self;
CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
if (pixelBuffer != NULL)
@@ -355,6 +358,10 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRIN
}
}
// Marks the current frame for reprocessing: -processPixelBufferAtTime: reads
// (and clears) this flag so the existing pixel buffer is run through the
// filter chain again even when the player item reports no new frame.
- (void)reprocessCurrent {
_shouldReprocessCurrentFrame = true;
}
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && !videoEncodingIsFinished)

View File

@@ -449,6 +449,10 @@
[strongSelf->_entitiesContainerView setupWithPaintingData:adjustments.paintingData];
[strongSelf->_photoEditor importAdjustments:adjustments];
if (!strongSelf.isPlaying) {
[strongSelf->_photoEditor reprocess];
}
}]];
}
else
@@ -810,13 +814,12 @@
{
if (_videoView != nil)
{
UIImage *image = nil;
UIGraphicsBeginImageContextWithOptions(_videoView.bounds.size, true, [UIScreen mainScreen].scale);
if (_lastRenderedScreenImage != nil)
return _lastRenderedScreenImage;
UIImage *image = nil;
UIGraphicsBeginImageContextWithOptions(_videoView.bounds.size, true, [UIScreen mainScreen].scale);
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:_player.currentItem.asset];
generator.appliesPreferredTrackTransform = true;
generator.maximumSize = TGFitSize(_videoDimensions, CGSizeMake(1280.0f, 1280.0f));
@@ -853,11 +856,16 @@
generator.requestedTimeToleranceBefore = kCMTimeZero;
CGImageRef imageRef = [generator copyCGImageAtTime:_player.currentTime actualTime:nil error:NULL];
_lastRenderedScreenImage = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
TGVideoEditAdjustments *adjustments = (TGVideoEditAdjustments *)[self.item.editingContext adjustmentsForItem:self.item.editableMediaItem];
UIImage *renderedImage = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
if (adjustments.toolsApplied) {
renderedImage = [PGPhotoEditor resultImageForImage:renderedImage adjustments:adjustments];
}
_lastRenderedScreenImage = renderedImage;
CGSize originalSize = _videoDimensions;
CGRect cropRect = CGRectMake(0, 0, _videoDimensions.width, _videoDimensions.height);
UIImageOrientation cropOrientation = UIImageOrientationUp;
@@ -873,7 +881,7 @@
CGRect drawRect = CGRectMake(-cropRect.origin.x * ratio, -cropRect.origin.y * ratio, originalSize.width * ratio, originalSize.height * ratio);
[_lastRenderedScreenImage drawInRect:drawRect];
if (_paintingImageView.image != nil)
[_paintingImageView.image drawInRect:drawRect];
}