Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Commit diff (no commit message)
@@ -478,6 +478,22 @@
D026608B1F34B9F9000E2DC5 /* TGSearchDisplayMixin.m in Sources */ = {isa = PBXBuildFile; fileRef = D02660891F34B9F9000E2DC5 /* TGSearchDisplayMixin.m */; };
D026608E1F34BA71000E2DC5 /* TGPickPinAnnotationView.h in Headers */ = {isa = PBXBuildFile; fileRef = D026608C1F34BA71000E2DC5 /* TGPickPinAnnotationView.h */; };
D026608F1F34BA71000E2DC5 /* TGPickPinAnnotationView.m in Sources */ = {isa = PBXBuildFile; fileRef = D026608D1F34BA71000E2DC5 /* TGPickPinAnnotationView.m */; };
D04268F91F58687D0037ECE8 /* TGVideoCameraGLView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04268FA1F58687D0037ECE8 /* TGVideoCameraGLView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */; };
D04269031F586A070037ECE8 /* TGVideoMessageRingView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04269041F586A070037ECE8 /* TGVideoMessageRingView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */; };
D04269051F586A070037ECE8 /* TGVideoMessageScrubber.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04269061F586A070037ECE8 /* TGVideoMessageScrubber.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */; };
D04269071F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04269081F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */; };
D04269091F586A070037ECE8 /* TGVideoMessageTrimView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */; settings = {ATTRIBUTES = (Public, ); }; };
D042690A1F586A070037ECE8 /* TGVideoMessageTrimView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */; };
D042690D1F586B140037ECE8 /* TGVideoMessageControls.h in Headers */ = {isa = PBXBuildFile; fileRef = D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */; settings = {ATTRIBUTES = (Public, ); }; };
D042690E1F586B140037ECE8 /* TGVideoMessageControls.m in Sources */ = {isa = PBXBuildFile; fileRef = D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */; };
D04269111F586E430037ECE8 /* TGVideoCameraPipeline.h in Headers */ = {isa = PBXBuildFile; fileRef = D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04269121F586E430037ECE8 /* TGVideoCameraPipeline.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */; };
D04269151F586EC80037ECE8 /* TGVideoMessageCaptureController.h in Headers */ = {isa = PBXBuildFile; fileRef = D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */; settings = {ATTRIBUTES = (Public, ); }; };
D04269161F586EC80037ECE8 /* TGVideoMessageCaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */; };
D07BC6CF1F2A18B700ED97AA /* TGCameraMainPhoneView.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BC6C91F2A18B700ED97AA /* TGCameraMainPhoneView.h */; settings = {ATTRIBUTES = (Public, ); }; };
D07BC6D01F2A18B700ED97AA /* TGCameraMainPhoneView.m in Sources */ = {isa = PBXBuildFile; fileRef = D07BC6CA1F2A18B700ED97AA /* TGCameraMainPhoneView.m */; };
D07BC6D11F2A18B700ED97AA /* TGCameraMainTabletView.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BC6CB1F2A18B700ED97AA /* TGCameraMainTabletView.h */; settings = {ATTRIBUTES = (Public, ); }; };
@@ -1115,6 +1131,11 @@
D07BCBFC1F2B757700ED97AA /* TGEmbedPIPScrubber.m in Sources */ = {isa = PBXBuildFile; fileRef = D07BCBFA1F2B757700ED97AA /* TGEmbedPIPScrubber.m */; };
D07BCC051F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BCC031F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h */; settings = {ATTRIBUTES = (Public, ); }; };
D07BCC061F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m in Sources */ = {isa = PBXBuildFile; fileRef = D07BCC041F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m */; };
D0F7C9C41F55DA49005B255A /* TGVideoCameraMovieRecorder.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */; settings = {ATTRIBUTES = (Public, ); }; };
D0F7C9C51F55DA49005B255A /* TGVideoCameraMovieRecorder.m in Sources */ = {isa = PBXBuildFile; fileRef = D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */; };
D0F7C9C81F55DA83005B255A /* TGVideoCameraGLRenderer.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */; settings = {ATTRIBUTES = (Public, ); }; };
D0F7C9C91F55DA83005B255A /* TGVideoCameraGLRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */; };
D0F7C9D61F55DB2D005B255A /* TGLiveUploadInterface.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */; settings = {ATTRIBUTES = (Public, ); }; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
@@ -1591,6 +1612,22 @@
D02660891F34B9F9000E2DC5 /* TGSearchDisplayMixin.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGSearchDisplayMixin.m; sourceTree = "<group>"; };
D026608C1F34BA71000E2DC5 /* TGPickPinAnnotationView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGPickPinAnnotationView.h; sourceTree = "<group>"; };
D026608D1F34BA71000E2DC5 /* TGPickPinAnnotationView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGPickPinAnnotationView.m; sourceTree = "<group>"; };
D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraGLView.h; sourceTree = "<group>"; };
D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraGLView.m; sourceTree = "<group>"; };
D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageRingView.h; sourceTree = "<group>"; };
D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageRingView.m; sourceTree = "<group>"; };
D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageScrubber.h; sourceTree = "<group>"; };
D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageScrubber.m; sourceTree = "<group>"; };
D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageScrubberThumbnailView.h; sourceTree = "<group>"; };
D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageScrubberThumbnailView.m; sourceTree = "<group>"; };
D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageTrimView.h; sourceTree = "<group>"; };
D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageTrimView.m; sourceTree = "<group>"; };
D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageControls.h; sourceTree = "<group>"; };
D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageControls.m; sourceTree = "<group>"; };
D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraPipeline.h; sourceTree = "<group>"; };
D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraPipeline.m; sourceTree = "<group>"; };
D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageCaptureController.h; sourceTree = "<group>"; };
D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageCaptureController.m; sourceTree = "<group>"; };
D07BC6C91F2A18B700ED97AA /* TGCameraMainPhoneView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGCameraMainPhoneView.h; sourceTree = "<group>"; };
D07BC6CA1F2A18B700ED97AA /* TGCameraMainPhoneView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGCameraMainPhoneView.m; sourceTree = "<group>"; };
D07BC6CB1F2A18B700ED97AA /* TGCameraMainTabletView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGCameraMainTabletView.h; sourceTree = "<group>"; };
@@ -2229,6 +2266,11 @@
D07BCC031F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGModernConversationTitleActivityIndicator.h; sourceTree = "<group>"; };
D07BCC041F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGModernConversationTitleActivityIndicator.m; sourceTree = "<group>"; };
D0EB42021F3142F400838FE6 /* LegacyComponentsResources.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; name = LegacyComponentsResources.bundle; path = Resources/LegacyComponentsResources.bundle; sourceTree = "<group>"; };
D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraMovieRecorder.h; sourceTree = "<group>"; };
D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraMovieRecorder.m; sourceTree = "<group>"; };
D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraGLRenderer.h; sourceTree = "<group>"; };
D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraGLRenderer.m; sourceTree = "<group>"; };
D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGLiveUploadInterface.h; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
@@ -2311,6 +2353,7 @@
D07BCB3F1F2B69D400ED97AA /* Embed Video */,
D02660331F34A7DA000E2DC5 /* Location */,
09750FAE1F30DAE1001B9886 /* Clipboard Menu */,
D0F7C9C11F55DA29005B255A /* Video Message */,
D017772A1F1F8F100044446D /* LegacyComponents.h */,
D017772B1F1F8F100044446D /* Info.plist */,
);
@@ -2411,6 +2454,7 @@
D07BC9661F2A3F5C00ED97AA /* TGCache.h */,
D07BC9671F2A3F5C00ED97AA /* TGCache.m */,
D07BCAAC1F2B45DA00ED97AA /* TGFileUtils.h */,
D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */,
);
name = Utils;
sourceTree = "<group>";
@@ -3657,6 +3701,33 @@
name = Resources;
sourceTree = "<group>";
};
D0F7C9C11F55DA29005B255A /* Video Message */ = {
isa = PBXGroup;
children = (
D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */,
D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */,
D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */,
D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */,
D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */,
D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */,
D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */,
D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */,
D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */,
D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */,
D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */,
D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */,
D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h */,
D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */,
D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */,
D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */,
D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */,
D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */,
D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */,
D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */,
);
name = "Video Message";
sourceTree = "<group>";
};
/* End PBXGroup section */

/* Begin PBXHeadersBuildPhase section */
@@ -3674,6 +3745,7 @@
D01777531F1F8FE60044446D /* PSCoding.h in Headers */,
D07BC6F91F2A19A700ED97AA /* TGCameraTimeCodeView.h in Headers */,
D0177A131F213B440044446D /* NSValue+JNWAdditions.h in Headers */,
D0F7C9D61F55DB2D005B255A /* TGLiveUploadInterface.h in Headers */,
D017781D1F1F961D0044446D /* TGMessageEntityMention.h in Headers */,
D07BC7FE1F2A2C0B00ED97AA /* PGFadeTool.h in Headers */,
D01778ED1F20CAE60044446D /* TGOverlayController.h in Headers */,
@@ -3722,6 +3794,7 @@
D07BC9541F2A3EA900ED97AA /* TGModernConversationMentionsAssociatedPanel.h in Headers */,
D01778FB1F20CF6B0044446D /* TGBackdropView.h in Headers */,
D01779F51F2139980044446D /* POPBasicAnimationInternal.h in Headers */,
D04269071F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h in Headers */,
D026605A1F34A7F8000E2DC5 /* TGLocationMapModeControl.h in Headers */,
D07BC8651F2A2F1300ED97AA /* TGMediaPickerCaptionInputPanel.h in Headers */,
D07BC9191F2A380D00ED97AA /* TGPaintRadialBrush.h in Headers */,
@@ -3756,6 +3829,7 @@
D07BCB211F2B646A00ED97AA /* TGPasscodeBackground.h in Headers */,
D01778371F1F961D0044446D /* TGAudioMediaAttachment.h in Headers */,
D07BC6F51F2A19A700ED97AA /* TGCameraSegmentsView.h in Headers */,
D04269051F586A070037ECE8 /* TGVideoMessageScrubber.h in Headers */,
D07BC87F1F2A365000ED97AA /* TGProgressWindow.h in Headers */,
D07BC8001F2A2C0B00ED97AA /* PGGrainTool.h in Headers */,
D01779EA1F2139980044446D /* POPAnimationPrivate.h in Headers */,
@@ -3775,6 +3849,7 @@
D0177B181F2641B10044446D /* PGCameraDeviceAngleSampler.h in Headers */,
D0177A0A1F2139980044446D /* POPSpringSolver.h in Headers */,
D07BCB3D1F2B65F100ED97AA /* TGWallpaperInfo.h in Headers */,
D04268F91F58687D0037ECE8 /* TGVideoCameraGLView.h in Headers */,
D017794F1F2100280044446D /* TGMediaSelectionContext.h in Headers */,
D01778111F1F961D0044446D /* TGMessageEntityBold.h in Headers */,
D07BC6EB1F2A19A700ED97AA /* TGCameraFlashActiveView.h in Headers */,
@@ -3805,6 +3880,7 @@
D01778171F1F961D0044446D /* TGMessageEntityEmail.h in Headers */,
D01779E21F2139980044446D /* POPAnimation.h in Headers */,
D02660721F34A7F8000E2DC5 /* TGLocationVenueCell.h in Headers */,
D042690D1F586B140037ECE8 /* TGVideoMessageControls.h in Headers */,
D01777F41F1F961D0044446D /* TGTextCheckingResult.h in Headers */,
D01778231F1F961D0044446D /* TGMessageEntityTextUrl.h in Headers */,
D0177A9B1F22204A0044446D /* TGModernGalleryEmbeddedStickersHeaderView.h in Headers */,
@@ -3859,6 +3935,7 @@
D07BC8CB1F2A37EC00ED97AA /* TGPhotoPaintSelectionContainerView.h in Headers */,
D07BCB621F2B6A5600ED97AA /* TGEmbedPlayerControls.h in Headers */,
D07BC7371F2A2A7D00ED97AA /* PGPhotoEditorPicture.h in Headers */,
D0F7C9C41F55DA49005B255A /* TGVideoCameraMovieRecorder.h in Headers */,
D07BC85A1F2A2DBD00ED97AA /* TGMenuSheetDimView.h in Headers */,
D07BC9AE1F2A4A5100ED97AA /* TGItemMenuSheetPreviewView.h in Headers */,
D07BC7F81F2A2C0B00ED97AA /* PGCurvesTool.h in Headers */,
@@ -3966,15 +4043,18 @@
D07BCA171F2A9A2B00ED97AA /* TGMediaPickerPhotoStripView.h in Headers */,
D0177B1C1F2641B10044446D /* PGCameraMomentSession.h in Headers */,
D01778C21F200AF70044446D /* TGAnimationBlockDelegate.h in Headers */,
D04269091F586A070037ECE8 /* TGVideoMessageTrimView.h in Headers */,
D07BCBC31F2B6F6300ED97AA /* CBJSONCoubMapper.h in Headers */,
D01779F61F2139980044446D /* POPCGUtils.h in Headers */,
D07BC76A1F2A2B3700ED97AA /* TGPhotoEditorBlurToolView.h in Headers */,
D01778401F1F961D0044446D /* TGDocumentAttributeSticker.h in Headers */,
D0177A431F21F62A0044446D /* TGMediaVideoConverter.h in Headers */,
D0177AA31F2222990044446D /* TGKeyCommandController.h in Headers */,
D04269111F586E430037ECE8 /* TGVideoCameraPipeline.h in Headers */,
D07BCACF1F2B4E9000ED97AA /* TGAttachmentMenuCell.h in Headers */,
D01779FA1F2139980044446D /* POPDecayAnimation.h in Headers */,
D07BCBB11F2B6F6300ED97AA /* CBCoubAuthorVO.h in Headers */,
D0F7C9C81F55DA83005B255A /* TGVideoCameraGLRenderer.h in Headers */,
D07BCBF31F2B72DC00ED97AA /* STKHTTPDataSource.h in Headers */,
D07BC78C1F2A2B3700ED97AA /* TGPhotoEditorToolButtonsView.h in Headers */,
D07BCBBB1F2B6F6300ED97AA /* CBCoubPlayerContance.h in Headers */,
@@ -4152,6 +4232,7 @@
D07BC9431F2A3E4400ED97AA /* TGSuggestionContext.h in Headers */,
D07BC9011F2A380D00ED97AA /* TGPaintBrushPreview.h in Headers */,
D0177AA71F22239A0044446D /* TGModernGalleryController.h in Headers */,
D04269031F586A070037ECE8 /* TGVideoMessageRingView.h in Headers */,
D07BC99D1F2A494000ED97AA /* TGStickerCollectionViewCell.h in Headers */,
09750FC11F30DCDC001B9886 /* TGClipboardGalleryPhotoItemView.h in Headers */,
D07BC7BE1F2A2BDD00ED97AA /* PGPhotoToolComposer.h in Headers */,
@@ -4224,6 +4305,7 @@
D07BCA961F2B443700ED97AA /* TGMediaAssetsPhotoCell.h in Headers */,
D01778441F1F961D0044446D /* TGDocumentAttributeAnimated.h in Headers */,
D07BCABB1F2B4E2600ED97AA /* TGTransitionLayout.h in Headers */,
D04269151F586EC80037ECE8 /* TGVideoMessageCaptureController.h in Headers */,
D01778591F1F961D0044446D /* TGImageMediaAttachment.h in Headers */,
D07BC9151F2A380D00ED97AA /* TGPaintPanGestureRecognizer.h in Headers */,
D0177A281F2144700044446D /* TGPhotoEditorAnimation.h in Headers */,
@@ -4327,6 +4409,7 @@
D07BC7B71F2A2BBE00ED97AA /* PGBlurTool.m in Sources */,
D07BCA991F2B443700ED97AA /* TGMediaAssetsPickerController.m in Sources */,
D01778471F1F961D0044446D /* TGDocumentAttributeAudio.m in Sources */,
D042690A1F586A070037ECE8 /* TGVideoMessageTrimView.m in Sources */,
D01778181F1F961D0044446D /* TGMessageEntityEmail.m in Sources */,
D07BC90C1F2A380D00ED97AA /* TGPaintFaceDetector.m in Sources */,
D07BC9F21F2A9A2B00ED97AA /* TGMediaPickerCell.m in Sources */,
@@ -4354,6 +4437,7 @@
D0177B211F2641B10044446D /* PGCameraShotMetadata.m in Sources */,
D017794A1F20FFF60044446D /* TGMediaAssetMoment.m in Sources */,
D07BC9691F2A3F5C00ED97AA /* TGCache.m in Sources */,
D04269161F586EC80037ECE8 /* TGVideoMessageCaptureController.m in Sources */,
D07BCBF81F2B72DC00ED97AA /* STKQueueEntry.m in Sources */,
D07BC81F1F2A2C0B00ED97AA /* PGPhotoSharpenPass.m in Sources */,
D02660871F34B9B1000E2DC5 /* TGSearchBar.m in Sources */,
@@ -4390,6 +4474,7 @@
D0177AA81F22239A0044446D /* TGModernGalleryController.m in Sources */,
D07BC7001F2A1A7700ED97AA /* TGMenuView.m in Sources */,
D01779931F2108130044446D /* PSLMDBKeyValueCursor.m in Sources */,
D04269041F586A070037ECE8 /* TGVideoMessageRingView.m in Sources */,
D07BCAE41F2B502F00ED97AA /* TGImagePickerController.mm in Sources */,
D07BC7791F2A2B3700ED97AA /* TGPhotoEditorHUDView.m in Sources */,
D02660771F34A7F8000E2DC5 /* TGLocationViewController.m in Sources */,
@@ -4495,6 +4580,7 @@
D0177A191F213B9E0044446D /* TransformationMatrix.cpp in Sources */,
D07BC8031F2A2C0B00ED97AA /* PGHighlightsTool.m in Sources */,
D0177A981F221DB60044446D /* TGModernGalleryContainerView.m in Sources */,
D04268FA1F58687D0037ECE8 /* TGVideoCameraGLView.m in Sources */,
D07BC8611F2A2DBD00ED97AA /* TGMenuSheetView.m in Sources */,
D017796E1F2103DB0044446D /* TGPhotoPaintStickerEntity.m in Sources */,
D07BC9F61F2A9A2B00ED97AA /* TGMediaPickerGalleryGifItem.m in Sources */,
@@ -4522,6 +4608,7 @@
D07BCA041F2A9A2B00ED97AA /* TGMediaPickerGallerySelectedItemsModel.m in Sources */,
D017790F1F20F4370044446D /* UIImage+TG.m in Sources */,
D07BC6F61F2A19A700ED97AA /* TGCameraSegmentsView.m in Sources */,
D04269061F586A070037ECE8 /* TGVideoMessageScrubber.m in Sources */,
D017791B1F20F4A20044446D /* TGImageLuminanceMap.m in Sources */,
D07BCB6B1F2B6A5600ED97AA /* TGEmbedSoundCloudPlayerView.m in Sources */,
D07BCB631F2B6A5600ED97AA /* TGEmbedPlayerControls.m in Sources */,
@@ -4575,6 +4662,7 @@
D07BCB731F2B6A5600ED97AA /* TGEmbedVKPlayerView.m in Sources */,
D07BCA0E1F2A9A2B00ED97AA /* TGMediaPickerGalleryVideoTrimView.m in Sources */,
D026608B1F34B9F9000E2DC5 /* TGSearchDisplayMixin.m in Sources */,
D04269121F586E430037ECE8 /* TGVideoCameraPipeline.m in Sources */,
D07BCA501F2A9DDD00ED97AA /* FLAnimatedImage.m in Sources */,
D07BC9881F2A472900ED97AA /* TGPhotoStickersSectionHeader.m in Sources */,
D0177ADE1F23D9B80044446D /* SGraphObjectNode.m in Sources */,
@@ -4676,6 +4764,7 @@
D07BC8231F2A2C0B00ED97AA /* PGShadowsTool.m in Sources */,
D01779971F21082E0044446D /* PSLMDBTable.m in Sources */,
D07BCC061F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m in Sources */,
D04269081F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m in Sources */,
D07BCB361F2B65F100ED97AA /* TGBuiltinWallpaperInfo.m in Sources */,
D07BC90A1F2A380D00ED97AA /* TGPaintFaceDebugView.m in Sources */,
D017785A1F1F961D0044446D /* TGImageMediaAttachment.m in Sources */,
@@ -4706,6 +4795,7 @@
D01778141F1F961D0044446D /* TGMessageEntityBotCommand.m in Sources */,
D01778541F1F961D0044446D /* TGVideoMediaAttachment.m in Sources */,
D07BC7A21F2A2B8900ED97AA /* GLProgram.m in Sources */,
D042690E1F586B140037ECE8 /* TGVideoMessageControls.m in Sources */,
D07BC9A71F2A49E300ED97AA /* TGItemPreviewView.m in Sources */,
D07BCB611F2B6A5600ED97AA /* TGEmbedPIPPullArrowView.m in Sources */,
D01779341F20FFAC0044446D /* TGMediaAssetsModernLibrary.m in Sources */,
@@ -4733,6 +4823,7 @@
D0177A291F2144700044446D /* TGPhotoEditorAnimation.m in Sources */,
D07BCBB61F2B6F6300ED97AA /* CBCoubLoopCompositionMaker.m in Sources */,
D07BC9401F2A3DB900ED97AA /* TGMessageImageViewOverlayView.m in Sources */,
D0F7C9C91F55DA83005B255A /* TGVideoCameraGLRenderer.m in Sources */,
D07BC80B1F2A2C0B00ED97AA /* PGPhotoEnhanceInterpolationFilter.m in Sources */,
D01779651F2103910044446D /* TGPaintUtils.m in Sources */,
D01779AB1F210A2C0044446D /* TGMediaAssetImageSignals.m in Sources */,
@@ -4751,6 +4842,7 @@
D07BC7241F2A29E400ED97AA /* TGPhotoToolsController.m in Sources */,
D07BC7AF1F2A2B8900ED97AA /* GPUImageTwoInputFilter.m in Sources */,
D07BCA8D1F2B443700ED97AA /* TGMediaAssetsMomentsCollectionLayout.m in Sources */,
D0F7C9C51F55DA49005B255A /* TGVideoCameraMovieRecorder.m in Sources */,
D07BC8D21F2A37EC00ED97AA /* TGPhotoPaintSparseView.m in Sources */,
D07BCA561F2A9E1600ED97AA /* TGDraggableCollectionView.m in Sources */,
D07BCA581F2A9E1600ED97AA /* TGDraggableCollectionViewFlowLayout.m in Sources */,
LegacyComponents/LegacyComponents.h
@@ -33,6 +33,7 @@ FOUNDATION_EXPORT const unsigned char LegacyComponentsVersionString[];
#import <LegacyComponents/TGKeyCommandController.h>
#import <LegacyComponents/TGWeakDelegate.h>
#import <LegacyComponents/TGCache.h>
#import <LegacyComponents/TGLiveUploadInterface.h>

#import <LegacyComponents/JNWSpringAnimation.h>
#import <LegacyComponents/POPAnimationEvent.h>
@@ -289,3 +290,4 @@ FOUNDATION_EXPORT const unsigned char LegacyComponentsVersionString[];

#import <LegacyComponents/TGClipboardGalleryMixin.h>
#import <LegacyComponents/TGClipboardGalleryPhotoItem.h>
#import <LegacyComponents/TGVideoMessageCaptureController.h>
Two binary image resources added (binary files not shown): 593 B and 874 B.
LegacyComponents/TGLiveUploadInterface.h (new file)
@@ -0,0 +1,8 @@
#import <Foundation/Foundation.h>

@protocol TGLiveUploadInterface <NSObject>

- (void)setupWithFileURL:(NSURL *)fileURL;
- (id)fileUpdated:(bool)completed;

@end
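Judging by its name and shape, this protocol lets an uploader stream the output file to the server while it is still being written: setupWithFileURL: points it at the file, and fileUpdated: is invoked as bytes are appended, with completed == true on the final call. A minimal sketch of a conformer (the class name TGLoggingLiveUploader is hypothetical, for illustration only; it logs instead of uploading):

#import <LegacyComponents/TGLiveUploadInterface.h>

// Hypothetical conformer: records the target URL and logs update callbacks.
// The return value of -fileUpdated: is treated as an opaque handle by the
// caller, so nil is acceptable for a no-op implementation.
@interface TGLoggingLiveUploader : NSObject <TGLiveUploadInterface>
@end

@implementation TGLoggingLiveUploader
{
    NSURL *_fileURL;
}

- (void)setupWithFileURL:(NSURL *)fileURL
{
    _fileURL = fileURL;
}

- (id)fileUpdated:(bool)completed
{
    NSLog(@"%@ updated (completed: %d)", _fileURL.lastPathComponent, (int)completed);
    return nil;
}

@end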
19
LegacyComponents/TGVideoCameraGLRenderer.h
Normal file
19
LegacyComponents/TGVideoCameraGLRenderer.h
Normal file
@@ -0,0 +1,19 @@
|
||||
#import <CoreMedia/CoreMedia.h>
|
||||
#import <CoreVideo/CoreVideo.h>
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
@interface TGVideoCameraGLRenderer : NSObject
|
||||
|
||||
@property (nonatomic, readonly) __attribute__((NSObject)) CMFormatDescriptionRef outputFormatDescription;
|
||||
@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
|
||||
@property (nonatomic, assign) bool mirror;
|
||||
@property (nonatomic, assign) CGFloat opacity;
|
||||
@property (nonatomic, readonly) bool hasPreviousPixelbuffer;
|
||||
|
||||
- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint;
|
||||
- (void)reset;
|
||||
|
||||
- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer;
|
||||
- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer;
|
||||
|
||||
@end
|
||||
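The readonly outputFormatDescription becomes valid after prepareForInputWithFormatDescription:outputRetainedBufferCountHint: and describes the renderer's square output buffers. A plausible use, sketched under the assumption that the pipeline wires the renderer to the movie recorder added in this same commit (variable names are illustrative):

// Illustrative wiring only: hand the renderer's output format to the recorder
// so the recorded video track matches the rendered (square, BGRA) buffers.
[_renderer prepareForInputWithFormatDescription:inputFormatDescription
                  outputRetainedBufferCountHint:6];
[_recorder addVideoTrackWithSourceFormatDescription:_renderer.outputFormatDescription
                                          transform:CGAffineTransformIdentity
                                           settings:videoSettings];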
LegacyComponents/TGVideoCameraGLRenderer.m (new file)
@@ -0,0 +1,504 @@
#import "TGVideoCameraGLRenderer.h"
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/glext.h>

#import <LegacyComponents/TGPaintShader.h>

@interface TGVideoCameraGLRenderer ()
{
    EAGLContext *_context;
    CVOpenGLESTextureCacheRef _textureCache;
    CVOpenGLESTextureCacheRef _prevTextureCache;
    CVOpenGLESTextureCacheRef _renderTextureCache;
    CVPixelBufferPoolRef _bufferPool;
    CFDictionaryRef _bufferPoolAuxAttributes;
    CMFormatDescriptionRef _outputFormatDescription;

    CVPixelBufferRef _previousPixelBuffer;

    TGPaintShader *_shader;
    GLint _frameUniform;
    GLint _previousFrameUniform;
    GLint _opacityUniform;
    GLint _aspectRatioUniform;
    GLint _noMirrorUniform;
    GLuint _offscreenBufferHandle;

    CGFloat _aspectRatio;
    float _textureVertices[8];
}

@end

@implementation TGVideoCameraGLRenderer

- (instancetype)init
{
    self = [super init];
    if (self != nil)
    {
        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_context)
            return nil;
    }
    return self;
}

- (void)dealloc
{
    [self deleteBuffers];
}

- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint
{
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription);
    CGFloat minSide = MIN(dimensions.width, dimensions.height);
    CGFloat maxSide = MAX(dimensions.width, dimensions.height);
    CGSize outputSize = CGSizeMake(minSide, minSide);

    _aspectRatio = minSide / maxSide;
    [self updateTextureVertices];

    [self deleteBuffers];
    [self initializeBuffersWithOutputSize:outputSize retainedBufferCountHint:outputRetainedBufferCountHint];
}

- (void)setOrientation:(AVCaptureVideoOrientation)orientation
{
    _orientation = orientation;
    [self updateTextureVertices];
}

- (void)setMirror:(bool)mirror
{
    _mirror = mirror;
    [self updateTextureVertices];
}

- (void)updateTextureVertices
{
    GLfloat centerOffset = (GLfloat)((1.0f - _aspectRatio) / 2.0f);

    switch (_orientation)
    {
        case AVCaptureVideoOrientationPortrait:
            if (!_mirror)
            {
                _textureVertices[0] = centerOffset;
                _textureVertices[1] = 1.0f;
                _textureVertices[2] = centerOffset;
                _textureVertices[3] = 0.0f;
                _textureVertices[4] = (1.0f - centerOffset);
                _textureVertices[5] = 1.0f;
                _textureVertices[6] = (1.0f - centerOffset);
                _textureVertices[7] = 0.0f;
            }
            else
            {
                _textureVertices[0] = (1.0f - centerOffset);
                _textureVertices[1] = 0.0f;
                _textureVertices[2] = (1.0f - centerOffset);
                _textureVertices[3] = 1.0f;
                _textureVertices[4] = centerOffset;
                _textureVertices[5] = 0.0f;
                _textureVertices[6] = centerOffset;
                _textureVertices[7] = 1.0f;
            }
            break;

        case AVCaptureVideoOrientationLandscapeLeft:
            if (!_mirror)
            {
                _textureVertices[0] = (1.0f - centerOffset);
                _textureVertices[1] = 1.0f;
                _textureVertices[2] = centerOffset;
                _textureVertices[3] = 1.0f;
                _textureVertices[4] = (1.0f - centerOffset);
                _textureVertices[5] = 0.0f;
                _textureVertices[6] = centerOffset;
                _textureVertices[7] = 0.0f;
            }
            else
            {
                _textureVertices[0] = centerOffset;
                _textureVertices[1] = 0.0f;
                _textureVertices[2] = (1.0f - centerOffset);
                _textureVertices[3] = 0.0f;
                _textureVertices[4] = centerOffset;
                _textureVertices[5] = 1.0f;
                _textureVertices[6] = (1.0f - centerOffset);
                _textureVertices[7] = 1.0f;
            }
            break;

        case AVCaptureVideoOrientationLandscapeRight:
            if (!_mirror)
            {
                _textureVertices[0] = centerOffset;
                _textureVertices[1] = 0.0f;
                _textureVertices[2] = (1.0f - centerOffset);
                _textureVertices[3] = 0.0f;
                _textureVertices[4] = centerOffset;
                _textureVertices[5] = 1.0f;
                _textureVertices[6] = (1.0f - centerOffset);
                _textureVertices[7] = 1.0f;
            }
            else
            {
                _textureVertices[0] = (1.0f - centerOffset);
                _textureVertices[1] = 1.0f;
                _textureVertices[2] = centerOffset;
                _textureVertices[3] = 1.0f;
                _textureVertices[4] = (1.0f - centerOffset);
                _textureVertices[5] = 0.0f;
                _textureVertices[6] = centerOffset;
                _textureVertices[7] = 0.0f;
            }
            break;

        default:
            break;
    }
}

- (void)reset
{
    [self deleteBuffers];
}

- (bool)hasPreviousPixelbuffer
{
    return _previousPixelBuffer != NULL;
}

- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer
{
    if (_previousPixelBuffer != NULL)
    {
        CFRelease(_previousPixelBuffer);
        _previousPixelBuffer = NULL;
    }

    _previousPixelBuffer = previousPixelBuffer;
    if (_previousPixelBuffer != NULL)
        CFRetain(_previousPixelBuffer);
}

- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    static const GLfloat squareVertices[] =
    {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };

    if (_offscreenBufferHandle == 0)
        return NULL;

    if (pixelBuffer == NULL)
        return NULL;

    const CMVideoDimensions srcDimensions = { (int32_t)CVPixelBufferGetWidth(pixelBuffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer) };
    const CMVideoDimensions dstDimensions = CMVideoFormatDescriptionGetDimensions(_outputFormatDescription);

    EAGLContext *oldContext = [EAGLContext currentContext];
    if (oldContext != _context)
    {
        if (![EAGLContext setCurrentContext:_context])
            return NULL;
    }

    CVReturn err = noErr;
    CVOpenGLESTextureRef srcTexture = NULL;
    CVOpenGLESTextureRef prevTexture = NULL;
    CVOpenGLESTextureRef dstTexture = NULL;
    CVPixelBufferRef dstPixelBuffer = NULL;

    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &srcTexture);

    if (!srcTexture || err)
        goto bail;

    bool hasPreviousTexture = false;
    if (_previousPixelBuffer != NULL)
    {
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _prevTextureCache, _previousPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &prevTexture);

        if (!prevTexture || err)
            goto bail;

        hasPreviousTexture = true;
    }

    err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer);
    if (err == kCVReturnWouldExceedAllocationThreshold)
    {
        CVOpenGLESTextureCacheFlush(_renderTextureCache, 0);
        err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer);
    }

    if (err)
        goto bail;

    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _renderTextureCache, dstPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, dstDimensions.width, dstDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &dstTexture);

    if (!dstTexture || err)
        goto bail;

    glBindFramebuffer(GL_FRAMEBUFFER, _offscreenBufferHandle);
    glViewport(0, 0, dstDimensions.width, dstDimensions.height);
    glUseProgram(_shader.program);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(CVOpenGLESTextureGetTarget(dstTexture), CVOpenGLESTextureGetName(dstTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget(dstTexture), CVOpenGLESTextureGetName(dstTexture), 0);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(CVOpenGLESTextureGetTarget(srcTexture), CVOpenGLESTextureGetName(srcTexture));
    glUniform1i(_frameUniform, 1);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    if (hasPreviousTexture)
    {
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(CVOpenGLESTextureGetTarget(prevTexture), CVOpenGLESTextureGetName(prevTexture));
        glUniform1i(_previousFrameUniform, 2);

        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }

    glVertexAttribPointer(0, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(1, 2, GL_FLOAT, 0, 0, _textureVertices);
    glEnableVertexAttribArray(1);

    glUniform1f(_opacityUniform, (GLfloat)_opacity);
    glUniform1f(_aspectRatioUniform, (GLfloat)(1.0f / _aspectRatio));
    glUniform1f(_noMirrorUniform, (GLfloat)(_mirror ? 1 : -1));

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindTexture(CVOpenGLESTextureGetTarget(srcTexture), 0);
    if (hasPreviousTexture)
        glBindTexture(CVOpenGLESTextureGetTarget(prevTexture), 0);
    glBindTexture(CVOpenGLESTextureGetTarget(dstTexture), 0);

    glFlush();

bail:
    if (oldContext != _context)
        [EAGLContext setCurrentContext:oldContext];

    if (srcTexture)
        CFRelease(srcTexture);
    if (prevTexture)
        CFRelease(prevTexture);

    if (dstTexture)
        CFRelease(dstTexture);

    return dstPixelBuffer;
}

- (CMFormatDescriptionRef)outputFormatDescription
{
    return _outputFormatDescription;
}

- (bool)initializeBuffersWithOutputSize:(CGSize)outputSize retainedBufferCountHint:(size_t)clientRetainedBufferCountHint
{
    bool success = true;

    EAGLContext *oldContext = [EAGLContext currentContext];
    if (oldContext != _context)
    {
        if (![EAGLContext setCurrentContext:_context])
            return false;
    }

    glDisable(GL_DEPTH_TEST);

    glGenFramebuffers(1, &_offscreenBufferHandle);
    glBindFramebuffer(GL_FRAMEBUFFER, _offscreenBufferHandle);

    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_textureCache);
    if (err)
    {
        success = false;
        goto bail;
    }

    err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_prevTextureCache);
    if (err)
    {
        success = false;
        goto bail;
    }

    err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_renderTextureCache);
    if (err)
    {
        success = false;
        goto bail;
    }

    _shader = [[TGPaintShader alloc] initWithVertexShader:@"VideoMessage" fragmentShader:@"VideoMessage" attributes:@[ @"inPosition", @"inTexcoord" ] uniforms:@[ @"texture", @"previousTexture", @"opacity", @"aspectRatio", @"noMirror" ]];

    _frameUniform = [_shader uniformForKey:@"texture"];
    _previousFrameUniform = [_shader uniformForKey:@"previousTexture"];
    _opacityUniform = [_shader uniformForKey:@"opacity"];
    _aspectRatioUniform = [_shader uniformForKey:@"aspectRatio"];
    _noMirrorUniform = [_shader uniformForKey:@"noMirror"];

    size_t maxRetainedBufferCount = clientRetainedBufferCountHint + 1;
    _bufferPool = [TGVideoCameraGLRenderer createPixelBufferPoolWithWidth:(int32_t)outputSize.width height:(int32_t)outputSize.height pixelFormat:kCVPixelFormatType_32BGRA maxBufferCount:(int32_t)maxRetainedBufferCount];

    if (!_bufferPool)
    {
        success = false;
        goto bail;
    }

    _bufferPoolAuxAttributes = [TGVideoCameraGLRenderer createPixelBufferPoolAuxAttribute:(int32_t)maxRetainedBufferCount];
    [TGVideoCameraGLRenderer preallocatePixelBuffersInPool:_bufferPool auxAttributes:_bufferPoolAuxAttributes];

    CMFormatDescriptionRef outputFormatDescription = NULL;
    CVPixelBufferRef testPixelBuffer = NULL;
    CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer);
    if (!testPixelBuffer)
    {
        success = false;
        goto bail;
    }
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, testPixelBuffer, &outputFormatDescription);
    _outputFormatDescription = outputFormatDescription;
    CFRelease(testPixelBuffer);

bail:
    if (!success)
        [self deleteBuffers];

    if (oldContext != _context)
        [EAGLContext setCurrentContext:oldContext];

    return success;
}

- (void)deleteBuffers
{
    EAGLContext *oldContext = [EAGLContext currentContext];
    if (oldContext != _context)
    {
        if (![EAGLContext setCurrentContext:_context])
            return;
    }

    if (_offscreenBufferHandle)
    {
        glDeleteFramebuffers(1, &_offscreenBufferHandle);
        _offscreenBufferHandle = 0;
    }

    if (_shader)
    {
        [_shader cleanResources];
        _shader = nil;
    }

    if (_textureCache)
    {
        CFRelease(_textureCache);
        _textureCache = 0;
    }

    if (_prevTextureCache)
    {
        CFRelease(_prevTextureCache);
        _prevTextureCache = 0;
    }

    if (_renderTextureCache)
    {
        CFRelease(_renderTextureCache);
        _renderTextureCache = 0;
    }

    if (_bufferPool)
    {
        CFRelease(_bufferPool);
        _bufferPool = NULL;
    }

    if (_bufferPoolAuxAttributes)
    {
        CFRelease(_bufferPoolAuxAttributes);
        _bufferPoolAuxAttributes = NULL;
    }

    if (_outputFormatDescription)
    {
        CFRelease(_outputFormatDescription);
        _outputFormatDescription = NULL;
    }

    if (oldContext != _context)
        [EAGLContext setCurrentContext:oldContext];
}

+ (CVPixelBufferPoolRef)createPixelBufferPoolWithWidth:(int32_t)width height:(int32_t)height pixelFormat:(FourCharCode)pixelFormat maxBufferCount:(int32_t)maxBufferCount
{
    CVPixelBufferPoolRef outputPool = NULL;

    NSDictionary *sourcePixelBufferOptions = @
    {
        (id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat),
        (id)kCVPixelBufferWidthKey : @(width),
        (id)kCVPixelBufferHeightKey : @(height),
        (id)kCVPixelFormatOpenGLESCompatibility : @true,
        (id)kCVPixelBufferIOSurfacePropertiesKey : @{ }
    };

    NSDictionary *pixelBufferPoolOptions = @{ (id)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount) };
    CVPixelBufferPoolCreate(kCFAllocatorDefault, (__bridge CFDictionaryRef)pixelBufferPoolOptions, (__bridge CFDictionaryRef)sourcePixelBufferOptions, &outputPool);

    return outputPool;
}

+ (CFDictionaryRef)createPixelBufferPoolAuxAttribute:(int32_t)maxBufferCount
{
    return CFBridgingRetain(@{ (id)kCVPixelBufferPoolAllocationThresholdKey : @(maxBufferCount) });
}

+ (void)preallocatePixelBuffersInPool:(CVPixelBufferPoolRef)pool auxAttributes:(CFDictionaryRef)auxAttributes
{
    NSMutableArray *pixelBuffers = [[NSMutableArray alloc] init];

    while (true)
    {
        CVPixelBufferRef pixelBuffer = NULL;
        OSStatus err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer);

        if (err == kCVReturnWouldExceedAllocationThreshold)
            break;

        [pixelBuffers addObject:CFBridgingRelease(pixelBuffer)];
    }

    [pixelBuffers removeAllObjects];
}

@end
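The renderer crops each camera frame to a centered square, optionally mirrors it, and can blend the current frame over a stored previous frame via the opacity uniform (the previous-frame path appears intended for cross-fading, e.g. when switching cameras). A sketch of how a capture pipeline might drive it, per frame (illustrative only; inputFormatDescription and sourcePixelBuffer would come from the AVCapture callback):

// One-time setup with illustrative values.
TGVideoCameraGLRenderer *renderer = [[TGVideoCameraGLRenderer alloc] init];
[renderer prepareForInputWithFormatDescription:inputFormatDescription
                 outputRetainedBufferCountHint:6];
renderer.orientation = AVCaptureVideoOrientationPortrait;
renderer.mirror = true;   // front camera
renderer.opacity = 1.0f;  // no blending with the previous frame

// Per captured frame: render, use, and balance the "copy" with a release.
CVPixelBufferRef rendered = [renderer copyRenderedPixelBuffer:sourcePixelBuffer];
if (rendered != NULL)
{
    // display and/or record `rendered` here
    CFRelease(rendered);
}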
LegacyComponents/TGVideoCameraGLView.h (new file)
@@ -0,0 +1,10 @@
#import <UIKit/UIKit.h>
#import <CoreVideo/CoreVideo.h>

@interface TGVideoCameraGLView : UIView

- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)flushPixelBufferCache;
- (void)reset;

@end
LegacyComponents/TGVideoCameraGLView.m (new file)
@@ -0,0 +1,231 @@
#import "TGVideoCameraGLView.h"
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/CAEAGLLayer.h>

#import <LegacyComponents/TGPaintShader.h>

#import "LegacyComponentsInternal.h"

@interface TGVideoCameraGLView ()
{
    EAGLContext *_context;
    CVOpenGLESTextureCacheRef _textureCache;
    GLint _width;
    GLint _height;
    GLuint _framebuffer;
    GLuint _colorbuffer;

    TGPaintShader *_shader;
    GLint _frame;
}
@end

@implementation TGVideoCameraGLView

+ (Class)layerClass
{
    return [CAEAGLLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self != nil)
    {
        if (iosMajorVersion() >= 8)
            self.contentScaleFactor = [UIScreen mainScreen].nativeScale;
        else
            self.contentScaleFactor = [UIScreen mainScreen].scale;

        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
        eaglLayer.opaque = true;
        eaglLayer.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking : @false, kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8 };

        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_context)
            return nil;
    }
    return self;
}

- (bool)initializeBuffers
{
    bool success = true;

    glDisable(GL_DEPTH_TEST);

    glGenFramebuffers(1, &_framebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);

    glGenRenderbuffers(1, &_colorbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorbuffer);

    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];

    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_width);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_height);

    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorbuffer);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        success = false;
        goto bail;
    }

    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_textureCache);
    if (err)
    {
        success = false;
        goto bail;
    }

    _shader = [[TGPaintShader alloc] initWithVertexShader:@"Passthrough" fragmentShader:@"Passthrough" attributes:@[ @"inPosition", @"inTexcoord" ] uniforms:@[ @"texture" ]];

    _frame = [_shader uniformForKey:@"texture"];

bail:
    if (!success)
    {
        [self reset];
    }
    return success;
}

- (void)reset
{
    EAGLContext *oldContext = [EAGLContext currentContext];
    if (oldContext != _context)
    {
        if (![EAGLContext setCurrentContext:_context])
            return;
    }

    if (_framebuffer)
    {
        glDeleteFramebuffers(1, &_framebuffer);
        _framebuffer = 0;
    }

    if (_colorbuffer)
    {
        glDeleteRenderbuffers(1, &_colorbuffer);
        _colorbuffer = 0;
    }

    if (_shader != nil)
    {
        [_shader cleanResources];
        _shader = nil;
    }

    if (_textureCache)
    {
        CFRelease(_textureCache);
        _textureCache = 0;
    }

    if (oldContext != _context)
        [EAGLContext setCurrentContext:oldContext];
}

- (void)dealloc
{
    [self reset];
}

- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    static const GLfloat squareVertices[] =
    {
        -1.0f, -1.0f, // bottom left
        1.0f, -1.0f, // bottom right
        -1.0f, 1.0f, // top left
        1.0f, 1.0f, // top right
    };

    if (pixelBuffer == NULL)
        return;

    EAGLContext *oldContext = [EAGLContext currentContext];
    if (oldContext != _context)
    {
        if (![EAGLContext setCurrentContext:_context])
            return;
    }

    if (_framebuffer == 0)
    {
        bool success = [self initializeBuffers];
        if (!success)
            return;
    }

    size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer);
    CVOpenGLESTextureRef texture = NULL;
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, (GLsizei)frameWidth, (GLsizei)frameHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);

    if (!texture || err)
        return;

    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
    glViewport(0, 0, _width, _height);

    glUseProgram(_shader.program);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));
    glUniform1i(_frame, 0);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glVertexAttribPointer(0, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(0);

    CGSize textureSamplingSize;
    CGSize cropScaleAmount = CGSizeMake(self.bounds.size.width / (CGFloat)frameWidth, self.bounds.size.height / (CGFloat)frameHeight);
    if (cropScaleAmount.height > cropScaleAmount.width)
    {
        textureSamplingSize.width = self.bounds.size.width / (frameWidth * cropScaleAmount.height);
        textureSamplingSize.height = 1.0;
    }
    else
    {
        textureSamplingSize.width = 1.0;
        textureSamplingSize.height = self.bounds.size.height / (frameHeight * cropScaleAmount.width);
    }

    GLfloat passThroughTextureVertices[] =
    {
        (GLfloat)((1.0 - textureSamplingSize.width) / 2.0), (GLfloat)((1.0 + textureSamplingSize.height) / 2.0), // top left
        (GLfloat)((1.0 + textureSamplingSize.width) / 2.0), (GLfloat)((1.0 + textureSamplingSize.height) / 2.0), // top right
        (GLfloat)((1.0 - textureSamplingSize.width) / 2.0), (GLfloat)((1.0 - textureSamplingSize.height) / 2.0), // bottom left
        (GLfloat)((1.0 + textureSamplingSize.width) / 2.0), (GLfloat)((1.0 - textureSamplingSize.height) / 2.0), // bottom right
    };

    glVertexAttribPointer(1, 2, GL_FLOAT, 0, 0, passThroughTextureVertices);
    glEnableVertexAttribArray(1);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glBindRenderbuffer(GL_RENDERBUFFER, _colorbuffer);
    [_context presentRenderbuffer:GL_RENDERBUFFER];

    glBindTexture(CVOpenGLESTextureGetTarget(texture), 0);
    glBindTexture(GL_TEXTURE_2D, 0);
    CFRelease(texture);

    if (oldContext != _context)
        [EAGLContext setCurrentContext:oldContext];
}

- (void)flushPixelBufferCache
{
    if (_textureCache)
        CVOpenGLESTextureCacheFlush(_textureCache, 0);
}

@end
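The view is a thin passthrough presenter: it lazily creates its framebuffer on the first displayPixelBuffer: call and aspect-fills the buffer into its bounds. A sketch of feeding it from the render path (illustrative only; _previewView and renderedPixelBuffer are assumed names):

// Illustrative only: hand each rendered frame to the preview view on the main
// queue, keeping GL presentation off the capture queue. The view does not
// retain the buffer past the call, so keep it alive until the block has run.
CFRetain(renderedPixelBuffer);
dispatch_async(dispatch_get_main_queue(), ^
{
    [_previewView displayPixelBuffer:renderedPixelBuffer];
    CFRelease(renderedPixelBuffer);
});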
LegacyComponents/TGVideoCameraMovieRecorder.h (new file)
@@ -0,0 +1,32 @@
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>

@protocol TGVideoCameraMovieRecorderDelegate;

@interface TGVideoCameraMovieRecorder : NSObject

@property (nonatomic, assign) bool paused;

- (instancetype)initWithURL:(NSURL *)URL delegate:(id<TGVideoCameraMovieRecorderDelegate>)delegate callbackQueue:(dispatch_queue_t)queue;

- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings;
- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings;

- (void)prepareToRecord;

- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;

- (void)finishRecording;

- (NSTimeInterval)videoDuration;

@end

@protocol TGVideoCameraMovieRecorderDelegate <NSObject>
@required
- (void)movieRecorderDidFinishPreparing:(TGVideoCameraMovieRecorder *)recorder;
- (void)movieRecorder:(TGVideoCameraMovieRecorder *)recorder didFailWithError:(NSError *)error;
- (void)movieRecorderDidFinishRecording:(TGVideoCameraMovieRecorder *)recorder;
@end
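The intended life cycle, as the implementation below suggests: add tracks while the recorder is idle, call prepareToRecord, append buffers once movieRecorderDidFinishPreparing: arrives, and end with finishRecording. A sketch (illustrative; the format descriptions and settings dictionaries would come from the capture pipeline, and self is assumed to conform to TGVideoCameraMovieRecorderDelegate):

TGVideoCameraMovieRecorder *recorder =
    [[TGVideoCameraMovieRecorder alloc] initWithURL:outputURL
                                           delegate:self
                                      callbackQueue:dispatch_get_main_queue()];
[recorder addVideoTrackWithSourceFormatDescription:videoFormatDescription
                                         transform:CGAffineTransformIdentity
                                          settings:videoSettings];
[recorder addAudioTrackWithSourceFormatDescription:audioFormatDescription
                                          settings:audioSettings];
[recorder prepareToRecord];

// After -movieRecorderDidFinishPreparing: is delivered, append media as it arrives:
//   [recorder appendVideoPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
//   [recorder appendAudioSampleBuffer:sampleBuffer];
// and finish with:
//   [recorder finishRecording];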
468
LegacyComponents/TGVideoCameraMovieRecorder.m
Normal file
468
LegacyComponents/TGVideoCameraMovieRecorder.m
Normal file
@@ -0,0 +1,468 @@
#import "TGVideoCameraMovieRecorder.h"
#import <AVFoundation/AVFoundation.h>

typedef enum {
    TGMovieRecorderStatusIdle = 0,
    TGMovieRecorderStatusPreparingToRecord,
    TGMovieRecorderStatusRecording,
    TGMovieRecorderStatusFinishingWaiting,
    TGMovieRecorderStatusFinishingCommiting,
    TGMovieRecorderStatusFinished,
    TGMovieRecorderStatusFailed
} TGMovieRecorderStatus;


@interface TGVideoCameraMovieRecorder ()
{
    TGMovieRecorderStatus _status;
    
    dispatch_queue_t _writingQueue;
    
    NSURL *_url;
    
    AVAssetWriter *_assetWriter;
    bool _haveStartedSession;
    
    CMFormatDescriptionRef _audioTrackSourceFormatDescription;
    NSDictionary *_audioTrackSettings;
    AVAssetWriterInput *_audioInput;
    
    CMFormatDescriptionRef _videoTrackSourceFormatDescription;
    CGAffineTransform _videoTrackTransform;
    NSDictionary *_videoTrackSettings;
    AVAssetWriterInput *_videoInput;
    
    __weak id<TGVideoCameraMovieRecorderDelegate> _delegate;
    dispatch_queue_t _delegateCallbackQueue;
    
    CMTime _startTimeStamp;
    CMTime _lastAudioTimeStamp;
    
    CMTime _timeOffset;
    
    bool _wasPaused;
}
@end


@implementation TGVideoCameraMovieRecorder

- (instancetype)initWithURL:(NSURL *)URL delegate:(id<TGVideoCameraMovieRecorderDelegate>)delegate callbackQueue:(dispatch_queue_t)queue
{
    self = [super init];
    if (self != nil)
    {
        _writingQueue = dispatch_queue_create("org.telegram.movierecorder.writing", DISPATCH_QUEUE_SERIAL);
        _videoTrackTransform = CGAffineTransformIdentity;
        _url = URL;
        _delegate = delegate;
        _delegateCallbackQueue = queue;
    }
    return self;
}

- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings
{
    if (formatDescription == NULL)
        return;
    
    @synchronized (self)
    {
        if (_status != TGMovieRecorderStatusIdle)
            return;
        
        if (_videoTrackSourceFormatDescription)
            return;
        
        _videoTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain(formatDescription);
        _videoTrackTransform = transform;
        _videoTrackSettings = [videoSettings copy];
    }
}

- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings
{
    if (formatDescription == NULL)
        return;
    
    @synchronized (self)
    {
        if (_status != TGMovieRecorderStatusIdle)
            return;
        
        if (_audioTrackSourceFormatDescription)
            return;
        
        _audioTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain(formatDescription);
        _audioTrackSettings = [audioSettings copy];
    }
}

- (void)prepareToRecord
{
    @synchronized (self)
    {
        if (_status != TGMovieRecorderStatusIdle)
            return;
        
        [self transitionToStatus:TGMovieRecorderStatusPreparingToRecord error:nil];
    }
    
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^
    {
        @autoreleasepool
        {
            NSError *error = nil;
            
            [[NSFileManager defaultManager] removeItemAtURL:_url error:NULL];
            
            _assetWriter = [[AVAssetWriter alloc] initWithURL:_url fileType:AVFileTypeMPEG4 error:&error];
            
            bool succeed = false;
            if (error == nil && _videoTrackSourceFormatDescription)
            {
                succeed = [self setupAssetWriterVideoInputWithSourceFormatDescription:_videoTrackSourceFormatDescription transform:_videoTrackTransform settings:_videoTrackSettings];
            }
            
            if (error == nil && succeed && _audioTrackSourceFormatDescription)
            {
                succeed = [self setupAssetWriterAudioInputWithSourceFormatDescription:_audioTrackSourceFormatDescription settings:_audioTrackSettings];
            }
            
            if (error == nil && succeed)
            {
                if (![_assetWriter startWriting])
                    error = _assetWriter.error;
            }
            
            @synchronized (self)
            {
                if (error || !succeed)
                    [self transitionToStatus:TGMovieRecorderStatusFailed error:error];
                else
                    [self transitionToStatus:TGMovieRecorderStatusRecording error:nil];
            }
        }
    });
}

- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime
{
    CMSampleBufferRef sampleBuffer = NULL;
    
    CMSampleTimingInfo timingInfo;
    timingInfo.duration = kCMTimeInvalid;
    timingInfo.decodeTimeStamp = kCMTimeInvalid;
    timingInfo.presentationTimeStamp = presentationTime;
    
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, _videoTrackSourceFormatDescription, &timingInfo, &sampleBuffer);
    
    if (sampleBuffer)
    {
        [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo];
        CFRelease(sampleBuffer);
    }
}

- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeAudio];
}

- (void)finishRecording
{
    @synchronized (self)
    {
        bool shouldFinishRecording = false;
        switch (_status)
        {
            case TGMovieRecorderStatusIdle:
            case TGMovieRecorderStatusPreparingToRecord:
            case TGMovieRecorderStatusFinishingWaiting:
            case TGMovieRecorderStatusFinishingCommiting:
            case TGMovieRecorderStatusFinished:
            case TGMovieRecorderStatusFailed:
                break;
                
            case TGMovieRecorderStatusRecording:
                shouldFinishRecording = true;
                break;
        }
        
        if (shouldFinishRecording)
            [self transitionToStatus:TGMovieRecorderStatusFinishingWaiting error:nil];
        else
            return;
    }
    
    dispatch_async(_writingQueue, ^
    {
        @autoreleasepool
        {
            @synchronized (self)
            {
                if (_status != TGMovieRecorderStatusFinishingWaiting)
                    return;
                
                [self transitionToStatus:TGMovieRecorderStatusFinishingCommiting error:nil];
            }
            
            [_assetWriter finishWritingWithCompletionHandler:^
            {
                @synchronized (self)
                {
                    NSError *error = _assetWriter.error;
                    if (error)
                        [self transitionToStatus:TGMovieRecorderStatusFailed error:error];
                    else
                        [self transitionToStatus:TGMovieRecorderStatusFinished error:nil];
                }
            }];
        }
    });
}

- (void)dealloc
{
    if (_audioTrackSourceFormatDescription)
        CFRelease(_audioTrackSourceFormatDescription);
    
    if (_videoTrackSourceFormatDescription)
        CFRelease(_videoTrackSourceFormatDescription);
}

- (void)setPaused:(bool)paused
{
    @synchronized (self)
    {
        _paused = paused;
        if (_paused)
            _wasPaused = true;
    }
}

- (CMSampleBufferRef)adjustTimeOfSample:(CMSampleBufferRef)sample byOffset:(CMTime)offset
{
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++)
    {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
    free(pInfo);
    return sout;
}

- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType
{
    if (sampleBuffer == NULL)
        return;
    
    @synchronized (self)
    {
        if (_status < TGMovieRecorderStatusRecording || (mediaType == AVMediaTypeAudio && !_haveStartedSession))
            return;
    }
    
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    CFRetain(sampleBuffer);
    dispatch_async(_writingQueue, ^
    {
        CMSampleBufferRef buffer = sampleBuffer;
        
        @autoreleasepool
        {
            @synchronized (self)
            {
                if (_status > TGMovieRecorderStatusFinishingWaiting)
                {
                    CFRelease(sampleBuffer);
                    return;
                }
            }
            
            if (!_haveStartedSession)
            {
                [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
                _haveStartedSession = true;
                
                _startTimeStamp = timestamp;
            }
            
            AVAssetWriterInput *input = (mediaType == AVMediaTypeVideo) ? _videoInput : _audioInput;
            @synchronized (self)
            {
                if (_wasPaused)
                {
                    if (input == _videoInput)
                        return;
                    
                    _wasPaused = false;
                    
                    CMTime pts = CMSampleBufferGetPresentationTimeStamp(buffer);
                    CMTime last = _lastAudioTimeStamp;
                    if (last.flags & kCMTimeFlags_Valid)
                    {
                        CMTime offset = CMTimeSubtract(pts, last);
                        if (_timeOffset.value == 0)
                            _timeOffset = offset;
                        else
                            _timeOffset = CMTimeAdd(_timeOffset, offset);
                    }
                    _lastAudioTimeStamp.flags = 0;
                }
            }
            
            if (_timeOffset.value > 0 && input == _videoInput)
            {
                buffer = [self adjustTimeOfSample:buffer byOffset:_timeOffset];
                CFRelease(sampleBuffer);
            }
            
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(buffer);
            CMTime duration = CMSampleBufferGetDuration(buffer);
            if (duration.value > 0)
                pts = CMTimeAdd(pts, duration);
            
            if (input == _audioInput)
                _lastAudioTimeStamp = pts;
            
            if (input.readyForMoreMediaData)
            {
                if (![input appendSampleBuffer:buffer])
                {
                    NSError *error = _assetWriter.error;
                    @synchronized (self)
                    {
                        [self transitionToStatus:TGMovieRecorderStatusFailed error:error];
                    }
                }
            }
            CFRelease(buffer);
        }
    });
}

- (void)transitionToStatus:(TGMovieRecorderStatus)newStatus error:(NSError *)error
{
    bool shouldNotifyDelegate = false;
    
    if (newStatus != _status)
    {
        if ((newStatus == TGMovieRecorderStatusFinished) || (newStatus == TGMovieRecorderStatusFailed))
        {
            shouldNotifyDelegate = true;
            
            dispatch_async(_writingQueue, ^
            {
                [self teardownAssetWriterAndInputs];
                if (newStatus == TGMovieRecorderStatusFailed)
                {
                    [[NSFileManager defaultManager] removeItemAtURL:_url error:NULL];
                }
            });
        }
        else if (newStatus == TGMovieRecorderStatusRecording)
        {
            shouldNotifyDelegate = true;
        }
        
        _status = newStatus;
    }
    
    if (shouldNotifyDelegate)
    {
        dispatch_async(_delegateCallbackQueue, ^
        {
            @autoreleasepool
            {
                switch (newStatus)
                {
                    case TGMovieRecorderStatusRecording:
                        [_delegate movieRecorderDidFinishPreparing:self];
                        break;
                        
                    case TGMovieRecorderStatusFinished:
                        [_delegate movieRecorderDidFinishRecording:self];
                        break;
                        
                    case TGMovieRecorderStatusFailed:
                        [_delegate movieRecorder:self didFailWithError:error];
                        break;
                        
                    default:
                        break;
                }
            }
        });
    }
}

- (bool)setupAssetWriterAudioInputWithSourceFormatDescription:(CMFormatDescriptionRef)audioFormatDescription settings:(NSDictionary *)audioSettings
{
    if ([_assetWriter canApplyOutputSettings:audioSettings forMediaType:AVMediaTypeAudio])
    {
        _audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings sourceFormatHint:audioFormatDescription];
        _audioInput.expectsMediaDataInRealTime = true;
        
        if ([_assetWriter canAddInput:_audioInput])
        {
            [_assetWriter addInput:_audioInput];
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
    
    return true;
}

- (bool)setupAssetWriterVideoInputWithSourceFormatDescription:(CMFormatDescriptionRef)videoFormatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings
{
    if ([_assetWriter canApplyOutputSettings:videoSettings forMediaType:AVMediaTypeVideo])
    {
        _videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings sourceFormatHint:videoFormatDescription];
        _videoInput.expectsMediaDataInRealTime = true;
        _videoInput.transform = transform;
        
        if ([_assetWriter canAddInput:_videoInput])
        {
            [_assetWriter addInput:_videoInput];
        }
        else
        {
            return false;
        }
    }
    else
    {
        return false;
    }
    
    return true;
}

- (void)teardownAssetWriterAndInputs
{
    _videoInput = nil;
    _audioInput = nil;
    _assetWriter = nil;
}

- (NSTimeInterval)videoDuration
{
    return CMTimeGetSeconds(CMTimeSubtract(_lastAudioTimeStamp, _startTimeStamp));
}

@end
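The pause handling above works by timestamp shifting rather than by stopping the writer: when the first audio buffer after a pause arrives, -appendSampleBuffer:ofMediaType: measures the gap between its presentation time and _lastAudioTimeStamp, accumulates it into _timeOffset, and -adjustTimeOfSample:byOffset: then subtracts that offset from subsequent video samples so the written movie stays contiguous. A small worked example with assumed timestamps (not from the commit):

    // Last audio buffer before the pause ended at 2.0 s; capture resumes at 5.0 s.
    CMTime last = CMTimeMake(2000, 1000);
    CMTime resumed = CMTimeMake(5000, 1000);
    CMTime offset = CMTimeSubtract(resumed, last);              // 3.0 s of paused wall-clock time
    
    // A frame captured at 5.2 s is therefore written at 5.2 - 3.0 = 2.2 s.
    CMTime frame = CMTimeMake(5200, 1000);
    CMTime written = CMTimeSubtract(frame, offset);
    NSLog(@"written pts = %.2f s", CMTimeGetSeconds(written));  // 2.20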
47  LegacyComponents/TGVideoCameraPipeline.h  Normal file
@@ -0,0 +1,47 @@
#import <AVFoundation/AVFoundation.h>
#import <LegacyComponents/TGVideoEditAdjustments.h>
#import <LegacyComponents/TGLiveUploadInterface.h>

@protocol TGVideoCameraPipelineDelegate;


@interface TGVideoCameraPipeline : NSObject

@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
@property (nonatomic, assign) bool renderingEnabled;
@property (nonatomic, readonly) NSTimeInterval videoDuration;
@property (nonatomic, readonly) CGAffineTransform videoTransform;
@property (nonatomic, readonly) bool isRecording;

@property (nonatomic, copy) void (^micLevel)(CGFloat);

- (instancetype)initWithDelegate:(id<TGVideoCameraPipelineDelegate>)delegate position:(AVCaptureDevicePosition)position callbackQueue:(dispatch_queue_t)queue liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface;

- (void)startRunning;
- (void)stopRunning;

- (void)startRecording:(NSURL *)url preset:(TGMediaVideoConversionPreset)preset liveUpload:(bool)liveUpload;
- (void)stopRecording;

- (CGAffineTransform)transformForOrientation:(AVCaptureVideoOrientation)orientation;

- (void)setCameraPosition:(AVCaptureDevicePosition)position;
+ (bool)cameraPositionChangeAvailable;

@end


@protocol TGVideoCameraPipelineDelegate <NSObject>
@required

- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline didStopRunningWithError:(NSError *)error;

- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer;
- (void)capturePipelineDidRunOutOfPreviewBuffers:(TGVideoCameraPipeline *)capturePipeline;

- (void)capturePipelineRecordingDidStart:(TGVideoCameraPipeline *)capturePipeline;
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline recordingDidFailWithError:(NSError *)error;
- (void)capturePipelineRecordingWillStop:(TGVideoCameraPipeline *)capturePipeline;
- (void)capturePipelineRecordingDidStop:(TGVideoCameraPipeline *)capturePipeline duration:(NSTimeInterval)duration liveUploadData:(id)liveUploadData thumbnailImage:(UIImage *)thumbnailImage thumbnails:(NSDictionary *)thumbnails;

@end
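A rough sketch of how a client controller might drive this pipeline; the owner object, outputURL, liveUploadInterface and preset are assumptions, not taken from this diff:

    // Hypothetical owner adopting TGVideoCameraPipelineDelegate; preset is whichever
    // TGMediaVideoConversionPreset value the caller uses for video messages.
    TGVideoCameraPipeline *pipeline = [[TGVideoCameraPipeline alloc] initWithDelegate:self
                                                                             position:AVCaptureDevicePositionFront
                                                                        callbackQueue:dispatch_get_main_queue()
                                                                  liveUploadInterface:liveUploadInterface];
    pipeline.orientation = AVCaptureVideoOrientationPortrait;
    pipeline.renderingEnabled = true;
    pipeline.micLevel = ^(CGFloat level) { /* e.g. drive the record-button level indicator */ };
    
    [pipeline startRunning];
    [pipeline startRecording:outputURL preset:preset liveUpload:false];
    // ...
    [pipeline stopRecording];   // -capturePipelineRecordingDidStop:... delivers duration, thumbnails and live-upload data
    [pipeline stopRunning];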
996  LegacyComponents/TGVideoCameraPipeline.m  Normal file
@@ -0,0 +1,996 @@
|
||||
#import "TGVideoCameraPipeline.h"
|
||||
|
||||
#import "LegacyComponentsInternal.h"
|
||||
|
||||
#import <libkern/OSAtomic.h>
|
||||
#import <CoreMedia/CoreMedia.h>
|
||||
#import <ImageIO/ImageIO.h>
|
||||
#import <Accelerate/Accelerate.h>
|
||||
|
||||
#import <LegacyComponents/TGVideoCameraGLRenderer.h>
|
||||
|
||||
#import <LegacyComponents/TGVideoCameraMovieRecorder.h>
|
||||
#import <LegacyComponents/TGMediaVideoConverter.h>
|
||||
|
||||
typedef enum {
|
||||
TGVideoCameraRecordingStatusIdle = 0,
|
||||
TGVideoCameraRecordingStatusStartingRecording,
|
||||
TGVideoCameraRecordingStatusRecording,
|
||||
TGVideoCameraRecordingStatusStoppingRecording,
|
||||
} TGVideoCameraRecordingStatus;
|
||||
|
||||
const NSInteger TGVideoCameraRetainedBufferCount = 16;
|
||||
|
||||
@interface TGVideoCameraPipeline () <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, TGVideoCameraMovieRecorderDelegate>
|
||||
{
|
||||
AVCaptureSession *_captureSession;
|
||||
|
||||
AVCaptureDevice *_videoDevice;
|
||||
AVCaptureConnection *_videoConnection;
|
||||
AVCaptureDeviceInput *_videoInput;
|
||||
AVCaptureVideoDataOutput *_videoOutput;
|
||||
|
||||
AVCaptureDevice *_audioDevice;
|
||||
AVCaptureConnection *_audioConnection;
|
||||
AVCaptureDeviceInput *_audioInput;
|
||||
AVCaptureAudioDataOutput *_audioOutput;
|
||||
|
||||
AVCaptureVideoOrientation _videoBufferOrientation;
|
||||
AVCaptureDevicePosition _preferredPosition;
|
||||
bool _running;
|
||||
bool _startCaptureSessionOnEnteringForeground;
|
||||
id _applicationWillEnterForegroundObserver;
|
||||
|
||||
dispatch_queue_t _audioDataOutputQueue;
|
||||
dispatch_queue_t _videoDataOutputQueue;
|
||||
|
||||
TGVideoCameraGLRenderer *_renderer;
|
||||
bool _renderingEnabled;
|
||||
|
||||
TGVideoCameraMovieRecorder *_recorder;
|
||||
NSURL *_recordingURL;
|
||||
TGVideoCameraRecordingStatus _recordingStatus;
|
||||
UIImage *_recordingThumbnail;
|
||||
|
||||
__weak id<TGVideoCameraPipelineDelegate> _delegate;
|
||||
dispatch_queue_t _delegateCallbackQueue;
|
||||
|
||||
NSTimeInterval _resultDuration;
|
||||
|
||||
CVPixelBufferRef _previousPixelBuffer;
|
||||
int32_t _repeatingCount;
|
||||
|
||||
NSMutableData *_audioBuffer;
|
||||
int16_t _micLevelPeak;
|
||||
int _micLevelPeakCount;
|
||||
|
||||
TGMediaVideoConversionPreset _preset;
|
||||
|
||||
bool _liveUpload;
|
||||
id<TGLiveUploadInterface> _watcher;
|
||||
id _liveUploadData;
|
||||
|
||||
OSSpinLock _recordLock;
|
||||
bool _startRecordAfterAudioBuffer;
|
||||
|
||||
CVPixelBufferRef _currentPreviewPixelBuffer;
|
||||
NSMutableDictionary *_thumbnails;
|
||||
|
||||
NSTimeInterval _firstThumbnailTime;
|
||||
NSTimeInterval _previousThumbnailTime;
|
||||
|
||||
id<TGLiveUploadInterface> _liveUploadInterface;
|
||||
}
|
||||
|
||||
@property (nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputVideoFormatDescription;
|
||||
@property (nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputAudioFormatDescription;
|
||||
|
||||
@end
|
||||
|
||||
@implementation TGVideoCameraPipeline
|
||||
|
||||
- (instancetype)initWithDelegate:(id<TGVideoCameraPipelineDelegate>)delegate position:(AVCaptureDevicePosition)position callbackQueue:(dispatch_queue_t)queue liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface
|
||||
{
|
||||
self = [super init];
|
||||
if (self != nil)
|
||||
{
|
||||
_liveUploadInterface = liveUploadInterface;
|
||||
_preferredPosition = position;
|
||||
|
||||
_videoDataOutputQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.video", DISPATCH_QUEUE_SERIAL);
|
||||
dispatch_set_target_queue(_videoDataOutputQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
|
||||
|
||||
_renderer = [[TGVideoCameraGLRenderer alloc] init];
|
||||
|
||||
_delegate = delegate;
|
||||
_delegateCallbackQueue = queue;
|
||||
|
||||
_thumbnails = [[NSMutableDictionary alloc] init];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc
|
||||
{
|
||||
[self destroyCaptureSession];
|
||||
}
|
||||
|
||||
- (void)startRunning
|
||||
{
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
[self setupCaptureSession];
|
||||
|
||||
if (_captureSession != nil)
|
||||
{
|
||||
[_captureSession startRunning];
|
||||
_running = true;
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)stopRunning
|
||||
{
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
_running = false;
|
||||
|
||||
[self stopRecording];
|
||||
|
||||
[_captureSession stopRunning];
|
||||
[self captureSessionDidStopRunning];
|
||||
[self destroyCaptureSession];
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)setupCaptureSession
|
||||
{
|
||||
if (_captureSession != nil)
|
||||
return;
|
||||
|
||||
_captureSession = [[AVCaptureSession alloc] init];
|
||||
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
|
||||
_applicationWillEnterForegroundObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[[LegacyComponentsGlobals provider] applicationInstance] queue:nil usingBlock:^(__unused NSNotification *note)
|
||||
{
|
||||
[self applicationWillEnterForeground];
|
||||
}];
|
||||
|
||||
_audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
|
||||
_audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:nil];
|
||||
if ([_captureSession canAddInput:_audioInput])
|
||||
[_captureSession addInput:_audioInput];
|
||||
|
||||
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
|
||||
_audioDataOutputQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.audio", DISPATCH_QUEUE_SERIAL);
|
||||
[_audioOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];
|
||||
|
||||
if ([_captureSession canAddOutput:_audioOutput])
|
||||
[_captureSession addOutput:_audioOutput];
|
||||
|
||||
_audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];
|
||||
|
||||
|
||||
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
|
||||
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
|
||||
for (AVCaptureDevice *device in devices)
|
||||
{
|
||||
if (device.position == _preferredPosition)
|
||||
{
|
||||
videoDevice = device;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
_renderer.mirror = (videoDevice.position == AVCaptureDevicePositionFront);
|
||||
_renderer.orientation = _orientation;
|
||||
|
||||
NSError *videoDeviceError = nil;
|
||||
_videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&videoDeviceError];
|
||||
if ([_captureSession canAddInput:_videoInput])
|
||||
{
|
||||
[_captureSession addInput:_videoInput];
|
||||
_videoDevice = videoDevice;
|
||||
}
|
||||
else
|
||||
{
|
||||
[self handleNonRecoverableCaptureSessionRuntimeError:videoDeviceError];
|
||||
return;
|
||||
}
|
||||
|
||||
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
|
||||
_videoOutput.alwaysDiscardsLateVideoFrames = false;
|
||||
_videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
|
||||
[_videoOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue];
|
||||
|
||||
if ([_captureSession canAddOutput:_videoOutput])
|
||||
[_captureSession addOutput:_videoOutput];
|
||||
|
||||
_videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
|
||||
|
||||
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
|
||||
_captureSession.sessionPreset = AVCaptureSessionPreset640x480;
|
||||
else
|
||||
_captureSession.sessionPreset = AVCaptureSessionPresetMedium;
|
||||
|
||||
[self _configureFPS];
|
||||
|
||||
[self _enableLowLightBoost];
|
||||
[self _enableVideoStabilization];
|
||||
|
||||
_videoBufferOrientation = _videoConnection.videoOrientation;
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
- (void)destroyCaptureSession
|
||||
{
|
||||
if (_captureSession)
|
||||
{
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:nil object:_captureSession];
|
||||
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:_applicationWillEnterForegroundObserver];
|
||||
_applicationWillEnterForegroundObserver = nil;
|
||||
|
||||
_captureSession = nil;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)captureSessionNotification:(NSNotification *)notification
|
||||
{
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
if ([notification.name isEqualToString:AVCaptureSessionWasInterruptedNotification])
|
||||
{
|
||||
[self captureSessionDidStopRunning];
|
||||
}
|
||||
else if ([notification.name isEqualToString:AVCaptureSessionRuntimeErrorNotification])
|
||||
{
|
||||
[self captureSessionDidStopRunning];
|
||||
|
||||
NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
|
||||
if (error.code == AVErrorDeviceIsNotAvailableInBackground)
|
||||
{
|
||||
if (_running)
|
||||
_startCaptureSessionOnEnteringForeground = true;
|
||||
}
|
||||
else if (error.code == AVErrorMediaServicesWereReset)
|
||||
{
|
||||
[self handleRecoverableCaptureSessionRuntimeError:error];
|
||||
}
|
||||
else
|
||||
{
|
||||
[self handleNonRecoverableCaptureSessionRuntimeError:error];
|
||||
}
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)handleRecoverableCaptureSessionRuntimeError:(NSError *)__unused error
|
||||
{
|
||||
if (_running)
|
||||
[_captureSession startRunning];
|
||||
}
|
||||
|
||||
- (void)handleNonRecoverableCaptureSessionRuntimeError:(NSError *)error
|
||||
{
|
||||
_running = false;
|
||||
[self destroyCaptureSession];
|
||||
|
||||
[self invokeDelegateCallbackAsync:^
|
||||
{
|
||||
[_delegate capturePipeline:self didStopRunningWithError:error];
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)captureSessionDidStopRunning
|
||||
{
|
||||
[self stopRecording];
|
||||
[self destroyVideoPipeline];
|
||||
}
|
||||
|
||||
- (void)applicationWillEnterForeground
|
||||
{
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
if (_startCaptureSessionOnEnteringForeground)
|
||||
{
|
||||
_startCaptureSessionOnEnteringForeground = false;
|
||||
if (_running)
|
||||
[_captureSession startRunning];
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)setupVideoPipelineWithInputFormatDescription:(CMFormatDescriptionRef)inputFormatDescription
|
||||
{
|
||||
[_renderer prepareForInputWithFormatDescription:inputFormatDescription outputRetainedBufferCountHint:TGVideoCameraRetainedBufferCount];
|
||||
self.outputVideoFormatDescription = _renderer.outputFormatDescription;
|
||||
}
|
||||
|
||||
- (void)destroyVideoPipeline
|
||||
{
|
||||
dispatch_sync(_videoDataOutputQueue, ^
|
||||
{
|
||||
if (self.outputVideoFormatDescription == NULL)
|
||||
return;
|
||||
|
||||
self.outputVideoFormatDescription = NULL;
|
||||
[_renderer reset];
|
||||
|
||||
if (_currentPreviewPixelBuffer != NULL)
|
||||
{
|
||||
CFRelease(_currentPreviewPixelBuffer);
|
||||
_currentPreviewPixelBuffer = NULL;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (void)videoPipelineDidRunOutOfBuffers
|
||||
{
|
||||
[self invokeDelegateCallbackAsync:^
|
||||
{
|
||||
[_delegate capturePipelineDidRunOutOfPreviewBuffers:self];
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)setRenderingEnabled:(bool)renderingEnabled
|
||||
{
|
||||
@synchronized (_renderer)
|
||||
{
|
||||
_renderingEnabled = renderingEnabled;
|
||||
}
|
||||
}
|
||||
|
||||
- (bool)renderingEnabled
|
||||
{
|
||||
@synchronized (_renderer)
|
||||
{
|
||||
return _renderingEnabled;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)__unused captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
|
||||
{
|
||||
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
|
||||
|
||||
if (connection == _videoConnection)
|
||||
{
|
||||
if (self.outputVideoFormatDescription == NULL)
|
||||
[self setupVideoPipelineWithInputFormatDescription:formatDescription];
|
||||
else
|
||||
[self renderVideoSampleBuffer:sampleBuffer];
|
||||
}
|
||||
else if (connection == _audioConnection)
|
||||
{
|
||||
self.outputAudioFormatDescription = formatDescription;
|
||||
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
|
||||
[_recorder appendAudioSampleBuffer:sampleBuffer];
|
||||
}
|
||||
|
||||
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
|
||||
uint32_t numSamplesInBuffer = (uint32_t)CMSampleBufferGetNumSamples(sampleBuffer);
|
||||
|
||||
AudioBufferList audioBufferList;
|
||||
|
||||
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer );
|
||||
|
||||
for (uint32_t bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++)
|
||||
{
|
||||
int16_t *samples = (int16_t *)audioBufferList.mBuffers[bufferCount].mData;
|
||||
[self processWaveformPreview:samples count:numSamplesInBuffer];
|
||||
}
|
||||
|
||||
CFRelease(blockBuffer);
|
||||
|
||||
OSSpinLockLock(&_recordLock);
|
||||
if (_startRecordAfterAudioBuffer)
|
||||
{
|
||||
_startRecordAfterAudioBuffer = false;
|
||||
TGDispatchOnMainThread(^
|
||||
{
|
||||
[self startRecording:_recordingURL preset:_preset liveUpload:_liveUpload];
|
||||
});
|
||||
}
|
||||
OSSpinLockUnlock(&_recordLock);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)processWaveformPreview:(int16_t const *)samples count:(int)count {
|
||||
for (int i = 0; i < count; i++) {
|
||||
int16_t sample = samples[i];
|
||||
if (sample < 0) {
|
||||
sample = -sample;
|
||||
}
|
||||
|
||||
if (_micLevelPeak < sample) {
|
||||
_micLevelPeak = sample;
|
||||
}
|
||||
_micLevelPeakCount++;
|
||||
|
||||
if (_micLevelPeakCount >= 1200) {
|
||||
if (_micLevel) {
|
||||
CGFloat level = (CGFloat)_micLevelPeak / 4000.0;
|
||||
_micLevel(level);
|
||||
}
|
||||
_micLevelPeak = 0;
|
||||
_micLevelPeakCount = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (UIImage *)imageFromImageBuffer:(CVPixelBufferRef)imageBuffer
|
||||
{
|
||||
CVPixelBufferLockBaseAddress(imageBuffer, 0);
|
||||
|
||||
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
|
||||
|
||||
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
|
||||
|
||||
size_t width = CVPixelBufferGetWidth(imageBuffer);
|
||||
size_t height = CVPixelBufferGetHeight(imageBuffer);
|
||||
|
||||
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
|
||||
|
||||
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
|
||||
|
||||
CGImageRef cgImage = CGBitmapContextCreateImage(context);
|
||||
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
|
||||
|
||||
CGContextRelease(context);
|
||||
CGColorSpaceRelease(colorSpace);
|
||||
|
||||
UIImage *image = [UIImage imageWithCGImage:cgImage];
|
||||
CGImageRelease(cgImage);
|
||||
|
||||
return image;
|
||||
}
|
||||
|
||||
|
||||
- (void)renderVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
{
|
||||
CVPixelBufferRef renderedPixelBuffer = NULL;
|
||||
CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
|
||||
|
||||
@synchronized (_renderer)
|
||||
{
|
||||
if (_renderingEnabled)
|
||||
{
|
||||
bool repeatingFrames = false;
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recorder.paused && _previousPixelBuffer != NULL)
|
||||
{
|
||||
_recorder.paused = false;
|
||||
_repeatingCount = 11;
|
||||
|
||||
[_renderer setPreviousPixelBuffer:_previousPixelBuffer];
|
||||
CFRelease(_previousPixelBuffer);
|
||||
_previousPixelBuffer = NULL;
|
||||
}
|
||||
|
||||
if (_repeatingCount > 0)
|
||||
{
|
||||
repeatingFrames = true;
|
||||
_repeatingCount--;
|
||||
}
|
||||
|
||||
CGFloat opacity = 1.0f;
|
||||
if (_repeatingCount < 10)
|
||||
opacity = _repeatingCount / 9.0f;
|
||||
|
||||
[_renderer setOpacity:opacity];
|
||||
|
||||
if (_repeatingCount == 0)
|
||||
[_renderer setPreviousPixelBuffer:NULL];
|
||||
}
|
||||
|
||||
CVPixelBufferRef sourcePixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
renderedPixelBuffer = [_renderer copyRenderedPixelBuffer:sourcePixelBuffer];
|
||||
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus == TGVideoCameraRecordingStatusRecording && _recordingThumbnail == nil)
|
||||
{
|
||||
UIImage *image = [self imageFromImageBuffer:sourcePixelBuffer];
|
||||
_recordingThumbnail = image;
|
||||
}
|
||||
|
||||
if (_recordingStatus == TGVideoCameraRecordingStatusRecording && !repeatingFrames)
|
||||
{
|
||||
NSTimeInterval currentTime = CMTimeGetSeconds(timestamp);
|
||||
if (_previousThumbnailTime < DBL_EPSILON)
|
||||
{
|
||||
_firstThumbnailTime = currentTime;
|
||||
_previousThumbnailTime = currentTime;
|
||||
|
||||
[self storeThumbnailWithSampleBuffer:sampleBuffer time:0.0 mirror:_renderer.mirror];
|
||||
}
|
||||
else
|
||||
{
|
||||
NSTimeInterval relativeThumbnailTime = _previousThumbnailTime - _firstThumbnailTime;
|
||||
NSTimeInterval interval = MAX(0.1, relativeThumbnailTime / 10.0);
|
||||
|
||||
if (currentTime - _previousThumbnailTime >= interval)
|
||||
{
|
||||
[self storeThumbnailWithSampleBuffer:sampleBuffer time:relativeThumbnailTime mirror:_renderer.mirror];
|
||||
_previousThumbnailTime = currentTime;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!repeatingFrames)
|
||||
{
|
||||
if (_previousPixelBuffer != NULL)
|
||||
{
|
||||
CFRelease(_previousPixelBuffer);
|
||||
_previousPixelBuffer = NULL;
|
||||
}
|
||||
|
||||
_previousPixelBuffer = sourcePixelBuffer;
|
||||
CFRetain(sourcePixelBuffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (renderedPixelBuffer)
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
[self outputPreviewPixelBuffer:renderedPixelBuffer];
|
||||
|
||||
if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
|
||||
[_recorder appendVideoPixelBuffer:renderedPixelBuffer withPresentationTime:timestamp];
|
||||
}
|
||||
|
||||
CFRelease(renderedPixelBuffer);
|
||||
}
|
||||
else
|
||||
{
|
||||
[self videoPipelineDidRunOutOfBuffers];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)outputPreviewPixelBuffer:(CVPixelBufferRef)previewPixelBuffer
|
||||
{
|
||||
if (_currentPreviewPixelBuffer != NULL)
|
||||
{
|
||||
CFRelease(_currentPreviewPixelBuffer);
|
||||
_currentPreviewPixelBuffer = NULL;
|
||||
}
|
||||
|
||||
if (_previousPixelBuffer != NULL)
|
||||
{
|
||||
_currentPreviewPixelBuffer = previewPixelBuffer;
|
||||
CFRetain(_currentPreviewPixelBuffer);
|
||||
}
|
||||
|
||||
[self invokeDelegateCallbackAsync:^
|
||||
{
|
||||
CVPixelBufferRef currentPreviewPixelBuffer = NULL;
|
||||
@synchronized (self)
|
||||
{
|
||||
currentPreviewPixelBuffer = _currentPreviewPixelBuffer;
|
||||
if (currentPreviewPixelBuffer != NULL)
|
||||
{
|
||||
CFRetain(currentPreviewPixelBuffer);
|
||||
if (_currentPreviewPixelBuffer != NULL)
|
||||
{
|
||||
CFRelease(_currentPreviewPixelBuffer);
|
||||
_currentPreviewPixelBuffer = NULL;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (currentPreviewPixelBuffer != NULL)
|
||||
{
|
||||
[_delegate capturePipeline:self previewPixelBufferReadyForDisplay:currentPreviewPixelBuffer];
|
||||
CFRelease(currentPreviewPixelBuffer);
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)storeThumbnailWithSampleBuffer:(CMSampleBufferRef)sampleBuffer time:(NSTimeInterval)time mirror:(bool)mirror
|
||||
{
|
||||
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
size_t width = CVPixelBufferGetWidth(imageBuffer);
|
||||
size_t height = CVPixelBufferGetHeight(imageBuffer);
|
||||
|
||||
size_t cropX = (size_t)((width - height) / 2.0);
|
||||
size_t cropY = 0;
|
||||
size_t cropWidth = height;
|
||||
size_t cropHeight = height;
|
||||
size_t outWidth = 66;
|
||||
size_t outHeight = 66;
|
||||
|
||||
CVPixelBufferLockBaseAddress(imageBuffer,0);
|
||||
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
|
||||
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
|
||||
|
||||
vImage_Buffer inBuff;
|
||||
inBuff.height = cropHeight;
|
||||
inBuff.width = cropWidth;
|
||||
inBuff.rowBytes = bytesPerRow;
|
||||
|
||||
unsigned long startpos = cropY * bytesPerRow + 4 * cropX;
|
||||
inBuff.data = baseAddress + startpos;
|
||||
|
||||
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
|
||||
CGContextRef context = CGBitmapContextCreateWithData(NULL, outWidth, outHeight, 8, outWidth * 4, colorSpace, kCGImageByteOrder32Little | kCGImageAlphaPremultipliedFirst, NULL, nil);
|
||||
|
||||
unsigned char *outImg = CGBitmapContextGetData(context);
|
||||
vImage_Buffer outBuff = {outImg, outHeight, outWidth, 4 * outWidth};
|
||||
|
||||
vImage_Error err = vImageScale_ARGB8888(&inBuff, &outBuff, NULL, 0);
|
||||
if (err != kvImageNoError)
|
||||
TGLegacyLog(@"Video Message thumbnail generation error %ld", err);
|
||||
|
||||
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
|
||||
|
||||
CGImageRef cgImage = CGBitmapContextCreateImage(context);
|
||||
CGContextRelease(context);
|
||||
CGColorSpaceRelease(colorSpace);
|
||||
|
||||
UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0f orientation:mirror ? UIImageOrientationLeftMirrored : UIImageOrientationRight];
|
||||
CGImageRelease(cgImage);
|
||||
|
||||
_thumbnails[@(time)] = image;
|
||||
}
|
||||
|
||||
- (void)startRecording:(NSURL *)url preset:(TGMediaVideoConversionPreset)preset liveUpload:(bool)liveUpload
|
||||
{
|
||||
_recordingURL = url;
|
||||
_preset = preset;
|
||||
_liveUpload = liveUpload;
|
||||
|
||||
OSSpinLockLock(&_recordLock);
|
||||
if (self.outputAudioFormatDescription == NULL)
|
||||
{
|
||||
_startRecordAfterAudioBuffer = true;
|
||||
OSSpinLockUnlock(&_recordLock);
|
||||
return;
|
||||
}
|
||||
OSSpinLockUnlock(&_recordLock);
|
||||
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus != TGVideoCameraRecordingStatusIdle)
|
||||
return;
|
||||
|
||||
[self transitionToRecordingStatus:TGVideoCameraRecordingStatusStartingRecording error:nil];
|
||||
}
|
||||
|
||||
dispatch_queue_t callbackQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.recorder", DISPATCH_QUEUE_SERIAL);
|
||||
TGVideoCameraMovieRecorder *recorder = [[TGVideoCameraMovieRecorder alloc] initWithURL:_recordingURL delegate:self callbackQueue:callbackQueue];
|
||||
|
||||
NSDictionary *audioSettings = [TGMediaVideoConversionPresetSettings audioSettingsForPreset:preset];
|
||||
[recorder addAudioTrackWithSourceFormatDescription:self.outputAudioFormatDescription settings:audioSettings];
|
||||
|
||||
_videoTransform = [self transformForOrientation:self.orientation];
|
||||
|
||||
CGSize size = [TGMediaVideoConversionPresetSettings maximumSizeForPreset:preset];
|
||||
NSDictionary *videoSettings = [TGMediaVideoConversionPresetSettings videoSettingsForPreset:preset dimensions:size];
|
||||
[recorder addVideoTrackWithSourceFormatDescription:self.outputVideoFormatDescription transform:CGAffineTransformIdentity settings:videoSettings];
|
||||
_recorder = recorder;
|
||||
|
||||
[recorder prepareToRecord];
|
||||
}
|
||||
|
||||
- (void)stopRecording
|
||||
{
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus != TGVideoCameraRecordingStatusRecording)
|
||||
return;
|
||||
|
||||
[self transitionToRecordingStatus:TGVideoCameraRecordingStatusStoppingRecording error:nil];
|
||||
}
|
||||
|
||||
_resultDuration = _recorder.videoDuration;
|
||||
[_recorder finishRecording];
|
||||
}];
|
||||
}
|
||||
|
||||
- (bool)isRecording
|
||||
{
|
||||
return _recorder != nil && !_recorder.paused;
|
||||
}
|
||||
|
||||
- (void)movieRecorderDidFinishPreparing:(TGVideoCameraMovieRecorder *)__unused recorder
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus != TGVideoCameraRecordingStatusStartingRecording)
|
||||
return;
|
||||
|
||||
[self transitionToRecordingStatus:TGVideoCameraRecordingStatusRecording error:nil];
|
||||
|
||||
if (_liveUpload)
|
||||
{
|
||||
_watcher = _liveUploadInterface;
|
||||
[_watcher setupWithFileURL:_recordingURL];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (void)movieRecorder:(TGVideoCameraMovieRecorder *)__unused recorder didFailWithError:(NSError *)error
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
_recorder = nil;
|
||||
[self transitionToRecordingStatus:TGVideoCameraRecordingStatusIdle error:error];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)movieRecorderDidFinishRecording:(TGVideoCameraMovieRecorder *)__unused recorder
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
if (_recordingStatus != TGVideoCameraRecordingStatusStoppingRecording)
|
||||
return;
|
||||
}
|
||||
|
||||
_recorder = nil;
|
||||
|
||||
if (_watcher != nil)
|
||||
_liveUploadData = [_watcher fileUpdated:true];
|
||||
|
||||
[self transitionToRecordingStatus:TGVideoCameraRecordingStatusIdle error:nil];
|
||||
}
|
||||
|
||||
- (void)transitionToRecordingStatus:(TGVideoCameraRecordingStatus)newStatus error:(NSError *)error
|
||||
{
|
||||
TGVideoCameraRecordingStatus oldStatus = _recordingStatus;
|
||||
_recordingStatus = newStatus;
|
||||
|
||||
if (newStatus != oldStatus)
|
||||
{
|
||||
dispatch_block_t delegateCallbackBlock = nil;
|
||||
|
||||
if (error && newStatus == TGVideoCameraRecordingStatusIdle)
|
||||
{
|
||||
delegateCallbackBlock = ^{ [_delegate capturePipeline:self recordingDidFailWithError:error]; };
|
||||
}
|
||||
else
|
||||
{
|
||||
if ((oldStatus == TGVideoCameraRecordingStatusStartingRecording) && (newStatus == TGVideoCameraRecordingStatusRecording))
|
||||
delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStart:self]; };
|
||||
else if ((oldStatus == TGVideoCameraRecordingStatusRecording) && (newStatus == TGVideoCameraRecordingStatusStoppingRecording))
|
||||
delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingWillStop:self]; };
|
||||
else if ((oldStatus == TGVideoCameraRecordingStatusStoppingRecording) && (newStatus == TGVideoCameraRecordingStatusIdle))
|
||||
delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStop:self duration:_resultDuration liveUploadData:_liveUploadData thumbnailImage:_recordingThumbnail thumbnails:_thumbnails]; };
|
||||
}
|
||||
|
||||
if (delegateCallbackBlock != nil)
|
||||
[self invokeDelegateCallbackAsync:delegateCallbackBlock];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)invokeDelegateCallbackAsync:(dispatch_block_t)callbackBlock
|
||||
{
|
||||
dispatch_async(_delegateCallbackQueue, ^
|
||||
{
|
||||
@autoreleasepool
|
||||
{
|
||||
callbackBlock();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
- (CGAffineTransform)transformForOrientation:(AVCaptureVideoOrientation)orientation
|
||||
{
|
||||
CGAffineTransform transform = CGAffineTransformIdentity;
|
||||
|
||||
CGFloat orientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation(orientation);
|
||||
CGFloat videoOrientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation(_videoBufferOrientation);
|
||||
|
||||
CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
|
||||
transform = CGAffineTransformMakeRotation(angleOffset);
|
||||
|
||||
return transform;
|
||||
}
|
||||
|
||||
static CGFloat angleOffsetFromPortraitOrientationToOrientation(AVCaptureVideoOrientation orientation)
|
||||
{
|
||||
CGFloat angle = 0.0;
|
||||
|
||||
switch (orientation)
|
||||
{
|
||||
case AVCaptureVideoOrientationPortrait:
|
||||
angle = 0.0;
|
||||
break;
|
||||
case AVCaptureVideoOrientationPortraitUpsideDown:
|
||||
angle = M_PI;
|
||||
break;
|
||||
case AVCaptureVideoOrientationLandscapeRight:
|
||||
angle = -M_PI_2;
|
||||
break;
|
||||
case AVCaptureVideoOrientationLandscapeLeft:
|
||||
angle = M_PI_2;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return angle;
|
||||
}
|
||||
|
||||
- (NSTimeInterval)videoDuration
|
||||
{
|
||||
return _recorder.videoDuration;
|
||||
}
|
||||
|
||||
- (void)setCameraPosition:(AVCaptureDevicePosition)position
|
||||
{
|
||||
@synchronized (self)
|
||||
{
|
||||
_recorder.paused = true;
|
||||
}
|
||||
|
||||
[[TGVideoCameraPipeline cameraQueue] dispatch:^
|
||||
{
|
||||
NSError *error;
|
||||
|
||||
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
|
||||
AVCaptureDevice *deviceForTargetPosition = nil;
|
||||
for (AVCaptureDevice *device in devices)
|
||||
{
|
||||
if (device.position == position)
|
||||
{
|
||||
deviceForTargetPosition = device;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
_renderer.mirror = deviceForTargetPosition.position == AVCaptureDevicePositionFront;
|
||||
_renderer.orientation = _orientation;
|
||||
|
||||
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:deviceForTargetPosition error:&error];
|
||||
if (newVideoInput != nil)
|
||||
{
|
||||
[_captureSession beginConfiguration];
|
||||
|
||||
[_captureSession removeInput:_videoInput];
|
||||
if ([_captureSession canAddInput:newVideoInput])
|
||||
{
|
||||
[_captureSession addInput:newVideoInput];
|
||||
_videoInput = newVideoInput;
|
||||
}
|
||||
else
|
||||
{
|
||||
[_captureSession addInput:_videoInput];
|
||||
}
|
||||
|
||||
[_captureSession commitConfiguration];
|
||||
}
|
||||
|
||||
_videoDevice = deviceForTargetPosition;
|
||||
|
||||
_videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
|
||||
|
||||
[self _configureFPS];
|
||||
|
||||
[self _enableLowLightBoost];
|
||||
[self _enableVideoStabilization];
|
||||
|
||||
_videoBufferOrientation = _videoConnection.videoOrientation;
|
||||
}];
|
||||
}
|
||||
|
||||
|
||||
- (void)_enableLowLightBoost
|
||||
{
|
||||
[self _reconfigureDevice:_videoDevice withBlock:^(AVCaptureDevice *device)
|
||||
{
|
||||
if (device.isLowLightBoostSupported)
|
||||
device.automaticallyEnablesLowLightBoostWhenAvailable = true;
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)_enableVideoStabilization
|
||||
{
|
||||
AVCaptureConnection *videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
|
||||
if (videoConnection.supportsVideoStabilization)
|
||||
{
|
||||
if ([videoConnection respondsToSelector:@selector(setPreferredVideoStabilizationMode:)])
|
||||
videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
|
||||
else
|
||||
videoConnection.enablesVideoStabilizationWhenAvailable = true;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_reconfigureDevice:(AVCaptureDevice *)device withBlock:(void (^)(AVCaptureDevice *device))block
|
||||
{
|
||||
if (block == nil)
|
||||
return;
|
||||
|
||||
NSError *error = nil;
|
||||
[device lockForConfiguration:&error];
|
||||
block(device);
|
||||
[device unlockForConfiguration];
|
||||
|
||||
if (error != nil)
|
||||
TGLegacyLog(@"ERROR: failed to reconfigure camera: %@", error);
|
||||
}
|
||||
|
||||
- (void)_addAudioInput
|
||||
{
|
||||
if (_audioDevice != nil || _audioDataOutputQueue == NULL)
|
||||
return;
|
||||
|
||||
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
|
||||
|
||||
NSError *error = nil;
|
||||
if (audioDevice != nil)
|
||||
{
|
||||
_audioDevice = audioDevice;
|
||||
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioDevice error:&error];
|
||||
if ([_captureSession canAddInput:audioInput])
|
||||
{
|
||||
[_captureSession addInput:audioInput];
|
||||
_audioInput = audioInput;
|
||||
}
|
||||
}
|
||||
|
||||
AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
|
||||
if ([_captureSession canAddOutput:audioOutput])
|
||||
{
|
||||
[audioOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];
|
||||
[_captureSession addOutput:audioOutput];
|
||||
_audioOutput = audioOutput;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_removeAudioInput
|
||||
{
|
||||
if (_audioDevice == nil)
|
||||
return;
|
||||
|
||||
[_captureSession removeInput:_audioInput];
|
||||
_audioInput = nil;
|
||||
|
||||
[_audioOutput setSampleBufferDelegate:nil queue:NULL];
|
||||
[_captureSession removeOutput:_audioOutput];
|
||||
_audioOutput = nil;
|
||||
|
||||
_audioDevice = nil;
|
||||
}
|
||||
|
||||
- (void)_configureFPS
|
||||
{
|
||||
CMTime frameDuration = CMTimeMake(1, 30);
|
||||
[self _reconfigureDevice:_videoDevice withBlock:^(AVCaptureDevice *device)
|
||||
{
|
||||
device.activeVideoMaxFrameDuration = frameDuration;
|
||||
device.activeVideoMinFrameDuration = frameDuration;
|
||||
}];
|
||||
}
|
||||
|
||||
+ (bool)cameraPositionChangeAvailable
|
||||
{
|
||||
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1;
|
||||
}
|
||||
|
||||
+ (SQueue *)cameraQueue
|
||||
{
|
||||
static dispatch_once_t onceToken;
|
||||
static SQueue *queue = nil;
|
||||
dispatch_once(&onceToken, ^
|
||||
{
|
||||
queue = [[SQueue alloc] init];
|
||||
});
|
||||
|
||||
return queue;
|
||||
}
|
||||
|
||||
@end
|
||||
27  LegacyComponents/TGVideoMessageCaptureController.h  Normal file
@@ -0,0 +1,27 @@
#import <LegacyComponents/LegacyComponents.h>

@class TGVideoEditAdjustments;

@interface TGVideoMessageCaptureController : TGOverlayController

@property (nonatomic, copy) id (^requestActivityHolder)();
@property (nonatomic, copy) void (^micLevel)(CGFloat level);
@property (nonatomic, copy) void(^finishedWithVideo)(NSURL *videoURL, UIImage *previewImage, NSUInteger fileSize, NSTimeInterval duration, CGSize dimensions, id liveUploadData, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void(^onDismiss)(bool isAuto);
@property (nonatomic, copy) void(^onStop)(void);
@property (nonatomic, copy) void(^onCancel)(void);

- (instancetype)initWithContext:(id<LegacyComponentsContext>)context transitionInView:(UIView *(^)())transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface;
- (void)buttonInteractionUpdate:(CGPoint)value;
- (void)setLocked;

- (void)complete;
- (void)dismiss;
- (void)stop;

+ (void)clearStartImage;

+ (void)requestCameraAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock;
+ (void)requestMicrophoneAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock;

@end
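The two permission helpers declared above compose naturally; an illustrative sketch of requesting both before presenting the controller (the surrounding presentation code is assumed, not part of this commit):

    [TGVideoMessageCaptureController requestCameraAccess:^(bool granted, __unused bool wasNotDetermined)
    {
        if (!granted)
            return;
        
        [TGVideoMessageCaptureController requestMicrophoneAccess:^(bool micGranted, __unused bool micWasNotDetermined)
        {
            if (micGranted)
            {
                // both permissions are available: present the capture controller here
            }
        }];
    }];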
1283  LegacyComponents/TGVideoMessageCaptureController.m  Normal file
(file diff suppressed because it is too large)
31  LegacyComponents/TGVideoMessageControls.h  Normal file
@@ -0,0 +1,31 @@
#import <UIKit/UIKit.h>
#import <LegacyComponents/TGVideoMessageScrubber.h>

@interface TGVideoMessageControls : UIView

@property (nonatomic, readonly) TGVideoMessageScrubber *scrubberView;

@property (nonatomic, assign) CGFloat controlsHeight;
@property (nonatomic, copy) void (^positionChanged)(void);
@property (nonatomic, copy) void (^cancel)(void);
@property (nonatomic, copy) void (^deletePressed)(void);
@property (nonatomic, copy) void (^sendPressed)(void);

@property (nonatomic, copy) bool(^isAlreadyLocked)(void);

@property (nonatomic, assign) bool positionChangeAvailable;

@property (nonatomic, weak) id<TGVideoMessageScrubberDelegate, TGVideoMessageScrubberDataSource> parent;

- (void)captureStarted;
- (void)recordingStarted;
- (void)setShowRecordingInterface:(bool)show velocity:(CGFloat)velocity;
- (void)buttonInteractionUpdate:(CGPoint)value;
- (void)setLocked;
- (void)setStopped;

- (void)showScrubberView;

- (void)setDurationString:(NSString *)string;

@end
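A small illustrative wiring of the block callbacks declared above; the owner object, its frame and the inherited UIView initializer are assumptions for the sketch only:

    TGVideoMessageControls *controls = [[TGVideoMessageControls alloc] initWithFrame:controlsFrame]; // plain UIView initializer assumed
    controls.isAlreadyLocked = ^bool { return false; };
    controls.cancel = ^{ /* tear down the recording UI */ };
    controls.deletePressed = ^{ /* discard the recorded message */ };
    controls.sendPressed = ^{ /* hand the result to the send path */ };
    [controls setShowRecordingInterface:true velocity:0.0f];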
471  LegacyComponents/TGVideoMessageControls.m  Normal file
@@ -0,0 +1,471 @@
|
||||
#import "TGVideoMessageControls.h"
|
||||
|
||||
#import <LegacyComponents/LegacyComponents.h>
|
||||
|
||||
#import <LegacyComponents/TGModernButton.h>
|
||||
//#import "TGModernConversationInputMicButton.h"
|
||||
#import <LegacyComponents/TGVideoMessageScrubber.h>
|
||||
|
||||
#import "LegacyComponentsInternal.h"
|
||||
#import "TGColor.h"
|
||||
|
||||
static void setViewFrame(UIView *view, CGRect frame)
|
||||
{
|
||||
CGAffineTransform transform = view.transform;
|
||||
view.transform = CGAffineTransformIdentity;
|
||||
if (!CGRectEqualToRect(view.frame, frame))
|
||||
view.frame = frame;
|
||||
view.transform = transform;
|
||||
}
|
||||
|
||||
static CGRect viewFrame(UIView *view)
|
||||
{
|
||||
CGAffineTransform transform = view.transform;
|
||||
view.transform = CGAffineTransformIdentity;
|
||||
CGRect result = view.frame;
|
||||
view.transform = transform;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@interface TGVideoMessageControls () //<TGModernConversationInputMicButtonDelegate>
|
||||
{
|
||||
UIImageView *_slideToCancelArrow;
|
||||
UILabel *_slideToCancelLabel;
|
||||
|
||||
TGModernButton *_cancelButton;
|
||||
|
||||
TGModernButton *_deleteButton;
|
||||
TGModernButton *_sendButton;
|
||||
|
||||
UIImageView *_recordIndicatorView;
|
||||
UILabel *_recordDurationLabel;
|
||||
|
||||
CFAbsoluteTime _recordingInterfaceShowTime;
|
||||
}
|
||||
@end
|
||||
|
||||
@implementation TGVideoMessageControls
|
||||
|
||||
- (void)captureStarted
|
||||
{
|
||||
[UIView transitionWithView:_recordDurationLabel duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{
|
||||
_recordDurationLabel.textColor = [UIColor whiteColor];
|
||||
} completion:nil];
|
||||
|
||||
[UIView transitionWithView:_slideToCancelLabel duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{
|
||||
_slideToCancelLabel.textColor = [UIColor whiteColor];
|
||||
} completion:nil];
|
||||
|
||||
[UIView transitionWithView:_slideToCancelArrow duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{
|
||||
_slideToCancelArrow.image = TGTintedImage(_slideToCancelArrow.image, [UIColor whiteColor]);
|
||||
} completion:nil];
|
||||
}
- (void)setShowRecordingInterface:(bool)show velocity:(CGFloat)velocity
{
    CGFloat hideLeftOffset = 400.0f;

    bool isAlreadyLocked = self.isAlreadyLocked();

    if (show)
    {
        _recordingInterfaceShowTime = CFAbsoluteTimeGetCurrent();

        if (_recordIndicatorView == nil)
        {
            static UIImage *indicatorImage = nil;
            static dispatch_once_t onceToken;
            dispatch_once(&onceToken, ^
            {
                indicatorImage = TGCircleImage(9.0f, UIColorRGB(0xF33D2B));
            });
            _recordIndicatorView = [[UIImageView alloc] initWithImage:indicatorImage];
        }

        setViewFrame(_recordIndicatorView, CGRectMake(11.0f, CGFloor((self.frame.size.height - 9.0f) / 2.0f), 9.0f, 9.0f));
        _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-80.0f, 0.0f);

        if (_recordDurationLabel == nil)
        {
            _recordDurationLabel = [[UILabel alloc] init];
            _recordDurationLabel.backgroundColor = [UIColor clearColor];
            _recordDurationLabel.textColor = [UIColor blackColor];
            _recordDurationLabel.font = TGSystemFontOfSize(15.0f);
            _recordDurationLabel.text = @"0:00,00 ";
            [_recordDurationLabel sizeToFit];
            _recordDurationLabel.alpha = 0.0f;
            _recordDurationLabel.layer.anchorPoint = CGPointMake((26.0f - _recordDurationLabel.frame.size.width) / (2 * 26.0f), 0.5f);
            _recordDurationLabel.textAlignment = NSTextAlignmentLeft;
            _recordDurationLabel.userInteractionEnabled = false;
        }

        setViewFrame(_recordDurationLabel, CGRectMake(26.0f, CGFloor((self.frame.size.height - _recordDurationLabel.frame.size.height) / 2.0f), _recordDurationLabel.frame.size.width, _recordDurationLabel.frame.size.height));

        _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-80.0f, 0.0f);

        if (_slideToCancelLabel == nil)
        {
            _slideToCancelLabel = [[UILabel alloc] init];
            _slideToCancelLabel.backgroundColor = [UIColor clearColor];
            _slideToCancelLabel.textColor = UIColorRGB(0x9597a0);
            _slideToCancelLabel.font = TGSystemFontOfSize(15.0f);
            _slideToCancelLabel.text = TGLocalized(@"Conversation.SlideToCancel");
            _slideToCancelLabel.clipsToBounds = false;
            _slideToCancelLabel.userInteractionEnabled = false;
            [_slideToCancelLabel sizeToFit];
            setViewFrame(_slideToCancelLabel, CGRectMake(CGFloor((self.frame.size.width - _slideToCancelLabel.frame.size.width) / 2.0f), CGFloor((self.frame.size.height - _slideToCancelLabel.frame.size.height) / 2.0f), _slideToCancelLabel.frame.size.width, _slideToCancelLabel.frame.size.height));
            _slideToCancelLabel.alpha = 0.0f;

            _slideToCancelArrow = [[UIImageView alloc] initWithImage:TGTintedImage([UIImage imageNamed:@"ModernConversationAudioSlideToCancel.png"], UIColorRGB(0x9597a0))];
            CGRect slideToCancelArrowFrame = viewFrame(_slideToCancelArrow);
            setViewFrame(_slideToCancelArrow, CGRectMake(CGFloor((self.frame.size.width - _slideToCancelLabel.frame.size.width) / 2.0f) - slideToCancelArrowFrame.size.width - 7.0f, CGFloor((self.frame.size.height - _slideToCancelLabel.frame.size.height) / 2.0f), slideToCancelArrowFrame.size.width, slideToCancelArrowFrame.size.height));
            _slideToCancelArrow.alpha = 0.0f;
            [self addSubview:_slideToCancelArrow];

            _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f);
            _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f);

            _cancelButton = [[TGModernButton alloc] init];
            _cancelButton.titleLabel.font = TGSystemFontOfSize(17.0f);
            [_cancelButton setTitle:TGLocalized(@"Common.Cancel") forState:UIControlStateNormal];
            [_cancelButton setTitleColor:TGAccentColor()];
            [_cancelButton addTarget:self action:@selector(cancelPressed) forControlEvents:UIControlEventTouchUpInside];
            [_cancelButton sizeToFit];
            [self addSubview:_cancelButton];

            setViewFrame(_cancelButton, CGRectMake(CGFloor((self.frame.size.width - _cancelButton.frame.size.width) / 2.0f), CGFloor((self.frame.size.height - _cancelButton.frame.size.height) / 2.0f) - 1.0f, _cancelButton.frame.size.width, _cancelButton.frame.size.height));
        }

        if (!isAlreadyLocked)
        {
            _cancelButton.alpha = 0.0f;
            _cancelButton.userInteractionEnabled = false;
        }

        _recordDurationLabel.text = @"0:00,00";

        if (_recordIndicatorView.superview == nil)
            [self addSubview:_recordIndicatorView];
        [_recordIndicatorView.layer removeAllAnimations];

        if (_recordDurationLabel.superview == nil)
            [self addSubview:_recordDurationLabel];
        [_recordDurationLabel.layer removeAllAnimations];

        _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(300.0f, 0.0f);
        _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(300.0f, 0.0f);

        int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;

        [UIView animateWithDuration:0.25 delay:0.06 options:animationCurveOption animations:^
        {
            _recordIndicatorView.transform = CGAffineTransformIdentity;
        } completion:nil];

        [UIView animateWithDuration:0.25 delay:0.0 options:animationCurveOption animations:^
        {
            _recordDurationLabel.alpha = 1.0f;
            _recordDurationLabel.transform = CGAffineTransformIdentity;
        } completion:nil];

        if (!isAlreadyLocked)
        {
            if (_slideToCancelLabel.superview == nil)
                [self addSubview:_slideToCancelLabel];

            [UIView animateWithDuration:0.18 delay:0.0 options:animationCurveOption animations:^
            {
                _slideToCancelArrow.alpha = 1.0f;
                _slideToCancelArrow.transform = CGAffineTransformIdentity;

                _slideToCancelLabel.alpha = 1.0f;
                _slideToCancelLabel.transform = CGAffineTransformIdentity;
            } completion:nil];
        }
    }
    else
    {
        [self removeDotAnimation];
        // Clamp the velocity-driven duration factor into [0.4, 1.0].
        NSTimeInterval durationFactor = MAX(0.4, MIN(1.0, velocity / 1000.0));

        int options = 0;

        if (ABS(CFAbsoluteTimeGetCurrent() - _recordingInterfaceShowTime) < 0.2)
        {
            options = UIViewAnimationOptionBeginFromCurrentState;
        }

        int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;
        [UIView animateWithDuration:0.25 * durationFactor delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
        {
            _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f);
        } completion:^(BOOL finished)
        {
            if (finished)
                [_recordIndicatorView removeFromSuperview];
        }];

        [UIView animateWithDuration:0.25 * durationFactor delay:0.05 * durationFactor options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
        {
            _recordDurationLabel.alpha = 0.0f;
            _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f);
        } completion:^(BOOL finished)
        {
            if (finished)
                [_recordDurationLabel removeFromSuperview];
        }];

        [UIView animateWithDuration:0.2 * durationFactor delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
        {
            _slideToCancelArrow.alpha = 0.0f;
            _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(-300, 0.0f);
            _slideToCancelLabel.alpha = 0.0f;
            _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(-200, 0.0f);

            CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
            transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
            _cancelButton.transform = transform;
            _cancelButton.alpha = 0.0f;

            _sendButton.transform = CGAffineTransformMakeScale(0.01, 0.01);
            _sendButton.alpha = 0.0f;

            transform = CGAffineTransformMakeTranslation(0.0f, -44.0f);
            transform = CGAffineTransformScale(transform, 0.25f, 0.25f);

            _deleteButton.transform = transform;
            _deleteButton.alpha = 0.0f;

            _scrubberView.transform = transform;
            _scrubberView.alpha = 0.0f;
        } completion:nil];
    }
}
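
// Tracks the record button's horizontal drag and slides the cancel hint with it.
// `value.x` appears to be normalized, so the hint moves up to ~300pt with a 5pt
// dead zone; e.g. value.x = 0.5 gives offset = max(0, 150 - 5) = 145pt. Once the
// offset exceeds freeOffsetLimit (the gap between the hint's resting position and
// the ~90pt timer area, computed once per localization), the red dot and timer
// are pushed along as well.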
- (void)buttonInteractionUpdate:(CGPoint)value
{
    CGFloat valueX = value.x;
    CGFloat offset = valueX * 300.0f;

    offset = MAX(0.0f, offset - 5.0f);

    _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(-offset, 0.0f);

    CGAffineTransform labelTransform = CGAffineTransformIdentity;
    labelTransform = CGAffineTransformTranslate(labelTransform, -offset, 0.0f);
    _slideToCancelLabel.transform = labelTransform;

    CGAffineTransform indicatorTransform = CGAffineTransformIdentity;
    CGAffineTransform durationTransform = CGAffineTransformIdentity;

    static CGFloat freeOffsetLimit = 35.0f;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^
    {
        CGFloat labelWidth = [TGLocalized(@"Conversation.SlideToCancel") sizeWithFont:TGSystemFontOfSize(14.0f)].width;
        CGFloat arrowOrigin = CGFloor((TGScreenSize().width - labelWidth) / 2.0f) - 9.0f - 6.0f;
        CGFloat timerWidth = 90.0f;

        freeOffsetLimit = MAX(0.0f, arrowOrigin - timerWidth);
    });

    if (offset > freeOffsetLimit)
    {
        indicatorTransform = CGAffineTransformMakeTranslation(freeOffsetLimit - offset, 0.0f);
        durationTransform = CGAffineTransformMakeTranslation(freeOffsetLimit - offset, 0.0f);
    }

    if (!CGAffineTransformEqualToTransform(indicatorTransform, _recordIndicatorView.transform))
        _recordIndicatorView.transform = indicatorTransform;

    if (!CGAffineTransformEqualToTransform(durationTransform, _recordDurationLabel.transform))
        _recordDurationLabel.transform = durationTransform;
}
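
// Transition to the "locked" recording state: the slide-to-cancel hint collapses
// upward and fades out while a tappable Cancel button scales in from below.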
- (void)setLocked
{
    CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 22.0f);
    transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
    _cancelButton.alpha = 0.0f;
    _cancelButton.transform = transform;
    _cancelButton.userInteractionEnabled = true;

    [UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
    {
        _cancelButton.transform = CGAffineTransformIdentity;

        CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
        transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
        _slideToCancelLabel.transform = transform;
    } completion:^(__unused BOOL finished)
    {
        _slideToCancelLabel.transform = CGAffineTransformIdentity;
    }];

    [UIView animateWithDuration:0.25 animations:^
    {
        _slideToCancelArrow.alpha = 0.0f;
        _slideToCancelLabel.alpha = 0.0f;
        _cancelButton.alpha = 1.0f;
    }];
}
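
// Transition from recording to review: builds the delete/send buttons and the
// trim scrubber, slides the red dot and timer out to the left, and fades the
// Cancel button away. The scrubber's data source and delegate are both served by
// self.parent, which presumably owns the recorded video.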
- (void)setStopped
{
    UIImage *deleteImage = [UIImage imageNamed:@"ModernConversationActionDelete.png"];

    _deleteButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 45.0f, 45.0f)];
    [_deleteButton setImage:deleteImage forState:UIControlStateNormal];
    _deleteButton.adjustsImageWhenDisabled = false;
    _deleteButton.adjustsImageWhenHighlighted = false;
    [_deleteButton addTarget:self action:@selector(deleteButtonPressed) forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_deleteButton];

    CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 45.0f);
    transform = CGAffineTransformScale(transform, 0.88f, 0.88f);
    _deleteButton.transform = transform;

    TGModernButton *sendButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 45.0f, 45.0f)];
    sendButton.modernHighlight = true;
    _sendButton = sendButton;
    _sendButton.alpha = 0.0f;
    _sendButton.exclusiveTouch = true;
    [_sendButton setImage:[UIImage imageNamed:@"ModernConversationSend"] forState:UIControlStateNormal];
    _sendButton.adjustsImageWhenHighlighted = false;
    [_sendButton addTarget:self action:@selector(sendButtonPressed) forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_sendButton];

    _scrubberView = [[TGVideoMessageScrubber alloc] init];
    _scrubberView.dataSource = self.parent;
    _scrubberView.delegate = self.parent;
    [self addSubview:_scrubberView];

    [self layoutSubviews];

    transform = CGAffineTransformMakeTranslation(0.0f, 44.0f);
    _scrubberView.transform = transform;

    int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;
    [UIView animateWithDuration:0.25 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
    {
        _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f);
        _recordIndicatorView.alpha = 0.0f;
    } completion:^(BOOL finished)
    {
        if (finished)
        {
            [self removeDotAnimation];
            [_recordIndicatorView removeFromSuperview];
        }
    }];

    [UIView animateWithDuration:0.25 delay:0.05 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
    {
        _recordDurationLabel.alpha = 0.0f;
        _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f);
    } completion:^(BOOL finished)
    {
        if (finished)
            [_recordDurationLabel removeFromSuperview];
    }];

    [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
    {
        CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
        transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
        _cancelButton.transform = transform;
        _cancelButton.alpha = 0.0f;
    } completion:nil];

    [UIView animateWithDuration:0.2 delay:0.07 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
    {
        _deleteButton.transform = CGAffineTransformMakeScale(0.88f, 0.88f);
    } completion:nil];

    [UIView animateWithDuration:0.3 animations:^
    {
        _sendButton.alpha = 1.0f;
    }];
}

- (void)showScrubberView
{
    int animationCurveOption = iosMajorVersion() >= 7 ? (7 << 16) : 0;
    [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
    {
        _scrubberView.transform = CGAffineTransformIdentity;
    } completion:nil];
}

- (void)deleteButtonPressed
{
    _deleteButton.userInteractionEnabled = false;

    if (self.deletePressed != nil)
        self.deletePressed();
}

- (void)sendButtonPressed
{
    _sendButton.userInteractionEnabled = false;

    if (self.sendPressed != nil)
        self.sendPressed();
}

- (void)cancelPressed
{
    dispatch_async(dispatch_get_main_queue(), ^
    {
        if (self.cancel != nil)
            self.cancel();
    });
}

- (void)setDurationString:(NSString *)string
{
    _recordDurationLabel.text = string;
}

- (void)recordingStarted
{
    [self addRecordingDotAnimation];
}
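
// The blinking is a repeating, autoreversing opacity keyframe animation on the
// red dot's layer, keyed as "opacity-dot" so removeDotAnimation can cancel it.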
- (void)addRecordingDotAnimation {
    CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
    animation.values = @[@1.0f, @1.0f, @0.0f];
    // NB: with the default (linear) calculation mode, keyTimes is expected to
    // contain one entry per value.
    animation.keyTimes = @[@.0, @0.4546, @0.9091, @1];
    animation.duration = 0.5;
    animation.autoreverses = true;
    animation.repeatCount = INFINITY;

    [_recordIndicatorView.layer addAnimation:animation forKey:@"opacity-dot"];
}

- (void)removeDotAnimation {
    [_recordIndicatorView.layer removeAnimationForKey:@"opacity-dot"];
}

- (void)layoutSubviews
{
    if (_slideToCancelLabel != nil)
    {
        CGRect slideToCancelLabelFrame = viewFrame(_slideToCancelLabel);
        setViewFrame(_slideToCancelLabel, CGRectMake(CGFloor((self.frame.size.width - slideToCancelLabelFrame.size.width) / 2.0f), CGFloor((self.frame.size.height - slideToCancelLabelFrame.size.height) / 2.0f), slideToCancelLabelFrame.size.width, slideToCancelLabelFrame.size.height));

        CGRect slideToCancelArrowFrame = viewFrame(_slideToCancelArrow);
        setViewFrame(_slideToCancelArrow, CGRectMake(CGFloor((self.frame.size.width - slideToCancelLabelFrame.size.width) / 2.0f) - slideToCancelArrowFrame.size.width - 7.0f, CGFloor((self.frame.size.height - slideToCancelLabelFrame.size.height) / 2.0f), slideToCancelArrowFrame.size.width, slideToCancelArrowFrame.size.height));
    }

    setViewFrame(_sendButton, CGRectMake(self.frame.size.width - _sendButton.frame.size.width, 0.0f, _sendButton.frame.size.width, self.frame.size.height));
    _deleteButton.center = CGPointMake(24.0f, 22.0f);
    setViewFrame(_scrubberView, CGRectMake(46.0f, (self.frame.size.height - 33.0f) / 2.0f, self.frame.size.width - 46.0f * 2.0f, 33.0f));
}

@end

7  LegacyComponents/TGVideoMessageRingView.h  Normal file
@@ -0,0 +1,7 @@
#import <UIKit/UIKit.h>

@interface TGVideoMessageRingView : UIView

- (void)setValue:(CGFloat)value;

@end

52  LegacyComponents/TGVideoMessageRingView.m  Normal file
@@ -0,0 +1,52 @@
#import "TGVideoMessageRingView.h"
|
||||
|
||||
#import "TGColor.h"
|
||||
|
||||
@interface TGVideoMessageRingView ()
|
||||
{
|
||||
CGFloat _value;
|
||||
}
|
||||
@end
|
||||
|
||||
@implementation TGVideoMessageRingView
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame
|
||||
{
|
||||
self = [super initWithFrame:frame];
|
||||
if (self != nil)
|
||||
{
|
||||
self.backgroundColor = [UIColor clearColor];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)setValue:(CGFloat)value
|
||||
{
|
||||
_value = value;
|
||||
[self setNeedsDisplay];
|
||||
}
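
// Draws the progress ring as a filled stroke outline: an arc starting at
// 12 o'clock (-M_PI_2) sweeping by 2π·value, expanded into a 4pt round-capped
// band via CGPathCreateCopyByStrokingPath. value = 0.25 therefore yields a
// quarter ring from 12 to 3 o'clock.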
- (void)drawRect:(CGRect)rect
{
    if (_value < DBL_EPSILON)
        return;

    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetFillColorWithColor(context, TGAccentColor().CGColor);

    CGMutablePathRef path = CGPathCreateMutable();
    CGPoint centerPoint = CGPointMake(rect.size.width / 2.0f, rect.size.height / 2.0f);
    CGFloat lineWidth = 4.0f;

    CGPathAddArc(path, NULL, centerPoint.x, centerPoint.y, rect.size.width / 2.0f - lineWidth / 2.0f, -M_PI_2, -M_PI_2 + 2 * M_PI * _value, false);

    CGPathRef strokedArc = CGPathCreateCopyByStrokingPath(path, NULL, lineWidth, kCGLineCapRound, kCGLineJoinMiter, 10);
    CGPathRelease(path);

    CGContextAddPath(context, strokedArc);
    CGPathRelease(strokedArc);

    CGContextFillPath(context);
}

@end

68  LegacyComponents/TGVideoMessageScrubber.h  Normal file
@@ -0,0 +1,68 @@
#import <UIKit/UIKit.h>

@protocol TGVideoMessageScrubberDelegate;
@protocol TGVideoMessageScrubberDataSource;

@interface TGVideoMessageScrubber : UIView

@property (nonatomic, weak) id<TGVideoMessageScrubberDelegate> delegate;
@property (nonatomic, weak) id<TGVideoMessageScrubberDataSource> dataSource;

@property (nonatomic, readonly) NSTimeInterval duration;

@property (nonatomic, assign) bool allowsTrimming;
@property (nonatomic, readonly) bool hasTrimming;
@property (nonatomic, assign) NSTimeInterval trimStartValue;
@property (nonatomic, assign) NSTimeInterval trimEndValue;

@property (nonatomic, assign) NSTimeInterval maximumLength;

@property (nonatomic, assign) bool isPlaying;
@property (nonatomic, assign) NSTimeInterval value;
- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition;

- (void)setTrimApplied:(bool)trimApplied;

- (void)resetToStart;

- (void)reloadData;
- (void)reloadDataAndReset:(bool)reset;

- (void)reloadThumbnails;
- (void)ignoreThumbnails;
- (void)resetThumbnails;

- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)timestamp isSummaryThubmnail:(bool)isSummaryThumbnail;

@end
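
// The scrubber pulls everything through these two protocols: the delegate
// receives scrubbing and trim-editing callbacks, while the data source supplies
// the video duration and renders thumbnails asynchronously, delivering each one
// back through -setThumbnailImage:forTimestamp:isSummaryThubmnail: above.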
@protocol TGVideoMessageScrubberDelegate <NSObject>

- (void)videoScrubberDidBeginScrubbing:(TGVideoMessageScrubber *)videoScrubber;
- (void)videoScrubberDidEndScrubbing:(TGVideoMessageScrubber *)videoScrubber;
- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber valueDidChange:(NSTimeInterval)position;

- (void)videoScrubberDidBeginEditing:(TGVideoMessageScrubber *)videoScrubber;
- (void)videoScrubberDidEndEditing:(TGVideoMessageScrubber *)videoScrubber endValueChanged:(bool)endValueChanged;
- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber editingStartValueDidChange:(NSTimeInterval)startValue;
- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber editingEndValueDidChange:(NSTimeInterval)endValue;

- (void)videoScrubberDidFinishRequestingThumbnails:(TGVideoMessageScrubber *)videoScrubber;
- (void)videoScrubberDidCancelRequestingThumbnails:(TGVideoMessageScrubber *)videoScrubber;

@end

@protocol TGVideoMessageScrubberDataSource <NSObject>

- (NSTimeInterval)videoScrubberDuration:(TGVideoMessageScrubber *)videoScrubber;

- (NSArray *)videoScrubber:(TGVideoMessageScrubber *)videoScrubber evenlySpacedTimestamps:(NSInteger)count startingAt:(NSTimeInterval)startTimestamp endingAt:(NSTimeInterval)endTimestamp;

- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber requestThumbnailImagesForTimestamps:(NSArray *)timestamps size:(CGSize)size isSummaryThumbnails:(bool)isSummaryThumbnails;

- (CGFloat)videoScrubberThumbnailAspectRatio:(TGVideoMessageScrubber *)videoScrubber;

- (CGSize)videoScrubberOriginalSize:(TGVideoMessageScrubber *)videoScrubber cropRect:(CGRect *)cropRect cropOrientation:(UIImageOrientation *)cropOrientation cropMirrored:(bool *)cropMirrored;

@end

883  LegacyComponents/TGVideoMessageScrubber.m  Normal file
@@ -0,0 +1,883 @@
#import "TGVideoMessageScrubber.h"
|
||||
|
||||
#import "LegacyComponentsInternal.h"
|
||||
#import "TGImageUtils.h"
|
||||
#import "POPBasicAnimation.h"
|
||||
|
||||
#import <LegacyComponents/UIControl+HitTestEdgeInsets.h>
|
||||
|
||||
#import <LegacyComponents/TGPhotoEditorInterfaceAssets.h>
|
||||
|
||||
#import "TGVideoMessageScrubberThumbnailView.h"
|
||||
#import "TGVideoMessageTrimView.h"
|
||||
|
||||
const CGFloat TGVideoScrubberMinimumTrimDuration = 1.0f;
|
||||
const CGFloat TGVideoScrubberZoomActivationInterval = 0.25f;
|
||||
const CGFloat TGVideoScrubberTrimRectEpsilon = 3.0f;
|
||||
|
||||
typedef enum
|
||||
{
|
||||
TGMediaPickerGalleryVideoScrubberPivotSourceHandle,
|
||||
TGMediaPickerGalleryVideoScrubberPivotSourceTrimStart,
|
||||
TGMediaPickerGalleryVideoScrubberPivotSourceTrimEnd
|
||||
} TGMediaPickerGalleryVideoScrubberPivotSource;
|
||||
|
||||
@interface TGVideoMessageScrubber () <UIGestureRecognizerDelegate>
|
||||
{
|
||||
UIControl *_wrapperView;
|
||||
UIView *_summaryThumbnailSnapshotView;
|
||||
UIView *_zoomedThumbnailWrapperView;
|
||||
UIView *_summaryThumbnailWrapperView;
|
||||
TGVideoMessageTrimView *_trimView;
|
||||
UIView *_leftCurtainView;
|
||||
UIView *_rightCurtainView;
|
||||
UIControl *_scrubberHandle;
|
||||
|
||||
UIPanGestureRecognizer *_panGestureRecognizer;
|
||||
UILongPressGestureRecognizer *_pressGestureRecognizer;
|
||||
|
||||
bool _beganInteraction;
|
||||
bool _endedInteraction;
|
||||
|
||||
bool _scrubbing;
|
||||
CGFloat _scrubbingPosition;
|
||||
|
||||
NSTimeInterval _duration;
|
||||
NSTimeInterval _trimStartValue;
|
||||
NSTimeInterval _trimEndValue;
|
||||
|
||||
bool _ignoreThumbnailLoad;
|
||||
bool _fadingThumbnailViews;
|
||||
CGFloat _thumbnailAspectRatio;
|
||||
NSArray *_summaryTimestamps;
|
||||
NSMutableArray *_summaryThumbnailViews;
|
||||
|
||||
CGSize _originalSize;
|
||||
CGRect _cropRect;
|
||||
UIImageOrientation _cropOrientation;
|
||||
bool _cropMirrored;
|
||||
|
||||
UIImageView *_leftMaskView;
|
||||
UIImageView *_rightMaskView;
|
||||
}
|
||||
@end
|
||||
|
||||
@implementation TGVideoMessageScrubber
|
||||
|
||||
- (instancetype)initWithFrame:(CGRect)frame
|
||||
{
|
||||
self = [super initWithFrame:frame];
|
||||
if (self != nil)
|
||||
{
|
||||
_allowsTrimming = true;
|
||||
|
||||
_wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
|
||||
_wrapperView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10);
|
||||
[self addSubview:_wrapperView];
|
||||
|
||||
_zoomedThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
|
||||
[_wrapperView addSubview:_zoomedThumbnailWrapperView];
|
||||
|
||||
_summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)];
|
||||
_summaryThumbnailWrapperView.clipsToBounds = true;
|
||||
[_wrapperView addSubview:_summaryThumbnailWrapperView];
|
||||
|
||||
_leftMaskView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"VideoMessageScrubberLeftMask"]];
|
||||
[_wrapperView addSubview:_leftMaskView];
|
||||
|
||||
_rightMaskView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"VideoMessageScrubberRightMask"]];
|
||||
[_wrapperView addSubview:_rightMaskView];
|
||||
|
||||
_leftCurtainView = [[UIView alloc] init];
|
||||
_leftCurtainView.backgroundColor = [UIColorRGB(0xf7f7f7) colorWithAlphaComponent:0.8f];
|
||||
[_wrapperView addSubview:_leftCurtainView];
|
||||
|
||||
_rightCurtainView = [[UIView alloc] init];
|
||||
_rightCurtainView.backgroundColor = [UIColorRGB(0xf7f7f7) colorWithAlphaComponent:0.8f];
|
||||
[_wrapperView addSubview:_rightCurtainView];
|
||||
|
||||
__weak TGVideoMessageScrubber *weakSelf = self;
|
||||
_trimView = [[TGVideoMessageTrimView alloc] initWithFrame:CGRectZero];
|
||||
_trimView.exclusiveTouch = true;
|
||||
_trimView.trimmingEnabled = _allowsTrimming;
|
||||
_trimView.didBeginEditing = ^(__unused bool start)
|
||||
{
|
||||
__strong TGVideoMessageScrubber *strongSelf = weakSelf;
|
||||
if (strongSelf == nil)
|
||||
return;
|
||||
|
||||
id<TGVideoMessageScrubberDelegate> delegate = strongSelf.delegate;
|
||||
if ([delegate respondsToSelector:@selector(videoScrubberDidBeginEditing:)])
|
||||
[delegate videoScrubberDidBeginEditing:strongSelf];
|
||||
|
||||
[strongSelf->_trimView setTrimming:true animated:true];
|
||||
|
||||
[strongSelf setScrubberHandleHidden:true animated:false];
|
||||
};
|
||||
_trimView.didEndEditing = ^(bool start)
|
||||
{
|
||||
__strong TGVideoMessageScrubber *strongSelf = weakSelf;
|
||||
if (strongSelf == nil)
|
||||
return;
|
||||
|
||||
id<TGVideoMessageScrubberDelegate> delegate = strongSelf.delegate;
|
||||
if ([delegate respondsToSelector:@selector(videoScrubberDidEndEditing:endValueChanged:)])
|
||||
[delegate videoScrubberDidEndEditing:strongSelf endValueChanged:!start];
|
||||
|
||||
CGRect newTrimRect = strongSelf->_trimView.frame;
|
||||
CGRect trimRect = [strongSelf _scrubbingRect];
|
||||
CGRect normalScrubbingRect = [strongSelf _scrubbingRect];
|
||||
CGFloat maxWidth = trimRect.size.width + normalScrubbingRect.origin.x * 2;
|
||||
|
||||
CGFloat leftmostPosition = trimRect.origin.x - normalScrubbingRect.origin.x;
|
||||
if (newTrimRect.origin.x < leftmostPosition + TGVideoScrubberTrimRectEpsilon)
|
||||
{
|
||||
CGFloat delta = leftmostPosition - newTrimRect.origin.x;
|
||||
|
||||
newTrimRect.origin.x += delta;
|
||||
newTrimRect.size.width = MIN(maxWidth, newTrimRect.size.width - delta);
|
||||
}
|
||||
|
||||
CGFloat rightmostPosition = maxWidth;
|
||||
if (CGRectGetMaxX(newTrimRect) > maxWidth - TGVideoScrubberTrimRectEpsilon)
|
||||
{
|
||||
CGFloat delta = rightmostPosition - CGRectGetMaxX(newTrimRect);
|
||||
|
||||
newTrimRect.size.width = MIN(maxWidth, newTrimRect.size.width + delta);
|
||||
}
|
||||
|
||||
strongSelf->_trimView.frame = newTrimRect;
|
||||
|
||||
NSTimeInterval trimStartPosition = 0.0;
|
||||
NSTimeInterval trimEndPosition = 0.0;
|
||||
|
||||
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:newTrimRect duration:strongSelf.duration];
|
||||
|
||||
strongSelf->_trimStartValue = trimStartPosition;
|
||||
strongSelf->_trimEndValue = trimEndPosition;
|
||||
|
||||
bool isTrimmed = (strongSelf->_trimStartValue > FLT_EPSILON || fabs(strongSelf->_trimEndValue - strongSelf->_duration) > FLT_EPSILON);
|
||||
|
||||
[strongSelf->_trimView setTrimming:isTrimmed animated:true];
|
||||
|
||||
[strongSelf setScrubberHandleHidden:false animated:true];
|
||||
};
|
||||
_trimView.startHandleMoved = ^(CGPoint translation)
|
||||
{
|
||||
__strong TGVideoMessageScrubber *strongSelf = weakSelf;
|
||||
if (strongSelf == nil)
|
||||
return;
|
||||
|
||||
UIView *trimView = strongSelf->_trimView;
|
||||
|
||||
CGRect availableTrimRect = [strongSelf _scrubbingRect];
|
||||
CGRect normalScrubbingRect = [strongSelf _scrubbingRect];
|
||||
CGFloat originX = MAX(0, trimView.frame.origin.x + translation.x);
|
||||
CGFloat delta = originX - trimView.frame.origin.x;
|
||||
CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - originX;
|
||||
|
||||
CGRect trimViewRect = CGRectMake(originX, trimView.frame.origin.y, MIN(maxWidth, trimView.frame.size.width - delta), trimView.frame.size.height);
|
||||
|
||||
NSTimeInterval trimStartPosition = 0.0;
|
||||
NSTimeInterval trimEndPosition = 0.0;
|
||||
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
|
||||
|
||||
NSTimeInterval duration = trimEndPosition - trimStartPosition;
|
||||
|
||||
if (trimEndPosition - trimStartPosition < TGVideoScrubberMinimumTrimDuration)
|
||||
return;
|
||||
|
||||
if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength)
|
||||
{
|
||||
trimViewRect = CGRectMake(trimView.frame.origin.x + delta,
|
||||
trimView.frame.origin.y,
|
||||
trimView.frame.size.width,
|
||||
trimView.frame.size.height);
|
||||
|
||||
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
|
||||
}
|
||||
|
||||
trimView.frame = trimViewRect;
|
||||
|
||||
[strongSelf _layoutTrimCurtainViews];
|
||||
|
||||
strongSelf->_trimStartValue = trimStartPosition;
|
||||
strongSelf->_trimEndValue = trimEndPosition;
|
||||
|
||||
[strongSelf setValue:strongSelf->_trimStartValue];
|
||||
|
||||
UIView *handle = strongSelf->_scrubberHandle;
|
||||
handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y);
|
||||
|
||||
id<TGVideoMessageScrubberDelegate> delegate = strongSelf.delegate;
|
||||
if ([delegate respondsToSelector:@selector(videoScrubber:editingStartValueDidChange:)])
|
||||
[delegate videoScrubber:strongSelf editingStartValueDidChange:trimStartPosition];
|
||||
};
|
||||
_trimView.endHandleMoved = ^(CGPoint translation)
|
||||
{
|
||||
__strong TGVideoMessageScrubber *strongSelf = weakSelf;
|
||||
if (strongSelf == nil)
|
||||
return;
|
||||
|
||||
UIView *trimView = strongSelf->_trimView;
|
||||
|
||||
CGRect availableTrimRect = [strongSelf _scrubbingRect];
|
||||
CGRect normalScrubbingRect = [strongSelf _scrubbingRect];
|
||||
CGFloat localOriginX = trimView.frame.origin.x - availableTrimRect.origin.x + normalScrubbingRect.origin.x;
|
||||
CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - localOriginX;
|
||||
|
||||
CGRect trimViewRect = CGRectMake(trimView.frame.origin.x, trimView.frame.origin.y, MIN(maxWidth, trimView.frame.size.width + translation.x), trimView.frame.size.height);
|
||||
|
||||
NSTimeInterval trimStartPosition = 0.0;
|
||||
NSTimeInterval trimEndPosition = 0.0;
|
||||
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
|
||||
|
||||
NSTimeInterval duration = trimEndPosition - trimStartPosition;
|
||||
|
||||
if (trimEndPosition - trimStartPosition < TGVideoScrubberMinimumTrimDuration)
|
||||
return;
|
||||
|
||||
if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength)
|
||||
{
|
||||
trimViewRect = CGRectMake(trimView.frame.origin.x + translation.x, trimView.frame.origin.y, trimView.frame.size.width, trimView.frame.size.height);
|
||||
[strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration];
|
||||
}
|
||||
|
||||
trimView.frame = trimViewRect;
|
||||
|
||||
[strongSelf _layoutTrimCurtainViews];
|
||||
|
||||
strongSelf->_trimStartValue = trimStartPosition;
|
||||
strongSelf->_trimEndValue = trimEndPosition;
|
||||
|
||||
[strongSelf setValue:strongSelf->_trimEndValue];
|
||||
|
||||
UIView *handle = strongSelf->_scrubberHandle;
|
||||
handle.center = CGPointMake(CGRectGetMaxX(trimView.frame) - 12 - handle.frame.size.width / 2, handle.center.y);
|
||||
|
||||
id<TGVideoMessageScrubberDelegate> delegate = strongSelf.delegate;
|
||||
if ([delegate respondsToSelector:@selector(videoScrubber:editingEndValueDidChange:)])
|
||||
[delegate videoScrubber:strongSelf editingEndValueDidChange:trimEndPosition];
|
||||
};
|
||||
[_wrapperView addSubview:_trimView];
|
||||
|
||||
_scrubberHandle = [[UIControl alloc] initWithFrame:CGRectMake(0, -1, 8, 33.0f)];
|
||||
_scrubberHandle.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10);
|
||||
//[_wrapperView addSubview:_scrubberHandle];
|
||||
|
||||
static UIImage *handleViewImage = nil;
|
||||
static dispatch_once_t onceToken;
|
||||
dispatch_once(&onceToken, ^
|
||||
{
|
||||
UIGraphicsBeginImageContextWithOptions(CGSizeMake(_scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height), false, 0.0f);
|
||||
CGContextRef context = UIGraphicsGetCurrentContext();
|
||||
CGContextSetShadowWithColor(context, CGSizeMake(0, 0.0f), 0.5f, [UIColor colorWithWhite:0.0f alpha:0.65f].CGColor);
|
||||
CGContextSetFillColorWithColor(context, [UIColor whiteColor].CGColor);
|
||||
|
||||
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectMake(1.0f, 1.5f, _scrubberHandle.frame.size.width - 2.0f, _scrubberHandle.frame.size.height - 2.0f) cornerRadius:2];
|
||||
[path fill];
|
||||
|
||||
handleViewImage = UIGraphicsGetImageFromCurrentImageContext();
|
||||
UIGraphicsEndImageContext();
|
||||
});
|
||||
|
||||
UIImageView *scrubberImageView = [[UIImageView alloc] initWithFrame:_scrubberHandle.bounds];
|
||||
scrubberImageView.image = handleViewImage;
|
||||
[_scrubberHandle addSubview:scrubberImageView];
|
||||
|
||||
_pressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePress:)];
|
||||
_pressGestureRecognizer.delegate = self;
|
||||
_pressGestureRecognizer.minimumPressDuration = 0.1f;
|
||||
//[_scrubberHandle addGestureRecognizer:_pressGestureRecognizer];
|
||||
|
||||
_panGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];
|
||||
_panGestureRecognizer.delegate = self;
|
||||
//[_scrubberHandle addGestureRecognizer:_panGestureRecognizer];
|
||||
}
|
||||
return self;
|
||||
}
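
// Thumbnail loading is a three-step handshake: reloadThumbnails asks the data
// source for evenly spaced timestamps covering [0, duration], requests one
// thumbnail per 33pt-wide slot (at up to 2x scale), and the data source later
// pushes each rendered image back via setThumbnailImage:forTimestamp:... When
// the last one arrives, the strip is laid out and any stale snapshot of the
// previous strip is cross-faded away.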
- (void)reloadThumbnails
{
    [self resetThumbnails];

    id<TGVideoMessageScrubberDataSource> dataSource = self.dataSource;

    _summaryThumbnailViews = [[NSMutableArray alloc] init];

    if ([dataSource respondsToSelector:@selector(videoScrubberOriginalSize:cropRect:cropOrientation:cropMirrored:)])
        _originalSize = [dataSource videoScrubberOriginalSize:self cropRect:&_cropRect cropOrientation:&_cropOrientation cropMirrored:&_cropMirrored];

    CGFloat originalAspectRatio = 1.0f;
    CGFloat frameAspectRatio = 1.0f;
    if ([dataSource respondsToSelector:@selector(videoScrubberThumbnailAspectRatio:)])
        originalAspectRatio = [dataSource videoScrubberThumbnailAspectRatio:self];

    if (!CGRectEqualToRect(_cropRect, CGRectZero))
        frameAspectRatio = _cropRect.size.width / _cropRect.size.height;
    else
        frameAspectRatio = originalAspectRatio;

    _thumbnailAspectRatio = frameAspectRatio;

    NSInteger thumbnailCount = (NSInteger)CGCeil(_summaryThumbnailWrapperView.frame.size.width / [self _thumbnailSizeWithAspectRatio:frameAspectRatio orientation:_cropOrientation].width);

    if ([dataSource respondsToSelector:@selector(videoScrubber:evenlySpacedTimestamps:startingAt:endingAt:)])
        _summaryTimestamps = [dataSource videoScrubber:self evenlySpacedTimestamps:thumbnailCount startingAt:0 endingAt:_duration];

    CGSize thumbnailImageSize = [self _thumbnailSizeWithAspectRatio:originalAspectRatio orientation:UIImageOrientationUp];
    CGFloat scale = MIN(2.0f, TGScreenScaling());
    thumbnailImageSize = CGSizeMake(thumbnailImageSize.width * scale, thumbnailImageSize.height * scale);

    if ([dataSource respondsToSelector:@selector(videoScrubber:requestThumbnailImagesForTimestamps:size:isSummaryThumbnails:)])
        [dataSource videoScrubber:self requestThumbnailImagesForTimestamps:_summaryTimestamps size:thumbnailImageSize isSummaryThumbnails:true];
}

- (void)ignoreThumbnails
{
    _ignoreThumbnailLoad = true;
}

- (void)resetThumbnails
{
    _ignoreThumbnailLoad = false;

    if (_summaryThumbnailViews.count < _summaryTimestamps.count)
    {
        id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
        if ([delegate respondsToSelector:@selector(videoScrubberDidCancelRequestingThumbnails:)])
            [delegate videoScrubberDidCancelRequestingThumbnails:self];
    }

    for (UIView *view in _summaryThumbnailWrapperView.subviews)
        [view removeFromSuperview];

    for (UIView *view in _zoomedThumbnailWrapperView.subviews)
        [view removeFromSuperview];

    _summaryThumbnailViews = nil;
    _summaryTimestamps = nil;
}

- (void)reloadData
{
    [self reloadDataAndReset:true];
}

- (void)reloadDataAndReset:(bool)reset
{
    id<TGVideoMessageScrubberDataSource> dataSource = self.dataSource;
    if ([dataSource respondsToSelector:@selector(videoScrubberDuration:)])
        _duration = [dataSource videoScrubberDuration:self];
    else
        return;

    if (!reset && _summaryThumbnailViews.count > 0 && _summaryThumbnailSnapshotView == nil)
    {
        _summaryThumbnailSnapshotView = [_summaryThumbnailWrapperView snapshotViewAfterScreenUpdates:false];
        _summaryThumbnailSnapshotView.frame = _summaryThumbnailWrapperView.frame;
        [_summaryThumbnailWrapperView.superview insertSubview:_summaryThumbnailSnapshotView aboveSubview:_summaryThumbnailWrapperView];
    }
    else if (reset)
    {
        [_summaryThumbnailSnapshotView removeFromSuperview];
        _summaryThumbnailSnapshotView = nil;
    }

    [self _layoutTrimView];

    [self reloadThumbnails];
}

- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)__unused timestamp isSummaryThubmnail:(bool)isSummaryThumbnail
{
    TGVideoMessageScrubberThumbnailView *thumbnailView = [[TGVideoMessageScrubberThumbnailView alloc] initWithImage:image originalSize:_originalSize cropRect:_cropRect cropOrientation:_cropOrientation cropMirrored:_cropMirrored];

    if (isSummaryThumbnail)
    {
        [_summaryThumbnailWrapperView addSubview:thumbnailView];
        [_summaryThumbnailViews addObject:thumbnailView];
    }

    if ((isSummaryThumbnail && _summaryThumbnailViews.count == _summaryTimestamps.count))
    {
        if (!_ignoreThumbnailLoad)
        {
            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidFinishRequestingThumbnails:)])
                [delegate videoScrubberDidFinishRequestingThumbnails:self];
        }
        _ignoreThumbnailLoad = false;

        if (isSummaryThumbnail)
        {
            [self _layoutSummaryThumbnailViews];

            UIView *snapshotView = _summaryThumbnailSnapshotView;
            _summaryThumbnailSnapshotView = nil;

            if (snapshotView != nil)
            {
                _fadingThumbnailViews = true;
                [UIView animateWithDuration:0.3f animations:^
                {
                    snapshotView.alpha = 0.0f;
                } completion:^(__unused BOOL finished)
                {
                    _fadingThumbnailViews = false;
                    [snapshotView removeFromSuperview];
                }];
            }
        }
    }
}
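
// Thumbnails are fixed 33x33pt squares; _layoutSummaryThumbnailViews centers the
// strip, so with a 200pt wrapper and 7 thumbnails (231pt total) originX comes out
// at -15.5 and the edge tiles are clipped by the wrapper rather than resized.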
- (CGSize)_thumbnailSize
{
    return [self _thumbnailSizeWithAspectRatio:_thumbnailAspectRatio orientation:_cropOrientation];
}

- (CGSize)_thumbnailSizeWithAspectRatio:(CGFloat)__unused aspectRatio orientation:(UIImageOrientation)__unused orientation
{
    return CGSizeMake(33, 33);
}

- (void)_layoutSummaryThumbnailViews
{
    if (_summaryThumbnailViews.count == 0)
        return;

    CGSize thumbnailViewSize = [self _thumbnailSize];
    CGFloat totalWidth = thumbnailViewSize.width * _summaryThumbnailViews.count;
    CGFloat originX = (_summaryThumbnailWrapperView.frame.size.width - totalWidth) / 2;

    [_summaryThumbnailViews enumerateObjectsUsingBlock:^(UIView *view, NSUInteger index, __unused BOOL *stop)
    {
        view.frame = CGRectMake(originX + thumbnailViewSize.width * index, 0, thumbnailViewSize.width, thumbnailViewSize.height);
    }];
}

- (void)setIsPlaying:(bool)isPlaying
{
    _isPlaying = isPlaying;

    if (_isPlaying)
        [self _updateScrubberAnimationsAndResetCurrentPosition:false];
    else
        [self removeHandleAnimation];
}

- (void)setValue:(NSTimeInterval)value
{
    [self setValue:value resetPosition:false];
}

- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition
{
    if (_duration < FLT_EPSILON)
        return;

    if (value > _duration)
        value = _duration;

    _value = value;

    if (resetPosition)
        [self _updateScrubberAnimationsAndResetCurrentPosition:true];
}

- (void)_updateScrubberAnimationsAndResetCurrentPosition:(bool)resetCurrentPosition
{
    if (_duration < FLT_EPSILON)
        return;

    CGPoint point = [self _scrubberPositionForPosition:_value duration:_duration];
    CGRect frame = CGRectMake(CGFloor(point.x) - _scrubberHandle.frame.size.width / 2, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);

    if (_trimStartValue > DBL_EPSILON && fabs(_value - _trimStartValue) < 0.01)
    {
        frame = CGRectMake(_trimView.frame.origin.x + [self _scrubbingRect].origin.x, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);
    }
    else if (fabs(_value - _trimEndValue) < 0.01)
    {
        frame = CGRectMake(_trimView.frame.origin.x + _trimView.frame.size.width - [self _scrubbingRect].origin.x - _scrubberHandle.frame.size.width, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);
    }

    if (_isPlaying)
    {
        if (resetCurrentPosition)
            _scrubberHandle.frame = frame;

        CGRect scrubbingRect = [self _scrubbingRect];
        CGFloat maxPosition = scrubbingRect.origin.x + scrubbingRect.size.width - _scrubberHandle.frame.size.width / 2;
        NSTimeInterval duration = _duration;
        NSTimeInterval value = _value;

        if (self.allowsTrimming)
        {
            maxPosition = MIN(maxPosition, CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x - _scrubberHandle.frame.size.width / 2);
            duration = _trimEndValue - _trimStartValue;
            value = _value - _trimStartValue;
        }

        CGRect endFrame = CGRectMake(maxPosition - _scrubberHandle.frame.size.width / 2, frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);

        [self addHandleAnimationFromFrame:_scrubberHandle.frame toFrame:endFrame duration:MAX(0.0, duration - value)];
    }
    else
    {
        [self removeHandleAnimation];
        _scrubberHandle.frame = frame;
    }
}

- (void)addHandleAnimationFromFrame:(CGRect)fromFrame toFrame:(CGRect)toFrame duration:(NSTimeInterval)duration
{
    [self removeHandleAnimation];

    POPBasicAnimation *animation = [POPBasicAnimation animationWithPropertyNamed:kPOPViewFrame];
    animation.fromValue = [NSValue valueWithCGRect:fromFrame];
    animation.toValue = [NSValue valueWithCGRect:toFrame];
    animation.duration = duration;
    animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    animation.clampMode = kPOPAnimationClampBoth;
    animation.roundingFactor = 0.5f;

    [_scrubberHandle pop_addAnimation:animation forKey:@"progress"];
}

- (void)removeHandleAnimation
{
    [_scrubberHandle pop_removeAnimationForKey:@"progress"];
}

- (void)resetToStart
{
    _value = _trimStartValue;

    [self removeHandleAnimation];
    _scrubberHandle.center = CGPointMake(_trimView.frame.origin.x + [self _scrubbingRect].origin.x + _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
}
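
// The press and pan recognizers below are set up to run simultaneously on the
// handle (see the delegate method), so _beganInteraction/_endedInteraction act
// as guards that collapse the pair into a single begin/end scrubbing
// notification; note that the addGestureRecognizer calls are currently commented
// out in the initializer above.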
#pragma mark - Scrubber Handle

- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    if (gestureRecognizer.view != otherGestureRecognizer.view)
        return false;

    return true;
}

- (void)handlePress:(UILongPressGestureRecognizer *)gestureRecognizer
{
    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _scrubbing = true;

            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidBeginScrubbing:)])
                [delegate videoScrubberDidBeginScrubbing:self];

            _endedInteraction = false;
            _beganInteraction = true;
        }
        break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _scrubbing = false;

            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidEndScrubbing:)])
                [delegate videoScrubberDidEndScrubbing:self];

            _endedInteraction = true;
        }
        break;

        default:
            break;
    }
}

- (void)handlePan:(UIPanGestureRecognizer *)gestureRecognizer
{
    CGPoint translation = [gestureRecognizer translationInView:self];
    [gestureRecognizer setTranslation:CGPointZero inView:self];

    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _scrubbing = true;

            [self removeHandleAnimation];

            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidBeginScrubbing:)])
                [delegate videoScrubberDidBeginScrubbing:self];

            _endedInteraction = false;
            _beganInteraction = true;
        }
        break;

        case UIGestureRecognizerStateChanged:
        {
            CGRect scrubbingRect = [self _scrubbingRect];
            CGRect normalScrubbingRect = [self _scrubbingRect];
            CGFloat minPosition = scrubbingRect.origin.x + _scrubberHandle.frame.size.width / 2;
            CGFloat maxPosition = scrubbingRect.origin.x + scrubbingRect.size.width - _scrubberHandle.frame.size.width / 2;
            if (self.allowsTrimming)
            {
                minPosition = MAX(minPosition, _trimView.frame.origin.x + normalScrubbingRect.origin.x + _scrubberHandle.frame.size.width / 2);
                maxPosition = MIN(maxPosition, CGRectGetMaxX(_trimView.frame) - normalScrubbingRect.origin.x - _scrubberHandle.frame.size.width / 2);
            }

            _scrubberHandle.center = CGPointMake(MIN(MAX(_scrubberHandle.center.x + translation.x, minPosition), maxPosition), _scrubberHandle.center.y);

            NSTimeInterval position = [self _positionForScrubberPosition:_scrubberHandle.center duration:_duration];

            if (self.allowsTrimming)
            {
                if (ABS(_scrubberHandle.center.x - minPosition) < FLT_EPSILON)
                    position = _trimStartValue;
                else if (ABS(_scrubberHandle.center.x - maxPosition) < FLT_EPSILON)
                    position = _trimEndValue;
            }

            _value = position;

            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubber:valueDidChange:)])
                [delegate videoScrubber:self valueDidChange:position];
        }
        break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _scrubbing = false;

            id<TGVideoMessageScrubberDelegate> delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidEndScrubbing:)])
                [delegate videoScrubberDidEndScrubbing:self];

            _endedInteraction = true;
        }
        break;

        default:
            break;
    }
}

- (void)setScrubberHandleHidden:(bool)hidden animated:(bool)animated
{
    if (animated)
    {
        _scrubberHandle.hidden = false;
        [UIView animateWithDuration:0.25f animations:^
        {
            _scrubberHandle.alpha = hidden ? 0.0f : 1.0f;
        } completion:^(BOOL finished)
        {
            if (finished)
                _scrubberHandle.hidden = hidden;
        }];
    }
    else
    {
        _scrubberHandle.hidden = hidden;
        _scrubberHandle.alpha = hidden ? 0.0f : 1.0f;
    }
}

- (CGPoint)_scrubberPositionForPosition:(NSTimeInterval)position duration:(NSTimeInterval)duration
{
    CGRect scrubbingRect = [self _scrubbingRect];

    if (duration < FLT_EPSILON)
    {
        position = 0.0;
        duration = 1.0;
    }

    return CGPointMake(_scrubberHandle.frame.size.width / 2 + scrubbingRect.origin.x + (CGFloat)(position / duration) * (scrubbingRect.size.width - _scrubberHandle.frame.size.width), CGRectGetMidY([self _scrubbingRect]));
}

- (NSTimeInterval)_positionForScrubberPosition:(CGPoint)scrubberPosition duration:(NSTimeInterval)duration
{
    CGRect scrubbingRect = [self _scrubbingRect];
    return (scrubberPosition.x - _scrubberHandle.frame.size.width / 2 - scrubbingRect.origin.x) / (scrubbingRect.size.width - _scrubberHandle.frame.size.width) * duration;
}

- (CGRect)_scrubbingRect
{
    CGFloat width = self.frame.size.width;
    CGFloat origin = 0;
    if (self.allowsTrimming)
    {
        width = width - 16 * 2;
        origin = 16;
    }
    else
    {
        width = width - 2 * 2;
        origin = 2;
    }

    return CGRectMake(origin, 0, width, 33);
}
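
// The two mappings above are inverses over the usable track. With a 320pt-wide
// scrubber and trimming allowed, _scrubbingRect is {16, 0, 288, 33}; for the 8pt
// handle, position/duration = 0.5 maps to x = 4 + 16 + 0.5 * 280 = 160, and
// feeding x = 160 back through _positionForScrubberPosition recovers 0.5.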
#pragma mark - Trimming

- (bool)hasTrimming
{
    return (_allowsTrimming && (_trimStartValue > FLT_EPSILON || _trimEndValue < _duration));
}

- (void)setAllowsTrimming:(bool)allowsTrimming
{
    _allowsTrimming = allowsTrimming;
    _trimView.trimmingEnabled = allowsTrimming;
}

- (NSTimeInterval)trimStartValue
{
    return MAX(0.0, _trimStartValue);
}

- (void)setTrimStartValue:(NSTimeInterval)trimStartValue
{
    _trimStartValue = trimStartValue;

    [self _layoutTrimView];

    if (_value < _trimStartValue)
    {
        [self setValue:_trimStartValue];
        _scrubberHandle.center = CGPointMake(_trimView.frame.origin.x + 12 + _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
    }
}

- (NSTimeInterval)trimEndValue
{
    return MIN(_duration, _trimEndValue);
}

- (void)setTrimEndValue:(NSTimeInterval)trimEndValue
{
    _trimEndValue = trimEndValue;

    [self _layoutTrimView];

    if (_value > _trimEndValue)
    {
        [self setValue:_trimEndValue];
        _scrubberHandle.center = CGPointMake(CGRectGetMaxX(_trimView.frame) - 12 - _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
    }
}

- (void)setTrimApplied:(bool)trimApplied
{
    [_trimView setTrimming:trimApplied animated:false];
}

- (void)_trimStartPosition:(NSTimeInterval *)trimStartPosition trimEndPosition:(NSTimeInterval *)trimEndPosition forTrimFrame:(CGRect)trimFrame duration:(NSTimeInterval)duration
{
    if (trimStartPosition == NULL || trimEndPosition == NULL)
        return;

    CGRect trimRect = [self _scrubbingRect];

    *trimStartPosition = (CGRectGetMinX(trimFrame) + 12 - trimRect.origin.x) / trimRect.size.width * duration;
    *trimEndPosition = (CGRectGetMaxX(trimFrame) - 12 - trimRect.origin.x) / trimRect.size.width * duration;
}

- (CGRect)_trimFrameForStartPosition:(NSTimeInterval)startPosition endPosition:(NSTimeInterval)endPosition duration:(NSTimeInterval)duration
{
    CGRect trimRect = [self _scrubbingRect];
    CGRect normalScrubbingRect = [self _scrubbingRect];

    CGFloat minX = (CGFloat)startPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x - normalScrubbingRect.origin.x;
    CGFloat maxX = (CGFloat)endPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x + normalScrubbingRect.origin.x;

    return CGRectMake(minX, 0, maxX - minX, 33);
}
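
// The two helpers above convert between the trim view's frame and trim times;
// the 12pt insets in _trimStartPosition:... appear to account for the trim
// handle width, matching the offsets used when snapping the scrubber handle to
// a trim edge in the setters above.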
- (void)_layoutTrimView
{
    if (_duration > DBL_EPSILON)
    {
        NSTimeInterval endPosition = _trimEndValue;
        if (endPosition < DBL_EPSILON)
            endPosition = _duration;

        _trimView.frame = [self _trimFrameForStartPosition:_trimStartValue endPosition:endPosition duration:_duration];
    }
    else
    {
        _trimView.frame = _wrapperView.bounds;
    }

    [self _layoutTrimCurtainViews];
}

- (void)_layoutTrimCurtainViews
{
    _leftCurtainView.hidden = !self.allowsTrimming;
    _rightCurtainView.hidden = !self.allowsTrimming;

    if (self.allowsTrimming)
    {
        CGRect scrubbingRect = [self _scrubbingRect];
        CGRect normalScrubbingRect = [self _scrubbingRect];

        _leftCurtainView.frame = CGRectMake(scrubbingRect.origin.x - 16.0f, 0.0f, _trimView.frame.origin.x - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 16.0f, 33);
        _rightCurtainView.frame = CGRectMake(CGRectGetMaxX(_trimView.frame) - 16.0f, 0.0f, scrubbingRect.origin.x + scrubbingRect.size.width - CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 32.0f, 33);
    }
}

#pragma mark - Layout

- (void)setFrame:(CGRect)frame
{
    [super setFrame:frame];

    _summaryThumbnailWrapperView.frame = CGRectMake(0.0f, 0.0f, frame.size.width, 33);
    _zoomedThumbnailWrapperView.frame = _summaryThumbnailWrapperView.frame;

    _leftMaskView.frame = CGRectMake(0.0f, 0.0f, 16.0f, 33.0f);
    _rightMaskView.frame = CGRectMake(frame.size.width - 16.0f, 0.0f, 16.0f, 33.0f);
}

- (void)layoutSubviews
{
    _wrapperView.frame = CGRectMake(0, 0, self.frame.size.width, 33);
    [self _layoutTrimView];

    [self _updateScrubberAnimationsAndResetCurrentPosition:true];
}

@end

7  LegacyComponents/TGVideoMessageScrubberThumbnailView.h  Normal file
@@ -0,0 +1,7 @@
#import <UIKit/UIKit.h>

@interface TGVideoMessageScrubberThumbnailView : UIView

- (instancetype)initWithImage:(UIImage *)image originalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored;

@end

90  LegacyComponents/TGVideoMessageScrubberThumbnailView.m  Normal file
@@ -0,0 +1,90 @@
#import "TGVideoMessageScrubberThumbnailView.h"
|
||||
|
||||
#import "LegacyComponentsInternal.h"
|
||||
#import "TGImageUtils.h"
|
||||
|
||||
#import <LegacyComponents/TGPhotoEditorUtils.h>
|
||||
|
||||
@interface TGVideoMessageScrubberThumbnailView ()
|
||||
{
|
||||
CGSize _originalSize;
|
||||
CGRect _cropRect;
|
||||
UIImageOrientation _cropOrientation;
|
||||
bool _cropMirrored;
|
||||
|
||||
UIImageView *_imageView;
|
||||
UIView *_stripeView;
|
||||
}
|
||||
@end
|
||||
|
||||
@implementation TGVideoMessageScrubberThumbnailView
|
||||
|
||||
- (instancetype)initWithImage:(UIImage *)image
|
||||
{
|
||||
self = [super initWithFrame:CGRectZero];
|
||||
if (self != nil)
|
||||
{
|
||||
self.clipsToBounds = true;
|
||||
|
||||
_imageView = [[UIImageView alloc] initWithFrame:CGRectZero];
|
||||
_imageView.image = image;
|
||||
[self addSubview:_imageView];
|
||||
|
||||
_stripeView = [[UIView alloc] init];
|
||||
_stripeView.backgroundColor = [UIColor colorWithWhite:0.0f alpha:0.3f];
|
||||
[self addSubview:_stripeView];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (instancetype)initWithImage:(UIImage *)image originalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored
|
||||
{
|
||||
self = [self initWithImage:image];
|
||||
if (self != nil)
|
||||
{
|
||||
_originalSize = originalSize;
|
||||
_cropRect = cropRect;
|
||||
_cropOrientation = cropOrientation;
|
||||
_cropMirrored = cropMirrored;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)setFrame:(CGRect)frame
|
||||
{
|
||||
[super setFrame:frame];
|
||||
|
||||
if (_imageView == nil)
|
||||
return;
|
||||
|
||||
CGAffineTransform transform = CGAffineTransformMakeRotation(TGRotationForOrientation(_cropOrientation));
|
||||
if (_cropMirrored)
|
||||
transform = CGAffineTransformScale(transform, -1.0f, 1.0f);
|
||||
_imageView.transform = transform;
|
||||
|
||||
CGRect cropRect = _cropRect;
|
||||
CGSize originalSize = _originalSize;
|
||||
|
||||
if (_cropOrientation == UIImageOrientationLeft)
|
||||
{
|
||||
cropRect = CGRectMake(cropRect.origin.y, originalSize.width - cropRect.size.width - cropRect.origin.x, cropRect.size.height, cropRect.size.width);
|
||||
originalSize = CGSizeMake(originalSize.height, originalSize.width);
|
||||
}
|
||||
else if (_cropOrientation == UIImageOrientationRight)
|
||||
{
|
||||
cropRect = CGRectMake(originalSize.height - cropRect.size.height - cropRect.origin.y, cropRect.origin.x, cropRect.size.height, cropRect.size.width);
|
||||
originalSize = CGSizeMake(originalSize.height, originalSize.width);
|
||||
}
|
||||
else if (_cropOrientation == UIImageOrientationDown)
|
||||
{
|
||||
cropRect = CGRectMake(originalSize.width - cropRect.size.width - cropRect.origin.x, originalSize.height - cropRect.size.height - cropRect.origin.y, cropRect.size.width, cropRect.size.height);
|
||||
}
|
||||
|
||||
CGFloat ratio = frame.size.width / cropRect.size.width;
|
||||
_imageView.frame = CGRectMake(-cropRect.origin.x * ratio, -cropRect.origin.y * ratio, originalSize.width * ratio, originalSize.height * ratio);
|
||||
|
||||
CGFloat thickness = 1.0f - TGRetinaPixel;
|
||||
_stripeView.frame = CGRectMake(frame.size.width - thickness, 0, thickness, frame.size.height);
|
||||
}
|
||||
|
||||
@end
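
To sanity-check the orientation remapping by hand, here is the UIImageOrientationLeft branch traced with assumed numbers (the values are illustrative only, not from the diff):

    CGSize originalSize = CGSizeMake(640.0f, 480.0f);
    CGRect cropRect = CGRectMake(100.0f, 50.0f, 200.0f, 150.0f);

    // (y, W - w - x, h, w) = (50, 640 - 200 - 100, 150, 200) = (50, 340, 150, 200)
    CGRect rotatedCrop = CGRectMake(cropRect.origin.y,
                                    originalSize.width - cropRect.size.width - cropRect.origin.x,
                                    cropRect.size.height,
                                    cropRect.size.width);
    CGSize rotatedSize = CGSizeMake(originalSize.height, originalSize.width);   // (480, 640)

    // With an assumed 33pt-wide tile: ratio = 33 / 150 = 0.22, so the image view
    // lands at (-50 * 0.22, -340 * 0.22) with size (480 * 0.22, 640 * 0.22).
    CGFloat ratio = 33.0f / rotatedCrop.size.width;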

LegacyComponents/TGVideoMessageTrimView.h (new file, 14 lines)
@@ -0,0 +1,14 @@
#import <UIKit/UIKit.h>

@interface TGVideoMessageTrimView : UIControl

@property (nonatomic, copy) void(^didBeginEditing)(bool start);
@property (nonatomic, copy) void(^startHandleMoved)(CGPoint translation);
@property (nonatomic, copy) void(^endHandleMoved)(CGPoint translation);
@property (nonatomic, copy) void(^didEndEditing)(bool start);

@property (nonatomic, assign) bool trimmingEnabled;

- (void)setTrimming:(bool)trimming animated:(bool)animated;

@end
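
A minimal wiring sketch for the four callback blocks (the block bodies are placeholders; how the owner applies the translations is up to it):

    TGVideoMessageTrimView *trimView = [[TGVideoMessageTrimView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 210.0f, 33.0f)];
    trimView.trimmingEnabled = true;

    trimView.didBeginEditing = ^(bool isStartHandle)
    {
        // e.g. pause playback while a handle is being dragged
    };
    trimView.startHandleMoved = ^(CGPoint translation)
    {
        // translation.x is the pan delta since the previous callback;
        // move the left edge of the trim region by that amount
    };
    trimView.endHandleMoved = ^(CGPoint translation)
    {
        // same contract for the right handle
    };
    trimView.didEndEditing = ^(bool isStartHandle)
    {
        // commit the new trim range and resume playback
    };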

LegacyComponents/TGVideoMessageTrimView.m (new file, 192 lines)
@@ -0,0 +1,192 @@
#import "TGVideoMessageTrimView.h"

#import <LegacyComponents/UIControl+HitTestEdgeInsets.h>

@interface TGVideoMessageTrimView () <UIGestureRecognizerDelegate>
{
    UIButton *_leftSegmentView;
    UIButton *_rightSegmentView;

    UILongPressGestureRecognizer *_startHandlePressGestureRecognizer;
    UILongPressGestureRecognizer *_endHandlePressGestureRecognizer;

    UIPanGestureRecognizer *_startHandlePanGestureRecognizer;
    UIPanGestureRecognizer *_endHandlePanGestureRecognizer;

    // Each handle carries both a press and a pan recognizer, running simultaneously;
    // these flags ensure didBeginEditing / didEndEditing fire exactly once per drag.
    bool _beganInteraction;
    bool _endedInteraction;

    bool _isTracking;
}
@end

@implementation TGVideoMessageTrimView

- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self != nil)
    {
        self.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -25, -5, -25);

        _leftSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, 33)];
        _leftSegmentView.exclusiveTouch = true;
        _leftSegmentView.adjustsImageWhenHighlighted = false;
        [_leftSegmentView setBackgroundImage:[UIImage imageNamed:@"VideoMessageLeftHandle"] forState:UIControlStateNormal];
        _leftSegmentView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -25, -5, -10);
        [self addSubview:_leftSegmentView];

        _rightSegmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, 33)];
        _rightSegmentView.exclusiveTouch = true;
        _rightSegmentView.adjustsImageWhenHighlighted = false;
        [_rightSegmentView setBackgroundImage:[UIImage imageNamed:@"VideoMessageRightHandle"] forState:UIControlStateNormal];
        _rightSegmentView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -25);
        [self addSubview:_rightSegmentView];

        _startHandlePressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePress:)];
        _startHandlePressGestureRecognizer.delegate = self;
        _startHandlePressGestureRecognizer.minimumPressDuration = 0.1f;
        [_leftSegmentView addGestureRecognizer:_startHandlePressGestureRecognizer];

        _endHandlePressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePress:)];
        _endHandlePressGestureRecognizer.delegate = self;
        _endHandlePressGestureRecognizer.minimumPressDuration = 0.1f;
        [_rightSegmentView addGestureRecognizer:_endHandlePressGestureRecognizer];

        _startHandlePanGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePan:)];
        _startHandlePanGestureRecognizer.delegate = self;
        [_leftSegmentView addGestureRecognizer:_startHandlePanGestureRecognizer];

        _endHandlePanGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePan:)];
        _endHandlePanGestureRecognizer.delegate = self;
        [_rightSegmentView addGestureRecognizer:_endHandlePanGestureRecognizer];
    }
    return self;
}

- (void)setTrimmingEnabled:(bool)trimmingEnabled
{
    _trimmingEnabled = trimmingEnabled;

    _leftSegmentView.userInteractionEnabled = trimmingEnabled;
    _rightSegmentView.userInteractionEnabled = trimmingEnabled;

    [self setNeedsLayout];
}

- (void)setTrimming:(bool)__unused trimming animated:(bool)__unused animated
{
}

- (void)handleHandlePress:(UILongPressGestureRecognizer *)gestureRecognizer
{
    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _isTracking = true;

            if (self.didBeginEditing != nil)
                self.didBeginEditing(gestureRecognizer.view == _leftSegmentView);

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _isTracking = false;

            if (self.didEndEditing != nil)
                self.didEndEditing(gestureRecognizer.view == _leftSegmentView);

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

- (void)handleHandlePan:(UIPanGestureRecognizer *)gestureRecognizer
{
    // Report incremental deltas: read the translation, then reset it so the
    // next callback receives only the movement since this one.
    CGPoint translation = [gestureRecognizer translationInView:self];
    [gestureRecognizer setTranslation:CGPointZero inView:self];

    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _isTracking = true;

            if (self.didBeginEditing != nil)
                self.didBeginEditing(gestureRecognizer.view == _leftSegmentView);

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateChanged:
        {
            if (gestureRecognizer == _startHandlePanGestureRecognizer && self.startHandleMoved != nil)
                self.startHandleMoved(translation);
            else if (gestureRecognizer == _endHandlePanGestureRecognizer && self.endHandleMoved != nil)
                self.endHandleMoved(translation);
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _isTracking = false;

            if (self.didEndEditing != nil)
                self.didEndEditing(gestureRecognizer.view == _leftSegmentView);

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    // Only the press and pan recognizers attached to the same handle may run together.
    if (gestureRecognizer.view != otherGestureRecognizer.view)
        return false;

    return true;
}

- (void)layoutSubviews
{
    // When trimming is disabled the handles collapse to thin end caps.
    CGFloat handleWidth = self.trimmingEnabled ? 16.0f : 2.0f;

    _leftSegmentView.frame = CGRectMake(0, 0, handleWidth, self.frame.size.height);
    _rightSegmentView.frame = CGRectMake(self.frame.size.width - handleWidth, 0, handleWidth, self.frame.size.height);
}

@end
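
Design note: each handle pairs a long press (minimumPressDuration = 0.1) with a pan recognizer, allowed to run simultaneously only when attached to the same view. The press reports grab and release as soon as the finger lands or lifts, the pan supplies the movement, and the _beganInteraction / _endedInteraction flags deduplicate the begin/end callbacks whichever recognizer fires first. The delegate method could be written more compactly; a behaviorally equivalent sketch:

    - (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
    {
        return gestureRecognizer.view == otherGestureRecognizer.view;
    }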