diff --git a/LegacyComponents.xcodeproj/project.pbxproj b/LegacyComponents.xcodeproj/project.pbxproj index 102108dc7a..fe4b6702b7 100644 --- a/LegacyComponents.xcodeproj/project.pbxproj +++ b/LegacyComponents.xcodeproj/project.pbxproj @@ -478,6 +478,22 @@ D026608B1F34B9F9000E2DC5 /* TGSearchDisplayMixin.m in Sources */ = {isa = PBXBuildFile; fileRef = D02660891F34B9F9000E2DC5 /* TGSearchDisplayMixin.m */; }; D026608E1F34BA71000E2DC5 /* TGPickPinAnnotationView.h in Headers */ = {isa = PBXBuildFile; fileRef = D026608C1F34BA71000E2DC5 /* TGPickPinAnnotationView.h */; }; D026608F1F34BA71000E2DC5 /* TGPickPinAnnotationView.m in Sources */ = {isa = PBXBuildFile; fileRef = D026608D1F34BA71000E2DC5 /* TGPickPinAnnotationView.m */; }; + D04268F91F58687D0037ECE8 /* TGVideoCameraGLView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D04268FA1F58687D0037ECE8 /* TGVideoCameraGLView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */; }; + D04269031F586A070037ECE8 /* TGVideoMessageRingView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D04269041F586A070037ECE8 /* TGVideoMessageRingView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */; }; + D04269051F586A070037ECE8 /* TGVideoMessageScrubber.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D04269061F586A070037ECE8 /* TGVideoMessageScrubber.m in Sources */ = {isa = PBXBuildFile; fileRef = D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */; }; + D04269071F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h 
*/; settings = {ATTRIBUTES = (Public, ); }; }; + D04269081F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */; }; + D04269091F586A070037ECE8 /* TGVideoMessageTrimView.h in Headers */ = {isa = PBXBuildFile; fileRef = D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D042690A1F586A070037ECE8 /* TGVideoMessageTrimView.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */; }; + D042690D1F586B140037ECE8 /* TGVideoMessageControls.h in Headers */ = {isa = PBXBuildFile; fileRef = D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D042690E1F586B140037ECE8 /* TGVideoMessageControls.m in Sources */ = {isa = PBXBuildFile; fileRef = D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */; }; + D04269111F586E430037ECE8 /* TGVideoCameraPipeline.h in Headers */ = {isa = PBXBuildFile; fileRef = D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D04269121F586E430037ECE8 /* TGVideoCameraPipeline.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */; }; + D04269151F586EC80037ECE8 /* TGVideoMessageCaptureController.h in Headers */ = {isa = PBXBuildFile; fileRef = D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D04269161F586EC80037ECE8 /* TGVideoMessageCaptureController.m in Sources */ = {isa = PBXBuildFile; fileRef = D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */; }; D07BC6CF1F2A18B700ED97AA /* TGCameraMainPhoneView.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BC6C91F2A18B700ED97AA /* TGCameraMainPhoneView.h */; settings = {ATTRIBUTES = (Public, ); }; }; D07BC6D01F2A18B700ED97AA /* TGCameraMainPhoneView.m in Sources */ = {isa = 
PBXBuildFile; fileRef = D07BC6CA1F2A18B700ED97AA /* TGCameraMainPhoneView.m */; }; D07BC6D11F2A18B700ED97AA /* TGCameraMainTabletView.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BC6CB1F2A18B700ED97AA /* TGCameraMainTabletView.h */; settings = {ATTRIBUTES = (Public, ); }; }; @@ -1115,6 +1131,11 @@ D07BCBFC1F2B757700ED97AA /* TGEmbedPIPScrubber.m in Sources */ = {isa = PBXBuildFile; fileRef = D07BCBFA1F2B757700ED97AA /* TGEmbedPIPScrubber.m */; }; D07BCC051F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h in Headers */ = {isa = PBXBuildFile; fileRef = D07BCC031F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h */; settings = {ATTRIBUTES = (Public, ); }; }; D07BCC061F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m in Sources */ = {isa = PBXBuildFile; fileRef = D07BCC041F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m */; }; + D0F7C9C41F55DA49005B255A /* TGVideoCameraMovieRecorder.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D0F7C9C51F55DA49005B255A /* TGVideoCameraMovieRecorder.m in Sources */ = {isa = PBXBuildFile; fileRef = D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */; }; + D0F7C9C81F55DA83005B255A /* TGVideoCameraGLRenderer.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */; settings = {ATTRIBUTES = (Public, ); }; }; + D0F7C9C91F55DA83005B255A /* TGVideoCameraGLRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */; }; + D0F7C9D61F55DB2D005B255A /* TGLiveUploadInterface.h in Headers */ = {isa = PBXBuildFile; fileRef = D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */; settings = {ATTRIBUTES = (Public, ); }; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ @@ -1591,6 +1612,22 @@ D02660891F34B9F9000E2DC5 /* TGSearchDisplayMixin.m */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGSearchDisplayMixin.m; sourceTree = ""; }; D026608C1F34BA71000E2DC5 /* TGPickPinAnnotationView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGPickPinAnnotationView.h; sourceTree = ""; }; D026608D1F34BA71000E2DC5 /* TGPickPinAnnotationView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGPickPinAnnotationView.m; sourceTree = ""; }; + D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraGLView.h; sourceTree = ""; }; + D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraGLView.m; sourceTree = ""; }; + D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageRingView.h; sourceTree = ""; }; + D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageRingView.m; sourceTree = ""; }; + D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageScrubber.h; sourceTree = ""; }; + D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageScrubber.m; sourceTree = ""; }; + D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageScrubberThumbnailView.h; sourceTree = ""; }; + D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageScrubberThumbnailView.m; sourceTree = ""; }; + D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageTrimView.h; sourceTree = ""; }; + D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageTrimView.m; sourceTree = ""; }; + D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageControls.h; sourceTree = ""; }; + D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageControls.m; sourceTree = ""; }; + D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraPipeline.h; sourceTree = ""; }; + D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraPipeline.m; sourceTree = ""; }; + D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoMessageCaptureController.h; sourceTree = ""; }; + D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoMessageCaptureController.m; sourceTree = ""; }; D07BC6C91F2A18B700ED97AA /* TGCameraMainPhoneView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGCameraMainPhoneView.h; sourceTree = ""; }; D07BC6CA1F2A18B700ED97AA /* TGCameraMainPhoneView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.objc; path = TGCameraMainPhoneView.m; sourceTree = ""; }; D07BC6CB1F2A18B700ED97AA /* TGCameraMainTabletView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGCameraMainTabletView.h; sourceTree = ""; }; @@ -2229,6 +2266,11 @@ D07BCC031F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGModernConversationTitleActivityIndicator.h; sourceTree = ""; }; D07BCC041F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGModernConversationTitleActivityIndicator.m; sourceTree = ""; }; D0EB42021F3142F400838FE6 /* LegacyComponentsResources.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; name = LegacyComponentsResources.bundle; path = Resources/LegacyComponentsResources.bundle; sourceTree = ""; }; + D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraMovieRecorder.h; sourceTree = ""; }; + D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraMovieRecorder.m; sourceTree = ""; }; + D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGVideoCameraGLRenderer.h; sourceTree = ""; }; + D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TGVideoCameraGLRenderer.m; sourceTree = ""; }; + D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TGLiveUploadInterface.h; sourceTree = ""; }; /* End PBXFileReference 
section */ /* Begin PBXFrameworksBuildPhase section */ @@ -2311,6 +2353,7 @@ D07BCB3F1F2B69D400ED97AA /* Embed Video */, D02660331F34A7DA000E2DC5 /* Location */, 09750FAE1F30DAE1001B9886 /* Clipboard Menu */, + D0F7C9C11F55DA29005B255A /* Video Message */, D017772A1F1F8F100044446D /* LegacyComponents.h */, D017772B1F1F8F100044446D /* Info.plist */, ); @@ -2411,6 +2454,7 @@ D07BC9661F2A3F5C00ED97AA /* TGCache.h */, D07BC9671F2A3F5C00ED97AA /* TGCache.m */, D07BCAAC1F2B45DA00ED97AA /* TGFileUtils.h */, + D0F7C9D41F55DB2D005B255A /* TGLiveUploadInterface.h */, ); name = Utils; sourceTree = ""; @@ -3657,6 +3701,33 @@ name = Resources; sourceTree = ""; }; + D0F7C9C11F55DA29005B255A /* Video Message */ = { + isa = PBXGroup; + children = ( + D042690B1F586B140037ECE8 /* TGVideoMessageControls.h */, + D042690C1F586B140037ECE8 /* TGVideoMessageControls.m */, + D0F7C9C61F55DA83005B255A /* TGVideoCameraGLRenderer.h */, + D0F7C9C71F55DA83005B255A /* TGVideoCameraGLRenderer.m */, + D0F7C9C21F55DA49005B255A /* TGVideoCameraMovieRecorder.h */, + D0F7C9C31F55DA49005B255A /* TGVideoCameraMovieRecorder.m */, + D04268F71F58687D0037ECE8 /* TGVideoCameraGLView.h */, + D04268F81F58687D0037ECE8 /* TGVideoCameraGLView.m */, + D04268FB1F586A070037ECE8 /* TGVideoMessageRingView.h */, + D04268FC1F586A070037ECE8 /* TGVideoMessageRingView.m */, + D04268FD1F586A070037ECE8 /* TGVideoMessageScrubber.h */, + D04268FE1F586A070037ECE8 /* TGVideoMessageScrubber.m */, + D04268FF1F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h */, + D04269001F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m */, + D04269011F586A070037ECE8 /* TGVideoMessageTrimView.h */, + D04269021F586A070037ECE8 /* TGVideoMessageTrimView.m */, + D042690F1F586E430037ECE8 /* TGVideoCameraPipeline.h */, + D04269101F586E430037ECE8 /* TGVideoCameraPipeline.m */, + D04269131F586EC80037ECE8 /* TGVideoMessageCaptureController.h */, + D04269141F586EC80037ECE8 /* TGVideoMessageCaptureController.m */, + ); + name = "Video Message"; 
+ sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXHeadersBuildPhase section */ @@ -3674,6 +3745,7 @@ D01777531F1F8FE60044446D /* PSCoding.h in Headers */, D07BC6F91F2A19A700ED97AA /* TGCameraTimeCodeView.h in Headers */, D0177A131F213B440044446D /* NSValue+JNWAdditions.h in Headers */, + D0F7C9D61F55DB2D005B255A /* TGLiveUploadInterface.h in Headers */, D017781D1F1F961D0044446D /* TGMessageEntityMention.h in Headers */, D07BC7FE1F2A2C0B00ED97AA /* PGFadeTool.h in Headers */, D01778ED1F20CAE60044446D /* TGOverlayController.h in Headers */, @@ -3722,6 +3794,7 @@ D07BC9541F2A3EA900ED97AA /* TGModernConversationMentionsAssociatedPanel.h in Headers */, D01778FB1F20CF6B0044446D /* TGBackdropView.h in Headers */, D01779F51F2139980044446D /* POPBasicAnimationInternal.h in Headers */, + D04269071F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.h in Headers */, D026605A1F34A7F8000E2DC5 /* TGLocationMapModeControl.h in Headers */, D07BC8651F2A2F1300ED97AA /* TGMediaPickerCaptionInputPanel.h in Headers */, D07BC9191F2A380D00ED97AA /* TGPaintRadialBrush.h in Headers */, @@ -3756,6 +3829,7 @@ D07BCB211F2B646A00ED97AA /* TGPasscodeBackground.h in Headers */, D01778371F1F961D0044446D /* TGAudioMediaAttachment.h in Headers */, D07BC6F51F2A19A700ED97AA /* TGCameraSegmentsView.h in Headers */, + D04269051F586A070037ECE8 /* TGVideoMessageScrubber.h in Headers */, D07BC87F1F2A365000ED97AA /* TGProgressWindow.h in Headers */, D07BC8001F2A2C0B00ED97AA /* PGGrainTool.h in Headers */, D01779EA1F2139980044446D /* POPAnimationPrivate.h in Headers */, @@ -3775,6 +3849,7 @@ D0177B181F2641B10044446D /* PGCameraDeviceAngleSampler.h in Headers */, D0177A0A1F2139980044446D /* POPSpringSolver.h in Headers */, D07BCB3D1F2B65F100ED97AA /* TGWallpaperInfo.h in Headers */, + D04268F91F58687D0037ECE8 /* TGVideoCameraGLView.h in Headers */, D017794F1F2100280044446D /* TGMediaSelectionContext.h in Headers */, D01778111F1F961D0044446D /* TGMessageEntityBold.h in Headers */, 
D07BC6EB1F2A19A700ED97AA /* TGCameraFlashActiveView.h in Headers */, @@ -3805,6 +3880,7 @@ D01778171F1F961D0044446D /* TGMessageEntityEmail.h in Headers */, D01779E21F2139980044446D /* POPAnimation.h in Headers */, D02660721F34A7F8000E2DC5 /* TGLocationVenueCell.h in Headers */, + D042690D1F586B140037ECE8 /* TGVideoMessageControls.h in Headers */, D01777F41F1F961D0044446D /* TGTextCheckingResult.h in Headers */, D01778231F1F961D0044446D /* TGMessageEntityTextUrl.h in Headers */, D0177A9B1F22204A0044446D /* TGModernGalleryEmbeddedStickersHeaderView.h in Headers */, @@ -3859,6 +3935,7 @@ D07BC8CB1F2A37EC00ED97AA /* TGPhotoPaintSelectionContainerView.h in Headers */, D07BCB621F2B6A5600ED97AA /* TGEmbedPlayerControls.h in Headers */, D07BC7371F2A2A7D00ED97AA /* PGPhotoEditorPicture.h in Headers */, + D0F7C9C41F55DA49005B255A /* TGVideoCameraMovieRecorder.h in Headers */, D07BC85A1F2A2DBD00ED97AA /* TGMenuSheetDimView.h in Headers */, D07BC9AE1F2A4A5100ED97AA /* TGItemMenuSheetPreviewView.h in Headers */, D07BC7F81F2A2C0B00ED97AA /* PGCurvesTool.h in Headers */, @@ -3966,15 +4043,18 @@ D07BCA171F2A9A2B00ED97AA /* TGMediaPickerPhotoStripView.h in Headers */, D0177B1C1F2641B10044446D /* PGCameraMomentSession.h in Headers */, D01778C21F200AF70044446D /* TGAnimationBlockDelegate.h in Headers */, + D04269091F586A070037ECE8 /* TGVideoMessageTrimView.h in Headers */, D07BCBC31F2B6F6300ED97AA /* CBJSONCoubMapper.h in Headers */, D01779F61F2139980044446D /* POPCGUtils.h in Headers */, D07BC76A1F2A2B3700ED97AA /* TGPhotoEditorBlurToolView.h in Headers */, D01778401F1F961D0044446D /* TGDocumentAttributeSticker.h in Headers */, D0177A431F21F62A0044446D /* TGMediaVideoConverter.h in Headers */, D0177AA31F2222990044446D /* TGKeyCommandController.h in Headers */, + D04269111F586E430037ECE8 /* TGVideoCameraPipeline.h in Headers */, D07BCACF1F2B4E9000ED97AA /* TGAttachmentMenuCell.h in Headers */, D01779FA1F2139980044446D /* POPDecayAnimation.h in Headers */, D07BCBB11F2B6F6300ED97AA /* 
CBCoubAuthorVO.h in Headers */, + D0F7C9C81F55DA83005B255A /* TGVideoCameraGLRenderer.h in Headers */, D07BCBF31F2B72DC00ED97AA /* STKHTTPDataSource.h in Headers */, D07BC78C1F2A2B3700ED97AA /* TGPhotoEditorToolButtonsView.h in Headers */, D07BCBBB1F2B6F6300ED97AA /* CBCoubPlayerContance.h in Headers */, @@ -4152,6 +4232,7 @@ D07BC9431F2A3E4400ED97AA /* TGSuggestionContext.h in Headers */, D07BC9011F2A380D00ED97AA /* TGPaintBrushPreview.h in Headers */, D0177AA71F22239A0044446D /* TGModernGalleryController.h in Headers */, + D04269031F586A070037ECE8 /* TGVideoMessageRingView.h in Headers */, D07BC99D1F2A494000ED97AA /* TGStickerCollectionViewCell.h in Headers */, 09750FC11F30DCDC001B9886 /* TGClipboardGalleryPhotoItemView.h in Headers */, D07BC7BE1F2A2BDD00ED97AA /* PGPhotoToolComposer.h in Headers */, @@ -4224,6 +4305,7 @@ D07BCA961F2B443700ED97AA /* TGMediaAssetsPhotoCell.h in Headers */, D01778441F1F961D0044446D /* TGDocumentAttributeAnimated.h in Headers */, D07BCABB1F2B4E2600ED97AA /* TGTransitionLayout.h in Headers */, + D04269151F586EC80037ECE8 /* TGVideoMessageCaptureController.h in Headers */, D01778591F1F961D0044446D /* TGImageMediaAttachment.h in Headers */, D07BC9151F2A380D00ED97AA /* TGPaintPanGestureRecognizer.h in Headers */, D0177A281F2144700044446D /* TGPhotoEditorAnimation.h in Headers */, @@ -4327,6 +4409,7 @@ D07BC7B71F2A2BBE00ED97AA /* PGBlurTool.m in Sources */, D07BCA991F2B443700ED97AA /* TGMediaAssetsPickerController.m in Sources */, D01778471F1F961D0044446D /* TGDocumentAttributeAudio.m in Sources */, + D042690A1F586A070037ECE8 /* TGVideoMessageTrimView.m in Sources */, D01778181F1F961D0044446D /* TGMessageEntityEmail.m in Sources */, D07BC90C1F2A380D00ED97AA /* TGPaintFaceDetector.m in Sources */, D07BC9F21F2A9A2B00ED97AA /* TGMediaPickerCell.m in Sources */, @@ -4354,6 +4437,7 @@ D0177B211F2641B10044446D /* PGCameraShotMetadata.m in Sources */, D017794A1F20FFF60044446D /* TGMediaAssetMoment.m in Sources */, D07BC9691F2A3F5C00ED97AA /* 
TGCache.m in Sources */, + D04269161F586EC80037ECE8 /* TGVideoMessageCaptureController.m in Sources */, D07BCBF81F2B72DC00ED97AA /* STKQueueEntry.m in Sources */, D07BC81F1F2A2C0B00ED97AA /* PGPhotoSharpenPass.m in Sources */, D02660871F34B9B1000E2DC5 /* TGSearchBar.m in Sources */, @@ -4390,6 +4474,7 @@ D0177AA81F22239A0044446D /* TGModernGalleryController.m in Sources */, D07BC7001F2A1A7700ED97AA /* TGMenuView.m in Sources */, D01779931F2108130044446D /* PSLMDBKeyValueCursor.m in Sources */, + D04269041F586A070037ECE8 /* TGVideoMessageRingView.m in Sources */, D07BCAE41F2B502F00ED97AA /* TGImagePickerController.mm in Sources */, D07BC7791F2A2B3700ED97AA /* TGPhotoEditorHUDView.m in Sources */, D02660771F34A7F8000E2DC5 /* TGLocationViewController.m in Sources */, @@ -4495,6 +4580,7 @@ D0177A191F213B9E0044446D /* TransformationMatrix.cpp in Sources */, D07BC8031F2A2C0B00ED97AA /* PGHighlightsTool.m in Sources */, D0177A981F221DB60044446D /* TGModernGalleryContainerView.m in Sources */, + D04268FA1F58687D0037ECE8 /* TGVideoCameraGLView.m in Sources */, D07BC8611F2A2DBD00ED97AA /* TGMenuSheetView.m in Sources */, D017796E1F2103DB0044446D /* TGPhotoPaintStickerEntity.m in Sources */, D07BC9F61F2A9A2B00ED97AA /* TGMediaPickerGalleryGifItem.m in Sources */, @@ -4522,6 +4608,7 @@ D07BCA041F2A9A2B00ED97AA /* TGMediaPickerGallerySelectedItemsModel.m in Sources */, D017790F1F20F4370044446D /* UIImage+TG.m in Sources */, D07BC6F61F2A19A700ED97AA /* TGCameraSegmentsView.m in Sources */, + D04269061F586A070037ECE8 /* TGVideoMessageScrubber.m in Sources */, D017791B1F20F4A20044446D /* TGImageLuminanceMap.m in Sources */, D07BCB6B1F2B6A5600ED97AA /* TGEmbedSoundCloudPlayerView.m in Sources */, D07BCB631F2B6A5600ED97AA /* TGEmbedPlayerControls.m in Sources */, @@ -4575,6 +4662,7 @@ D07BCB731F2B6A5600ED97AA /* TGEmbedVKPlayerView.m in Sources */, D07BCA0E1F2A9A2B00ED97AA /* TGMediaPickerGalleryVideoTrimView.m in Sources */, D026608B1F34B9F9000E2DC5 /* TGSearchDisplayMixin.m in 
Sources */, + D04269121F586E430037ECE8 /* TGVideoCameraPipeline.m in Sources */, D07BCA501F2A9DDD00ED97AA /* FLAnimatedImage.m in Sources */, D07BC9881F2A472900ED97AA /* TGPhotoStickersSectionHeader.m in Sources */, D0177ADE1F23D9B80044446D /* SGraphObjectNode.m in Sources */, @@ -4676,6 +4764,7 @@ D07BC8231F2A2C0B00ED97AA /* PGShadowsTool.m in Sources */, D01779971F21082E0044446D /* PSLMDBTable.m in Sources */, D07BCC061F2B82D100ED97AA /* TGModernConversationTitleActivityIndicator.m in Sources */, + D04269081F586A070037ECE8 /* TGVideoMessageScrubberThumbnailView.m in Sources */, D07BCB361F2B65F100ED97AA /* TGBuiltinWallpaperInfo.m in Sources */, D07BC90A1F2A380D00ED97AA /* TGPaintFaceDebugView.m in Sources */, D017785A1F1F961D0044446D /* TGImageMediaAttachment.m in Sources */, @@ -4706,6 +4795,7 @@ D01778141F1F961D0044446D /* TGMessageEntityBotCommand.m in Sources */, D01778541F1F961D0044446D /* TGVideoMediaAttachment.m in Sources */, D07BC7A21F2A2B8900ED97AA /* GLProgram.m in Sources */, + D042690E1F586B140037ECE8 /* TGVideoMessageControls.m in Sources */, D07BC9A71F2A49E300ED97AA /* TGItemPreviewView.m in Sources */, D07BCB611F2B6A5600ED97AA /* TGEmbedPIPPullArrowView.m in Sources */, D01779341F20FFAC0044446D /* TGMediaAssetsModernLibrary.m in Sources */, @@ -4733,6 +4823,7 @@ D0177A291F2144700044446D /* TGPhotoEditorAnimation.m in Sources */, D07BCBB61F2B6F6300ED97AA /* CBCoubLoopCompositionMaker.m in Sources */, D07BC9401F2A3DB900ED97AA /* TGMessageImageViewOverlayView.m in Sources */, + D0F7C9C91F55DA83005B255A /* TGVideoCameraGLRenderer.m in Sources */, D07BC80B1F2A2C0B00ED97AA /* PGPhotoEnhanceInterpolationFilter.m in Sources */, D01779651F2103910044446D /* TGPaintUtils.m in Sources */, D01779AB1F210A2C0044446D /* TGMediaAssetImageSignals.m in Sources */, @@ -4751,6 +4842,7 @@ D07BC7241F2A29E400ED97AA /* TGPhotoToolsController.m in Sources */, D07BC7AF1F2A2B8900ED97AA /* GPUImageTwoInputFilter.m in Sources */, D07BCA8D1F2B443700ED97AA /* 
TGMediaAssetsMomentsCollectionLayout.m in Sources */, + D0F7C9C51F55DA49005B255A /* TGVideoCameraMovieRecorder.m in Sources */, D07BC8D21F2A37EC00ED97AA /* TGPhotoPaintSparseView.m in Sources */, D07BCA561F2A9E1600ED97AA /* TGDraggableCollectionView.m in Sources */, D07BCA581F2A9E1600ED97AA /* TGDraggableCollectionViewFlowLayout.m in Sources */, diff --git a/LegacyComponents/LegacyComponents.h b/LegacyComponents/LegacyComponents.h index c0f88b42f5..ba8af45913 100644 --- a/LegacyComponents/LegacyComponents.h +++ b/LegacyComponents/LegacyComponents.h @@ -33,6 +33,7 @@ FOUNDATION_EXPORT const unsigned char LegacyComponentsVersionString[]; #import #import #import +#import #import #import @@ -289,3 +290,4 @@ FOUNDATION_EXPORT const unsigned char LegacyComponentsVersionString[]; #import #import +#import diff --git a/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@2x.png b/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@2x.png new file mode 100644 index 0000000000..6e933e1e40 Binary files /dev/null and b/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@2x.png differ diff --git a/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@3x.png b/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@3x.png new file mode 100644 index 0000000000..167ceb57dd Binary files /dev/null and b/LegacyComponents/Resources/LegacyComponentsResources.bundle/VideoMessageMutedIcon@3x.png differ diff --git a/LegacyComponents/TGLiveUploadInterface.h b/LegacyComponents/TGLiveUploadInterface.h new file mode 100644 index 0000000000..6b4514bbc7 --- /dev/null +++ b/LegacyComponents/TGLiveUploadInterface.h @@ -0,0 +1,8 @@ +#import + +@protocol TGLiveUploadInterface + +- (void)setupWithFileURL:(NSURL *)fileURL; +- (id)fileUpdated:(bool)completed; + +@end diff --git a/LegacyComponents/TGVideoCameraGLRenderer.h 
b/LegacyComponents/TGVideoCameraGLRenderer.h new file mode 100644 index 0000000000..fac303998c --- /dev/null +++ b/LegacyComponents/TGVideoCameraGLRenderer.h @@ -0,0 +1,19 @@ +#import +#import +#import + +@interface TGVideoCameraGLRenderer : NSObject + +@property (nonatomic, readonly) __attribute__((NSObject)) CMFormatDescriptionRef outputFormatDescription; +@property (nonatomic, assign) AVCaptureVideoOrientation orientation; +@property (nonatomic, assign) bool mirror; +@property (nonatomic, assign) CGFloat opacity; +@property (nonatomic, readonly) bool hasPreviousPixelbuffer; + +- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint; +- (void)reset; + +- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer; +- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer; + +@end diff --git a/LegacyComponents/TGVideoCameraGLRenderer.m b/LegacyComponents/TGVideoCameraGLRenderer.m new file mode 100644 index 0000000000..291cc5d8bc --- /dev/null +++ b/LegacyComponents/TGVideoCameraGLRenderer.m @@ -0,0 +1,504 @@ +#import "TGVideoCameraGLRenderer.h" +#import +#import + +#import + +@interface TGVideoCameraGLRenderer () +{ + EAGLContext *_context; + CVOpenGLESTextureCacheRef _textureCache; + CVOpenGLESTextureCacheRef _prevTextureCache; + CVOpenGLESTextureCacheRef _renderTextureCache; + CVPixelBufferPoolRef _bufferPool; + CFDictionaryRef _bufferPoolAuxAttributes; + CMFormatDescriptionRef _outputFormatDescription; + + CVPixelBufferRef _previousPixelBuffer; + + TGPaintShader *_shader; + GLint _frameUniform; + GLint _previousFrameUniform; + GLint _opacityUniform; + GLint _aspectRatioUniform; + GLint _noMirrorUniform; + GLuint _offscreenBufferHandle; + + CGFloat _aspectRatio; + float _textureVertices[8]; +} + +@end + +@implementation TGVideoCameraGLRenderer + +- (instancetype)init +{ + self = [super init]; + if ( self ) + { + _context = [[EAGLContext 
alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + if (!_context) + return nil; + } + return self; +} + +- (void)dealloc +{ + [self deleteBuffers]; +} + +- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint +{ + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription); + CGFloat minSide = MIN(dimensions.width, dimensions.height); + CGFloat maxSide = MAX(dimensions.width, dimensions.height); + CGSize outputSize = CGSizeMake(minSide, minSide); + + _aspectRatio = minSide / maxSide; + [self updateTextureVertices]; + + [self deleteBuffers]; + [self initializeBuffersWithOutputSize:outputSize retainedBufferCountHint:outputRetainedBufferCountHint]; +} + +- (void)setOrientation:(AVCaptureVideoOrientation)orientation +{ + _orientation = orientation; + [self updateTextureVertices]; +} + +- (void)setMirror:(bool)mirror +{ + _mirror = mirror; + [self updateTextureVertices]; +} + +- (void)updateTextureVertices +{ + GLfloat centerOffset = (GLfloat)((1.0f - _aspectRatio) / 2.0f); + + switch (_orientation) + { + case AVCaptureVideoOrientationPortrait: + if (!_mirror) + { + _textureVertices[0] = centerOffset; + _textureVertices[1] = 1.0f; + _textureVertices[2] = centerOffset; + _textureVertices[3] = 0.0f; + _textureVertices[4] = (1.0f - centerOffset); + _textureVertices[5] = 1.0f; + _textureVertices[6] = (1.0f - centerOffset); + _textureVertices[7] = 0.0f; + } + else + { + _textureVertices[0] = (1.0f - centerOffset); + _textureVertices[1] = 0.0f; + _textureVertices[2] = (1.0f - centerOffset); + _textureVertices[3] = 1.0f; + _textureVertices[4] = centerOffset; + _textureVertices[5] = 0.0f; + _textureVertices[6] = centerOffset; + _textureVertices[7] = 1.0f; + } + break; + + case AVCaptureVideoOrientationLandscapeLeft: + if (!_mirror) + { + _textureVertices[0] = (1.0f - centerOffset); + _textureVertices[1] = 1.0f; + _textureVertices[2] = 
centerOffset; + _textureVertices[3] = 1.0f; + _textureVertices[4] = (1.0f - centerOffset); + _textureVertices[5] = 0.0f; + _textureVertices[6] = centerOffset; + _textureVertices[7] = 0.0f; + } + else + { + _textureVertices[0] = centerOffset; + _textureVertices[1] = 0.0f; + _textureVertices[2] = (1.0f - centerOffset); + _textureVertices[3] = 0.0f; + _textureVertices[4] = centerOffset; + _textureVertices[5] = 1.0f; + _textureVertices[6] = (1.0f - centerOffset); + _textureVertices[7] = 1.0f; + } + break; + + case AVCaptureVideoOrientationLandscapeRight: + if (!_mirror) + { + _textureVertices[0] = centerOffset; + _textureVertices[1] = 0.0f; + _textureVertices[2] = (1.0f - centerOffset); + _textureVertices[3] = 0.0f; + _textureVertices[4] = centerOffset; + _textureVertices[5] = 1.0f; + _textureVertices[6] = (1.0f - centerOffset); + _textureVertices[7] = 1.0f; + } + else + { + _textureVertices[0] = (1.0f - centerOffset); + _textureVertices[1] = 1.0f; + _textureVertices[2] = centerOffset; + _textureVertices[3] = 1.0f; + _textureVertices[4] = (1.0f - centerOffset); + _textureVertices[5] = 0.0f; + _textureVertices[6] = centerOffset; + _textureVertices[7] = 0.0f; + } + break; + + default: + break; + } +} + +- (void)reset +{ + [self deleteBuffers]; +} + +- (bool)hasPreviousPixelbuffer +{ + return _previousPixelBuffer != NULL; +} + +- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer +{ + if (_previousPixelBuffer != NULL) + { + CFRelease(_previousPixelBuffer); + _previousPixelBuffer = NULL; + } + + _previousPixelBuffer = previousPixelBuffer; + if (_previousPixelBuffer != NULL) + CFRetain(_previousPixelBuffer); +} + +- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer +{ + static const GLfloat squareVertices[] = + { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + if (_offscreenBufferHandle == 0) + return NULL; + + if (pixelBuffer == NULL) + return NULL; + + const CMVideoDimensions srcDimensions = { 
(int32_t)CVPixelBufferGetWidth(pixelBuffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer) }; + const CMVideoDimensions dstDimensions = CMVideoFormatDescriptionGetDimensions(_outputFormatDescription); + + EAGLContext *oldContext = [EAGLContext currentContext]; + if (oldContext != _context) + { + if (![EAGLContext setCurrentContext:_context]) + return NULL; + } + + CVReturn err = noErr; + CVOpenGLESTextureRef srcTexture = NULL; + CVOpenGLESTextureRef prevTexture = NULL; + CVOpenGLESTextureRef dstTexture = NULL; + CVPixelBufferRef dstPixelBuffer = NULL; + + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &srcTexture); + + if (!srcTexture || err) + goto bail; + + bool hasPreviousTexture = false; + if (_previousPixelBuffer != NULL) + { + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _prevTextureCache, _previousPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, srcDimensions.width, srcDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &prevTexture); + + if (!prevTexture || err) + goto bail; + + hasPreviousTexture = true; + } + + err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer); + if (err == kCVReturnWouldExceedAllocationThreshold) + { + CVOpenGLESTextureCacheFlush(_renderTextureCache, 0); + err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer); + } + + if (err) + goto bail; + + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _renderTextureCache, dstPixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, dstDimensions.width, dstDimensions.height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &dstTexture); + + if (!dstTexture || err) + goto bail; + + glBindFramebuffer(GL_FRAMEBUFFER, _offscreenBufferHandle); + glViewport(0, 0, 
dstDimensions.width, dstDimensions.height); + glUseProgram(_shader.program); + + glActiveTexture(GL_TEXTURE0); + glBindTexture(CVOpenGLESTextureGetTarget(dstTexture), CVOpenGLESTextureGetName(dstTexture)); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget(dstTexture), CVOpenGLESTextureGetName(dstTexture), 0); + + glActiveTexture(GL_TEXTURE1); + glBindTexture(CVOpenGLESTextureGetTarget(srcTexture), CVOpenGLESTextureGetName(srcTexture)); + glUniform1i(_frameUniform, 1); + + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + if (hasPreviousTexture) + { + glActiveTexture(GL_TEXTURE2); + glBindTexture(CVOpenGLESTextureGetTarget(prevTexture), CVOpenGLESTextureGetName(prevTexture)); + glUniform1i(_previousFrameUniform, 2); + + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + } + + glVertexAttribPointer(0, 2, GL_FLOAT, 0, 0, squareVertices); + glEnableVertexAttribArray(0); + glVertexAttribPointer(1, 2, GL_FLOAT, 0, 0, _textureVertices); + glEnableVertexAttribArray(1); + + glUniform1f(_opacityUniform, (GLfloat)_opacity); + glUniform1f(_aspectRatioUniform, (GLfloat)(1.0f / _aspectRatio)); + glUniform1f(_noMirrorUniform, (GLfloat)(_mirror ? 
1 : -1)); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glBindTexture(CVOpenGLESTextureGetTarget(srcTexture), 0); + if (hasPreviousTexture) + glBindTexture(CVOpenGLESTextureGetTarget(prevTexture), 0); + glBindTexture(CVOpenGLESTextureGetTarget(dstTexture), 0); + + glFlush(); + +bail: + if (oldContext != _context) + [EAGLContext setCurrentContext:oldContext]; + + if (srcTexture) + CFRelease(srcTexture); + + if (prevTexture) + CFRelease(prevTexture); + + if (dstTexture) + CFRelease(dstTexture); + + return dstPixelBuffer; +} + +- (CMFormatDescriptionRef)outputFormatDescription +{ + return _outputFormatDescription; +} + +- (bool)initializeBuffersWithOutputSize:(CGSize)outputSize retainedBufferCountHint:(size_t)clientRetainedBufferCountHint +{ + bool success = true; + + EAGLContext *oldContext = [EAGLContext currentContext]; + if (oldContext != _context) + { + if (![EAGLContext setCurrentContext:_context]) + return false; + } + + glDisable(GL_DEPTH_TEST); + + glGenFramebuffers(1, &_offscreenBufferHandle); + glBindFramebuffer(GL_FRAMEBUFFER, _offscreenBufferHandle); + + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_textureCache); + if (err) + { + success = false; + goto bail; + } + + err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_prevTextureCache); + if (err) + { + success = false; + goto bail; + } + + err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_renderTextureCache); + if (err) + { + success = false; + goto bail; + } + + _shader = [[TGPaintShader alloc] initWithVertexShader:@"VideoMessage" fragmentShader:@"VideoMessage" attributes:@[ @"inPosition", @"inTexcoord" ] uniforms:@[ @"texture", @"previousTexture", @"opacity", @"aspectRatio", @"noMirror" ]]; + + _frameUniform = [_shader uniformForKey:@"texture"]; + _previousFrameUniform = [_shader uniformForKey:@"previousTexture"]; + _opacityUniform = [_shader uniformForKey:@"opacity"]; + _aspectRatioUniform = [_shader
uniformForKey:@"aspectRatio"]; + _noMirrorUniform = [_shader uniformForKey:@"noMirror"]; + + size_t maxRetainedBufferCount = clientRetainedBufferCountHint + 1; + _bufferPool = [TGVideoCameraGLRenderer createPixelBufferPoolWithWidth:(int32_t)outputSize.width height:(int32_t)outputSize.height pixelFormat:kCVPixelFormatType_32BGRA maxBufferCount:(int32_t)maxRetainedBufferCount]; + + if (!_bufferPool) + { + success = NO; + goto bail; + } + + _bufferPoolAuxAttributes = [TGVideoCameraGLRenderer createPixelBufferPoolAuxAttribute:(int32_t)maxRetainedBufferCount]; + [TGVideoCameraGLRenderer preallocatePixelBuffersInPool:_bufferPool auxAttributes:_bufferPoolAuxAttributes]; + + CMFormatDescriptionRef outputFormatDescription = NULL; + CVPixelBufferRef testPixelBuffer = NULL; + CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer); + if (!testPixelBuffer) + { + success = false; + goto bail; + } + CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, testPixelBuffer, &outputFormatDescription); + _outputFormatDescription = outputFormatDescription; + CFRelease( testPixelBuffer ); + +bail: + if (!success) + [self deleteBuffers]; + + if (oldContext != _context) + [EAGLContext setCurrentContext:oldContext]; + + return success; +} + +- (void)deleteBuffers +{ + EAGLContext *oldContext = [EAGLContext currentContext]; + if (oldContext != _context) + { + if (![EAGLContext setCurrentContext:_context]) + return; + } + + if (_offscreenBufferHandle) + { + glDeleteFramebuffers(1, &_offscreenBufferHandle); + _offscreenBufferHandle = 0; + } + + if (_shader) + { + [_shader cleanResources]; + _shader = nil; + } + + if (_textureCache) + { + CFRelease(_textureCache); + _textureCache = 0; + } + + if (_prevTextureCache) + { + CFRelease(_prevTextureCache); + _prevTextureCache = 0; + } + + if (_renderTextureCache) + { + CFRelease(_renderTextureCache); + _renderTextureCache = 0; + } + + if (_bufferPool) + { + 
CFRelease(_bufferPool); + _bufferPool = NULL; + } + + if (_bufferPoolAuxAttributes) + { + CFRelease(_bufferPoolAuxAttributes); + _bufferPoolAuxAttributes = NULL; + } + + if (_outputFormatDescription) + { + CFRelease(_outputFormatDescription); + _outputFormatDescription = NULL; + } + + if (oldContext != _context) + [EAGLContext setCurrentContext:oldContext]; +} + ++ (CVPixelBufferPoolRef)createPixelBufferPoolWithWidth:(int32_t)width height:(int32_t)height pixelFormat:(FourCharCode)pixelFormat maxBufferCount:(int32_t) maxBufferCount +{ + CVPixelBufferPoolRef outputPool = NULL; + + NSDictionary *sourcePixelBufferOptions = @ + { + (id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat), + (id)kCVPixelBufferWidthKey : @(width), + (id)kCVPixelBufferHeightKey : @(height), + (id)kCVPixelFormatOpenGLESCompatibility : @true, + (id)kCVPixelBufferIOSurfacePropertiesKey : @{ } + }; + + NSDictionary *pixelBufferPoolOptions = @{ (id)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount) }; + CVPixelBufferPoolCreate(kCFAllocatorDefault, (__bridge CFDictionaryRef)pixelBufferPoolOptions, (__bridge CFDictionaryRef)sourcePixelBufferOptions, &outputPool); + + return outputPool; +} + ++ (CFDictionaryRef)createPixelBufferPoolAuxAttribute:(int32_t)maxBufferCount +{ + return CFBridgingRetain( @{ (id)kCVPixelBufferPoolAllocationThresholdKey : @(maxBufferCount) } ); +} + ++ (void)preallocatePixelBuffersInPool:(CVPixelBufferPoolRef)pool auxAttributes:(CFDictionaryRef)auxAttributes +{ + NSMutableArray *pixelBuffers = [[NSMutableArray alloc] init]; + + while (true) + { + CVPixelBufferRef pixelBuffer = NULL; + OSStatus err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer); + + if (err == kCVReturnWouldExceedAllocationThreshold) + break; + + [pixelBuffers addObject:CFBridgingRelease(pixelBuffer)]; + } + + [pixelBuffers removeAllObjects]; +} + +@end diff --git a/LegacyComponents/TGVideoCameraGLView.h 
b/LegacyComponents/TGVideoCameraGLView.h new file mode 100644 index 0000000000..c8513bd7c7 --- /dev/null +++ b/LegacyComponents/TGVideoCameraGLView.h @@ -0,0 +1,10 @@ +#import +#import + +@interface TGVideoCameraGLView : UIView + +- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer; +- (void)flushPixelBufferCache; +- (void)reset; + +@end diff --git a/LegacyComponents/TGVideoCameraGLView.m b/LegacyComponents/TGVideoCameraGLView.m new file mode 100644 index 0000000000..7c44c8651c --- /dev/null +++ b/LegacyComponents/TGVideoCameraGLView.m @@ -0,0 +1,231 @@ +#import "TGVideoCameraGLView.h" +#import +#import +#import + +#import + +#import "LegacyComponentsInternal.h" + +@interface TGVideoCameraGLView () +{ + EAGLContext *_context; + CVOpenGLESTextureCacheRef _textureCache; + GLint _width; + GLint _height; + GLuint _framebuffer; + GLuint _colorbuffer; + + TGPaintShader *_shader; + GLint _frame; +} +@end + +@implementation TGVideoCameraGLView + ++ (Class)layerClass +{ + return [CAEAGLLayer class]; +} + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super initWithFrame:frame]; + if (self != nil) + { + if (iosMajorVersion() >= 8) + self.contentScaleFactor = [UIScreen mainScreen].nativeScale; + else + self.contentScaleFactor = [UIScreen mainScreen].scale; + + CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer; + eaglLayer.opaque = true; + eaglLayer.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking : @false, kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8 }; + + _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; + if (!_context) + return nil; + } + return self; +} + +- (bool)initializeBuffers +{ + bool success = YES; + + glDisable(GL_DEPTH_TEST); + + glGenFramebuffers(1, &_framebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer); + + glGenRenderbuffers(1, &_colorbuffer ); + glBindRenderbuffer(GL_RENDERBUFFER, _colorbuffer); + + [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer 
*)self.layer]; + + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_width); + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_height); + + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorbuffer); + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) + { + success = false; + goto bail; + } + + + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_textureCache); + if (err) + { + success = false; + goto bail; + } + + _shader = [[TGPaintShader alloc] initWithVertexShader:@"Passthrough" fragmentShader:@"Passthrough" attributes:@[ @"inPosition", @"inTexcoord" ] uniforms:@[ @"texture" ]]; + + _frame = [_shader uniformForKey:@"texture"]; + +bail: + if ( ! success ) { + [self reset]; + } + return success; +} + +- (void)reset +{ + EAGLContext *oldContext = [EAGLContext currentContext]; + if (oldContext != _context) + { + if (![EAGLContext setCurrentContext:_context]) + return; + } + + if (_framebuffer) + { + glDeleteFramebuffers(1, &_framebuffer); + _framebuffer = 0; + } + + if (_colorbuffer) + { + glDeleteRenderbuffers(1, &_colorbuffer); + _colorbuffer = 0; + } + + if (_shader != nil) + { + [_shader cleanResources]; + _shader = nil; + } + + if (_textureCache) + { + CFRelease(_textureCache); + _textureCache = 0; + } + + if (oldContext != _context) + [EAGLContext setCurrentContext:oldContext]; +} + +- (void)dealloc +{ + [self reset]; +} + +- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer +{ + static const GLfloat squareVertices[] = + { + -1.0f, -1.0f, // bottom left + 1.0f, -1.0f, // bottom right + -1.0f, 1.0f, // top left + 1.0f, 1.0f, // top right + }; + + if (pixelBuffer == NULL) + return; + + EAGLContext *oldContext = [EAGLContext currentContext]; + if (oldContext != _context) + { + if (![EAGLContext setCurrentContext:_context]) + return; + } + + if (_framebuffer == 0) + { + bool success = [self initializeBuffers]; + if 
(!success) + return; + } + + size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer); + size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer); + CVOpenGLESTextureRef texture = NULL; + CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, (GLsizei)frameWidth, (GLsizei)frameHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture); + + if (!texture || err) + return; + + glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer); + glViewport(0, 0, _width, _height); + + glUseProgram(_shader.program); + glActiveTexture(GL_TEXTURE0); + glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture)); + glUniform1i(_frame, 0); + + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + glVertexAttribPointer(0, 2, GL_FLOAT, 0, 0, squareVertices); + glEnableVertexAttribArray(0); + + CGSize textureSamplingSize; + CGSize cropScaleAmount = CGSizeMake(self.bounds.size.width / (CGFloat)frameWidth, self.bounds.size.height / (CGFloat)frameHeight); + if (cropScaleAmount.height > cropScaleAmount.width) + { + textureSamplingSize.width = self.bounds.size.width / ( frameWidth * cropScaleAmount.height ); + textureSamplingSize.height = 1.0; + } + else + { + textureSamplingSize.width = 1.0; + textureSamplingSize.height = self.bounds.size.height / ( frameHeight * cropScaleAmount.width ); + } + + GLfloat passThroughTextureVertices[] = + { + (GLfloat)((1.0 - textureSamplingSize.width) / 2.0), (GLfloat)((1.0 + textureSamplingSize.height) / 2.0), // top left + (GLfloat)((1.0 + textureSamplingSize.width) / 2.0), (GLfloat)((1.0 + textureSamplingSize.height) / 2.0), // top right + (GLfloat)((1.0 - textureSamplingSize.width) / 2.0), (GLfloat)((1.0 - textureSamplingSize.height) / 2.0), // 
bottom left + (GLfloat)((1.0 + textureSamplingSize.width) / 2.0), (GLfloat)((1.0 - textureSamplingSize.height) / 2.0), // bottom right + }; + + glVertexAttribPointer(1, 2, GL_FLOAT, 0, 0, passThroughTextureVertices ); + glEnableVertexAttribArray(1); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glBindRenderbuffer(GL_RENDERBUFFER, _colorbuffer); + [_context presentRenderbuffer:GL_RENDERBUFFER]; + + glBindTexture(CVOpenGLESTextureGetTarget(texture), 0); + glBindTexture(GL_TEXTURE_2D, 0); + CFRelease(texture); + + if (oldContext != _context) + [EAGLContext setCurrentContext:oldContext]; +} + +- (void)flushPixelBufferCache +{ + if (_textureCache) + CVOpenGLESTextureCacheFlush(_textureCache, 0); +} + +@end diff --git a/LegacyComponents/TGVideoCameraMovieRecorder.h b/LegacyComponents/TGVideoCameraMovieRecorder.h new file mode 100644 index 0000000000..a78b7baae3 --- /dev/null +++ b/LegacyComponents/TGVideoCameraMovieRecorder.h @@ -0,0 +1,32 @@ +#import +#import + +@protocol TGVideoCameraMovieRecorderDelegate; + +@interface TGVideoCameraMovieRecorder : NSObject + +@property (nonatomic, assign) bool paused; + +- (instancetype)initWithURL:(NSURL *)URL delegate:(id)delegate callbackQueue:(dispatch_queue_t)queue; + +- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings; +- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings; + + +- (void)prepareToRecord; + +- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime; +- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +- (void)finishRecording; + +- (NSTimeInterval)videoDuration; + +@end + +@protocol TGVideoCameraMovieRecorderDelegate +@required +- (void)movieRecorderDidFinishPreparing:(TGVideoCameraMovieRecorder *)recorder; +- (void)movieRecorder:(TGVideoCameraMovieRecorder 
*)recorder didFailWithError:(NSError *)error; +- (void)movieRecorderDidFinishRecording:(TGVideoCameraMovieRecorder *)recorder; +@end diff --git a/LegacyComponents/TGVideoCameraMovieRecorder.m b/LegacyComponents/TGVideoCameraMovieRecorder.m new file mode 100644 index 0000000000..0be65e1b85 --- /dev/null +++ b/LegacyComponents/TGVideoCameraMovieRecorder.m @@ -0,0 +1,468 @@ +#import "TGVideoCameraMovieRecorder.h" +#import + +typedef enum { + TGMovieRecorderStatusIdle = 0, + TGMovieRecorderStatusPreparingToRecord, + TGMovieRecorderStatusRecording, + TGMovieRecorderStatusFinishingWaiting, + TGMovieRecorderStatusFinishingCommiting, + TGMovieRecorderStatusFinished, + TGMovieRecorderStatusFailed +} TGMovieRecorderStatus; + + +@interface TGVideoCameraMovieRecorder () +{ + TGMovieRecorderStatus _status; + + dispatch_queue_t _writingQueue; + + NSURL *_url; + + AVAssetWriter *_assetWriter; + bool _haveStartedSession; + + CMFormatDescriptionRef _audioTrackSourceFormatDescription; + NSDictionary *_audioTrackSettings; + AVAssetWriterInput *_audioInput; + + CMFormatDescriptionRef _videoTrackSourceFormatDescription; + CGAffineTransform _videoTrackTransform; + NSDictionary *_videoTrackSettings; + AVAssetWriterInput *_videoInput; + + __weak id _delegate; + dispatch_queue_t _delegateCallbackQueue; + + CMTime _startTimeStamp; + CMTime _lastAudioTimeStamp; + + CMTime _timeOffset; + + bool _wasPaused; +} +@end + + +@implementation TGVideoCameraMovieRecorder + +- (instancetype)initWithURL:(NSURL *)URL delegate:(id)delegate callbackQueue:(dispatch_queue_t)queue +{ + self = [super init]; + if (self != nil) + { + _writingQueue = dispatch_queue_create("org.telegram.movierecorder.writing", DISPATCH_QUEUE_SERIAL); + _videoTrackTransform = CGAffineTransformIdentity; + _url = URL; + _delegate = delegate; + _delegateCallbackQueue = queue; + } + return self; +} + +- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform 
settings:(NSDictionary *)videoSettings +{ + if (formatDescription == NULL) + return; + + @synchronized (self) + { + if (_status != TGMovieRecorderStatusIdle) + return; + + if (_videoTrackSourceFormatDescription) + return; + + _videoTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain(formatDescription); + _videoTrackTransform = transform; + _videoTrackSettings = [videoSettings copy]; + } +} + +- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings +{ + if (formatDescription == NULL) + return; + + @synchronized (self) + { + if (_status != TGMovieRecorderStatusIdle) + return; + + if (_audioTrackSourceFormatDescription) + return; + + _audioTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain(formatDescription); + _audioTrackSettings = [audioSettings copy]; + } +} + +- (void)prepareToRecord +{ + @synchronized( self ) + { + if (_status != TGMovieRecorderStatusIdle) + return; + + [self transitionToStatus:TGMovieRecorderStatusPreparingToRecord error:nil]; + } + + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^ + { + @autoreleasepool + { + NSError *error = nil; + + [[NSFileManager defaultManager] removeItemAtURL:_url error:NULL]; + + _assetWriter = [[AVAssetWriter alloc] initWithURL:_url fileType:AVFileTypeMPEG4 error:&error]; + + bool succeed = false; + if (error == nil && _videoTrackSourceFormatDescription) + { + succeed = [self setupAssetWriterVideoInputWithSourceFormatDescription:_videoTrackSourceFormatDescription transform:_videoTrackTransform settings:_videoTrackSettings]; + } + + if (error == nil && succeed && _audioTrackSourceFormatDescription) + { + succeed = [self setupAssetWriterAudioInputWithSourceFormatDescription:_audioTrackSourceFormatDescription settings:_audioTrackSettings]; + } + + if (error == nil && succeed) + { + if (![_assetWriter startWriting]) + error = _assetWriter.error; + } + + @synchronized (self) + { + if (error || 
!succeed) + [self transitionToStatus:TGMovieRecorderStatusFailed error:error]; + else + [self transitionToStatus:TGMovieRecorderStatusRecording error:nil]; + } + } + } ); +} + +- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime +{ + CMSampleBufferRef sampleBuffer = NULL; + + CMSampleTimingInfo timingInfo; + timingInfo.duration = kCMTimeInvalid; + timingInfo.decodeTimeStamp = kCMTimeInvalid; + timingInfo.presentationTimeStamp = presentationTime; + + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, _videoTrackSourceFormatDescription, &timingInfo, &sampleBuffer); + + if (sampleBuffer) + { + [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo]; + CFRelease(sampleBuffer); + } +} + +- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeAudio]; +} + +- (void)finishRecording +{ + @synchronized (self) + { + bool shouldFinishRecording = false; + switch (_status) + { + case TGMovieRecorderStatusIdle: + case TGMovieRecorderStatusPreparingToRecord: + case TGMovieRecorderStatusFinishingWaiting: + case TGMovieRecorderStatusFinishingCommiting: + case TGMovieRecorderStatusFinished: + case TGMovieRecorderStatusFailed: + break; + + case TGMovieRecorderStatusRecording: + shouldFinishRecording = true; + break; + } + + if (shouldFinishRecording) + [self transitionToStatus:TGMovieRecorderStatusFinishingWaiting error:nil]; + else + return; + } + + dispatch_async(_writingQueue, ^ + { + @autoreleasepool + { + @synchronized (self) + { + if (_status != TGMovieRecorderStatusFinishingWaiting) + return; + + [self transitionToStatus:TGMovieRecorderStatusFinishingCommiting error:nil]; + } + + [_assetWriter finishWritingWithCompletionHandler:^ + { + @synchronized (self) + { + NSError *error = _assetWriter.error; + if (error) + [self transitionToStatus:TGMovieRecorderStatusFailed error:error]; + else + [self 
transitionToStatus:TGMovieRecorderStatusFinished error:nil]; + } + }]; + } + } ); +} + +- (void)dealloc +{ + if (_audioTrackSourceFormatDescription) + CFRelease(_audioTrackSourceFormatDescription); + + if (_videoTrackSourceFormatDescription) + CFRelease(_videoTrackSourceFormatDescription); +} + +- (void)setPaused:(bool)paused +{ + @synchronized (self) + { + _paused = paused; + if (_paused) + _wasPaused = true; + } +} + +- (CMSampleBufferRef)adjustTimeOfSample:(CMSampleBufferRef)sample byOffset:(CMTime)offset +{ + CMItemCount count; + CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); + CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count); + CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); + for (CMItemCount i = 0; i < count; i++) + { + pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); + pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); + } + CMSampleBufferRef sout; + CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); + free(pInfo); + return sout; +} + +- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType +{ + if (sampleBuffer == NULL) + return; + + @synchronized (self) + { + if (_status < TGMovieRecorderStatusRecording || (mediaType == AVMediaTypeAudio && !_haveStartedSession)) + return; + } + + CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + + CFRetain(sampleBuffer); + dispatch_async(_writingQueue, ^ + { + CMSampleBufferRef buffer = sampleBuffer; + + @autoreleasepool + { + @synchronized (self) + { + if (_status > TGMovieRecorderStatusFinishingWaiting) + { + CFRelease(sampleBuffer); + return; + } + } + + if (!_haveStartedSession) + { + [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; + _haveStartedSession = true; + + _startTimeStamp = timestamp; + } + + AVAssetWriterInput *input = (mediaType == AVMediaTypeVideo) ? 
_videoInput : _audioInput; + @synchronized (self) + { + if (_wasPaused) + { + if (input == _videoInput) + { + CFRelease(buffer); + return; + } + + _wasPaused = false; + + CMTime pts = CMSampleBufferGetPresentationTimeStamp(buffer); + CMTime last = _lastAudioTimeStamp; + if (last.flags & kCMTimeFlags_Valid) + { + CMTime offset = CMTimeSubtract(pts, last); + if (_timeOffset.value == 0) + _timeOffset = offset; + else + _timeOffset = CMTimeAdd(_timeOffset, offset); + } + _lastAudioTimeStamp.flags = 0; + } + } + + if (_timeOffset.value > 0 && input == _videoInput) + { + buffer = [self adjustTimeOfSample:buffer byOffset:_timeOffset]; + CFRelease(sampleBuffer); + } + + CMTime pts = CMSampleBufferGetPresentationTimeStamp(buffer); + CMTime duration = CMSampleBufferGetDuration(buffer); + if (duration.value > 0) + pts = CMTimeAdd(pts, duration); + + if (input == _audioInput) + _lastAudioTimeStamp = pts; + + if (input.readyForMoreMediaData) + { + if (![input appendSampleBuffer:buffer]) + { + NSError *error = _assetWriter.error; + @synchronized (self) + { + [self transitionToStatus:TGMovieRecorderStatusFailed error:error]; + } + } + } + CFRelease(buffer); + } + }); +} + +- (void)transitionToStatus:(TGMovieRecorderStatus)newStatus error:(NSError *)error +{ + bool shouldNotifyDelegate = false; + + if (newStatus != _status) + { + if ((newStatus == TGMovieRecorderStatusFinished) || (newStatus == TGMovieRecorderStatusFailed)) + { + shouldNotifyDelegate = true; + + dispatch_async(_writingQueue, ^ + { + [self teardownAssetWriterAndInputs]; + if (newStatus == TGMovieRecorderStatusFailed) + { + [[NSFileManager defaultManager] removeItemAtURL:_url error:NULL]; + } + }); + } + else if (newStatus == TGMovieRecorderStatusRecording) + { + shouldNotifyDelegate = true; + } + + _status = newStatus; + } + + if (shouldNotifyDelegate) + { + dispatch_async(_delegateCallbackQueue, ^ + { + @autoreleasepool + { + switch ( newStatus ) + { + case TGMovieRecorderStatusRecording: + [_delegate movieRecorderDidFinishPreparing:self]; + 
break; + + case TGMovieRecorderStatusFinished: + [_delegate movieRecorderDidFinishRecording:self]; + break; + + case TGMovieRecorderStatusFailed: + [_delegate movieRecorder:self didFailWithError:error]; + break; + + default: + break; + } + } + }); + } +} + +- (bool)setupAssetWriterAudioInputWithSourceFormatDescription:(CMFormatDescriptionRef)audioFormatDescription settings:(NSDictionary *)audioSettings +{ + if ([_assetWriter canApplyOutputSettings:audioSettings forMediaType:AVMediaTypeAudio]) + { + _audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings sourceFormatHint:audioFormatDescription]; + _audioInput.expectsMediaDataInRealTime = true; + + if ([_assetWriter canAddInput:_audioInput]) + { + [_assetWriter addInput:_audioInput]; + } + else + { + return false; + } + } + else + { + return false; + } + + return true; +} + +- (bool)setupAssetWriterVideoInputWithSourceFormatDescription:(CMFormatDescriptionRef)videoFormatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings +{ + if ([_assetWriter canApplyOutputSettings:videoSettings forMediaType:AVMediaTypeVideo]) + { + _videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings sourceFormatHint:videoFormatDescription]; + _videoInput.expectsMediaDataInRealTime = true; + _videoInput.transform = transform; + + if ([_assetWriter canAddInput:_videoInput]) + { + [_assetWriter addInput:_videoInput]; + } + else + { + return false; + } + } + else + { + return false; + } + + return true; +} + +- (void)teardownAssetWriterAndInputs +{ + _videoInput = nil; + _audioInput = nil; + _assetWriter = nil; +} + +- (NSTimeInterval)videoDuration +{ + return CMTimeGetSeconds(CMTimeSubtract(_lastAudioTimeStamp, _startTimeStamp)); +} + +@end diff --git a/LegacyComponents/TGVideoCameraPipeline.h b/LegacyComponents/TGVideoCameraPipeline.h new file mode 100644 index 0000000000..46cbde9207 --- /dev/null +++ 
b/LegacyComponents/TGVideoCameraPipeline.h @@ -0,0 +1,47 @@ +#import +#import +#import + +@protocol TGVideoCameraPipelineDelegate; + + +@interface TGVideoCameraPipeline : NSObject + +@property (nonatomic, assign) AVCaptureVideoOrientation orientation; +@property (nonatomic, assign) bool renderingEnabled; +@property (nonatomic, readonly) NSTimeInterval videoDuration; +@property (nonatomic, readonly) CGAffineTransform videoTransform; +@property (nonatomic, readonly) bool isRecording; + +@property (nonatomic, copy) void (^micLevel)(CGFloat); + +- (instancetype)initWithDelegate:(id)delegate position:(AVCaptureDevicePosition)position callbackQueue:(dispatch_queue_t)queue liveUploadInterface:(id)liveUploadInterface; + +- (void)startRunning; +- (void)stopRunning; + +- (void)startRecording:(NSURL *)url preset:(TGMediaVideoConversionPreset)preset liveUpload:(bool)liveUpload; +- (void)stopRecording; + +- (CGAffineTransform)transformForOrientation:(AVCaptureVideoOrientation)orientation; + +- (void)setCameraPosition:(AVCaptureDevicePosition)position; ++ (bool)cameraPositionChangeAvailable; + +@end + + +@protocol TGVideoCameraPipelineDelegate +@required + +- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline didStopRunningWithError:(NSError *)error; + +- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer; +- (void)capturePipelineDidRunOutOfPreviewBuffers:(TGVideoCameraPipeline *)capturePipeline; + +- (void)capturePipelineRecordingDidStart:(TGVideoCameraPipeline *)capturePipeline; +- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline recordingDidFailWithError:(NSError *)error; +- (void)capturePipelineRecordingWillStop:(TGVideoCameraPipeline *)capturePipeline; +- (void)capturePipelineRecordingDidStop:(TGVideoCameraPipeline *)capturePipeline duration:(NSTimeInterval)duration liveUploadData:(id)liveUploadData thumbnailImage:(UIImage *)thumbnailImage thumbnails:(NSDictionary 
*)thumbnails;

@end
diff --git a/LegacyComponents/TGVideoCameraPipeline.m b/LegacyComponents/TGVideoCameraPipeline.m
new file mode 100644
index 0000000000..487653fa25
--- /dev/null
+++ b/LegacyComponents/TGVideoCameraPipeline.m
@@ -0,0 +1,996 @@
#import "TGVideoCameraPipeline.h"

#import "LegacyComponentsInternal.h"

// NOTE(review): the angle-bracket targets of the following framework imports
// were lost in extraction (likely AVFoundation, CoreMedia, Accelerate, etc.) —
// restore from the original file before building.
#import
#import
#import
#import

#import

#import
#import

// Recorder state machine. Transitions are driven by -startRecording:preset:liveUpload:,
// -stopRecording, and the TGVideoCameraMovieRecorder delegate callbacks; all
// transitions go through -transitionToRecordingStatus:error:.
typedef enum {
    TGVideoCameraRecordingStatusIdle = 0,
    TGVideoCameraRecordingStatusStartingRecording,
    TGVideoCameraRecordingStatusRecording,
    TGVideoCameraRecordingStatusStoppingRecording,
} TGVideoCameraRecordingStatus;

// Hint passed to the GL renderer for how many rendered pixel buffers may be
// retained downstream at once.
const NSInteger TGVideoCameraRetainedBufferCount = 16;

@interface TGVideoCameraPipeline ()
{
    AVCaptureSession *_captureSession;

    // Video capture chain.
    AVCaptureDevice *_videoDevice;
    AVCaptureConnection *_videoConnection;
    AVCaptureDeviceInput *_videoInput;
    AVCaptureVideoDataOutput *_videoOutput;

    // Audio capture chain.
    AVCaptureDevice *_audioDevice;
    AVCaptureConnection *_audioConnection;
    AVCaptureDeviceInput *_audioInput;
    AVCaptureAudioDataOutput *_audioOutput;

    AVCaptureVideoOrientation _videoBufferOrientation;
    AVCaptureDevicePosition _preferredPosition;
    bool _running;
    // Set when the session stopped because the device became unavailable in
    // background; checked by -applicationWillEnterForeground to restart it.
    bool _startCaptureSessionOnEnteringForeground;
    id _applicationWillEnterForegroundObserver;

    dispatch_queue_t _audioDataOutputQueue;
    dispatch_queue_t _videoDataOutputQueue;

    TGVideoCameraGLRenderer *_renderer;
    bool _renderingEnabled;

    TGVideoCameraMovieRecorder *_recorder;
    NSURL *_recordingURL;
    TGVideoCameraRecordingStatus _recordingStatus;
    UIImage *_recordingThumbnail;

    __weak id _delegate;
    dispatch_queue_t _delegateCallbackQueue;

    NSTimeInterval _resultDuration;

    // Last source frame, retained so the renderer can cross-fade over it when
    // the camera position is switched (see -renderVideoSampleBuffer:).
    CVPixelBufferRef _previousPixelBuffer;
    int32_t _repeatingCount;

    NSMutableData *_audioBuffer;
    // Running peak over incoming 16-bit audio samples; drives the mic level meter.
    int16_t _micLevelPeak;
    int _micLevelPeakCount;

    TGMediaVideoConversionPreset _preset;

    bool _liveUpload;
    id _watcher;
    id _liveUploadData;

    // Guards _startRecordAfterAudioBuffer, touched from both the caller of
    // -startRecording:… and the audio capture queue.
    // NOTE(review): OSSpinLock is deprecated and unsafe under priority
    // inversion on iOS; consider os_unfair_lock — confirm deployment target.
    OSSpinLock _recordLock;
    bool _startRecordAfterAudioBuffer;

    CVPixelBufferRef _currentPreviewPixelBuffer;
    // Keyed by @(time offset) -> UIImage; built while recording, consumed by the scrubber.
    NSMutableDictionary *_thumbnails;

    NSTimeInterval _firstThumbnailTime;
    NSTimeInterval _previousThumbnailTime;

    id _liveUploadInterface;
}

// __attribute__((NSObject)) makes ARC retain/release these CF refs like ObjC objects.
@property (nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputVideoFormatDescription;
@property (nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputAudioFormatDescription;

@end

@implementation TGVideoCameraPipeline

// Designated initializer. Session construction is deferred to -startRunning;
// delegate callbacks are delivered asynchronously on `queue`.
- (instancetype)initWithDelegate:(id)delegate position:(AVCaptureDevicePosition)position callbackQueue:(dispatch_queue_t)queue liveUploadInterface:(id)liveUploadInterface
{
    self = [super init];
    if (self != nil)
    {
        _liveUploadInterface = liveUploadInterface;
        _preferredPosition = position;

        _videoDataOutputQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.video", DISPATCH_QUEUE_SERIAL);
        // Video frames are latency-sensitive; target the high-priority global queue.
        dispatch_set_target_queue(_videoDataOutputQueue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));

        _renderer = [[TGVideoCameraGLRenderer alloc] init];

        _delegate = delegate;
        _delegateCallbackQueue = queue;

        _thumbnails = [[NSMutableDictionary alloc] init];
    }
    return self;
}

- (void)dealloc
{
    // Non-memory cleanup only: unregisters notification observers.
    [self destroyCaptureSession];
}

// Builds the capture session if needed and starts it, on the shared camera queue.
- (void)startRunning
{
    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        [self setupCaptureSession];

        if (_captureSession != nil)
        {
            [_captureSession startRunning];
            _running = true;
        }
    }];
}

// Stops any in-flight recording, then stops and tears down the session.
- (void)stopRunning
{
    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        _running = false;

        [self stopRecording];

        [_captureSession stopRunning];
        [self captureSessionDidStopRunning];
        [self destroyCaptureSession];
    }];
}

// One-time construction of the AVCaptureSession with audio + video inputs and
// data outputs. Must run on the camera queue. No-op if already built.
- (void)setupCaptureSession
{
    if (_captureSession != nil)
        return;

    _captureSession = [[AVCaptureSession alloc] init];

    // name:nil — observe every notification posted by this session object.
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
    _applicationWillEnterForegroundObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[[LegacyComponentsGlobals provider] applicationInstance] queue:nil usingBlock:^(__unused NSNotification *note)
    {
        [self applicationWillEnterForeground];
    }];

    _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:nil];
    if ([_captureSession canAddInput:_audioInput])
        [_captureSession addInput:_audioInput];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    _audioDataOutputQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.audio", DISPATCH_QUEUE_SERIAL);
    [_audioOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];

    if ([_captureSession canAddOutput:_audioOutput])
        [_captureSession addOutput:_audioOutput];

    _audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];


    // Pick the device matching the preferred position, falling back to the default camera.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if (device.position == _preferredPosition)
        {
            videoDevice = device;
            break;
        }
    }

    // Mirror the front camera so the preview matches what the user sees.
    _renderer.mirror = (videoDevice.position == AVCaptureDevicePositionFront);
    // _orientation is presumably a property declared in the header — not visible here.
    _renderer.orientation = _orientation;

    NSError *videoDeviceError = nil;
    _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&videoDeviceError];
    if ([_captureSession canAddInput:_videoInput])
    {
        [_captureSession addInput:_videoInput];
        _videoDevice = videoDevice;
    }
    else
    {
        // Without a video input the pipeline is unusable; report and bail out.
        [self handleNonRecoverableCaptureSessionRuntimeError:videoDeviceError];
        return;
    }

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    _videoOutput.alwaysDiscardsLateVideoFrames = false;
    // BGRA output feeds the GL renderer directly without pixel-format conversion.
    _videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [_videoOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue];

    if ([_captureSession canAddOutput:_videoOutput])
        [_captureSession addOutput:_videoOutput];

    _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];

    // 640x480 is sufficient for round video messages; fall back to Medium.
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
    else
        _captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    [self _configureFPS];

    [self _enableLowLightBoost];
    [self _enableVideoStabilization];

    _videoBufferOrientation = _videoConnection.videoOrientation;

    return;
}

// Unregisters observers and drops the session reference. Safe to call repeatedly.
- (void)destroyCaptureSession
{
    if (_captureSession)
    {
        [[NSNotificationCenter defaultCenter] removeObserver:self name:nil object:_captureSession];

        [[NSNotificationCenter defaultCenter] removeObserver:_applicationWillEnterForegroundObserver];
        _applicationWillEnterForegroundObserver = nil;

        _captureSession = nil;
    }
}

// Routes session interruption / runtime-error notifications onto the camera
// queue and decides whether the error is recoverable.
- (void)captureSessionNotification:(NSNotification *)notification
{
    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        if ([notification.name isEqualToString:AVCaptureSessionWasInterruptedNotification])
        {
            [self captureSessionDidStopRunning];
        }
        else if ([notification.name isEqualToString:AVCaptureSessionRuntimeErrorNotification])
        {
            [self captureSessionDidStopRunning];

            NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
            if (error.code == AVErrorDeviceIsNotAvailableInBackground)
            {
                // Backgrounded: remember to restart once the app returns to foreground.
                if (_running)
                    _startCaptureSessionOnEnteringForeground = true;
            }
            else if (error.code == AVErrorMediaServicesWereReset)
            {
                [self handleRecoverableCaptureSessionRuntimeError:error];
            }
            else
            {
                [self handleNonRecoverableCaptureSessionRuntimeError:error];
            }
        }
    }];
}

// Media services were reset — the session object remains valid; just restart it.
- (void)handleRecoverableCaptureSessionRuntimeError:(NSError *)__unused error
{
    if (_running)
        [_captureSession startRunning];
}

// Fatal session error: tear everything down and report to the delegate.
- (void)handleNonRecoverableCaptureSessionRuntimeError:(NSError *)error
{
    _running = false;
    [self destroyCaptureSession];

    [self invokeDelegateCallbackAsync:^
    {
        [_delegate capturePipeline:self didStopRunningWithError:error];
    }];
}

- (void)captureSessionDidStopRunning
{
    [self stopRecording];
    [self destroyVideoPipeline];
}

// Restarts the session if it was stopped by backgrounding while logically running.
- (void)applicationWillEnterForeground
{
    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        if (_startCaptureSessionOnEnteringForeground)
        {
            _startCaptureSessionOnEnteringForeground = false;
            if (_running)
                [_captureSession startRunning];
        }
    }];
}

// Lazily initializes the GL renderer from the first video frame's format.
- (void)setupVideoPipelineWithInputFormatDescription:(CMFormatDescriptionRef)inputFormatDescription
{
    [_renderer prepareForInputWithFormatDescription:inputFormatDescription outputRetainedBufferCountHint:TGVideoCameraRetainedBufferCount];
    self.outputVideoFormatDescription = _renderer.outputFormatDescription;
}

// Tears down the renderer state. dispatch_sync onto the video queue ensures no
// frame callback is mid-flight while we release buffers.
- (void)destroyVideoPipeline
{
    dispatch_sync(_videoDataOutputQueue, ^
    {
        if (self.outputVideoFormatDescription == NULL)
            return;

        self.outputVideoFormatDescription = NULL;
        [_renderer reset];

        if (_currentPreviewPixelBuffer != NULL)
        {
            CFRelease(_currentPreviewPixelBuffer);
            _currentPreviewPixelBuffer = NULL;
        }
    });
}

- (void)videoPipelineDidRunOutOfBuffers
{
    [self invokeDelegateCallbackAsync:^
    {
        [_delegate capturePipelineDidRunOutOfPreviewBuffers:self];
    }];
}

// _renderingEnabled is read on the video queue; the renderer object doubles as its lock.
- (void)setRenderingEnabled:(bool)renderingEnabled
{
    @synchronized (_renderer)
    {
        _renderingEnabled = renderingEnabled;
    }
}

- (bool)renderingEnabled
{
    @synchronized (_renderer)
    {
        return _renderingEnabled;
    }
}

// AVCapture{Video,Audio}DataOutput delegate: dispatches frames to the video
// renderer or the audio recorder/level meter depending on the connection.
// Video arrives on _videoDataOutputQueue, audio on _audioDataOutputQueue.
- (void)captureOutput:(AVCaptureOutput *)__unused captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);

    if (connection == _videoConnection)
    {
        // First frame initializes the render pipeline; subsequent frames are rendered.
        if (self.outputVideoFormatDescription == NULL)
            [self setupVideoPipelineWithInputFormatDescription:formatDescription];
        else
            [self renderVideoSampleBuffer:sampleBuffer];
    }
    else if (connection == _audioConnection)
    {
        self.outputAudioFormatDescription = formatDescription;

        @synchronized (self)
        {
            if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
                [_recorder appendAudioSampleBuffer:sampleBuffer];
        }

        CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
        uint32_t numSamplesInBuffer = (uint32_t)CMSampleBufferGetNumSamples(sampleBuffer);

        AudioBufferList audioBufferList;

        // Retained variant: on success blockBuffer holds a +1 reference that we
        // must CFRelease below; the buffer list pointers stay valid meanwhile.
        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer );

        for (uint32_t bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++)
        {
            int16_t *samples = (int16_t *)audioBufferList.mBuffers[bufferCount].mData;
            [self processWaveformPreview:samples count:numSamplesInBuffer];
        }

        CFRelease(blockBuffer);

        // If -startRecording:… was called before any audio format was known, it
        // deferred itself; retry now that the audio format description is set.
        OSSpinLockLock(&_recordLock);
        if (_startRecordAfterAudioBuffer)
        {
            _startRecordAfterAudioBuffer = false;
            TGDispatchOnMainThread(^
            {
                [self startRecording:_recordingURL preset:_preset liveUpload:_liveUpload];
            });
        }
        OSSpinLockUnlock(&_recordLock);
    }
}

// Folds incoming samples into a running absolute peak; every 1200 samples the
// peak (normalized against 4000) is reported through the _micLevel block.
- (void)processWaveformPreview:(int16_t const *)samples count:(int)count {
    for (int i = 0; i < count; i++) {
        int16_t sample = samples[i];
        if (sample < 0) {
            sample = -sample;
        }

        if (_micLevelPeak < sample) {
            _micLevelPeak = sample;
        }
        _micLevelPeakCount++;

        if (_micLevelPeakCount >= 1200) {
            if (_micLevel) {
                CGFloat level = (CGFloat)_micLevelPeak / 4000.0;
                _micLevel(level);
            }
            _micLevelPeak = 0;
            _micLevelPeakCount = 0;
        }
    }
}

// Copies a BGRA pixel buffer into a UIImage (used for the recording thumbnail).
- (UIImage *)imageFromImageBuffer:(CVPixelBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // ByteOrder32Little + premultiplied-first matches the BGRA capture format.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    return image;
}


// Core per-frame path: renders the source frame through the GL renderer,
// handles the cross-fade after a camera flip (frame repeating + opacity ramp),
// captures periodic scrubber thumbnails, feeds the recorder, and pushes the
// rendered buffer to the preview.
- (void)renderVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVPixelBufferRef renderedPixelBuffer = NULL;
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    @synchronized (_renderer)
    {
        if (_renderingEnabled)
        {
            bool repeatingFrames = false;
            @synchronized (self)
            {
                // Recorder was paused for a camera switch: resume and blend the
                // last pre-switch frame over the next ~11 frames.
                if (_recorder.paused && _previousPixelBuffer != NULL)
                {
                    _recorder.paused = false;
                    _repeatingCount = 11;

                    [_renderer setPreviousPixelBuffer:_previousPixelBuffer];
                    CFRelease(_previousPixelBuffer);
                    _previousPixelBuffer = NULL;
                }

                if (_repeatingCount > 0)
                {
                    repeatingFrames = true;
                    _repeatingCount--;
                }

                // Fade the old frame out linearly over the last 10 repeats.
                CGFloat opacity = 1.0f;
                if (_repeatingCount < 10)
                    opacity = _repeatingCount / 9.0f;

                [_renderer setOpacity:opacity];

                if (_repeatingCount == 0)
                    [_renderer setPreviousPixelBuffer:NULL];
            }

            CVPixelBufferRef sourcePixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            renderedPixelBuffer = [_renderer copyRenderedPixelBuffer:sourcePixelBuffer];

            @synchronized (self)
            {
                // First rendered frame of a recording becomes its thumbnail.
                if (_recordingStatus == TGVideoCameraRecordingStatusRecording && _recordingThumbnail == nil)
                {
                    UIImage *image = [self imageFromImageBuffer:sourcePixelBuffer];
                    _recordingThumbnail = image;
                }

                // Scrubber thumbnails: one immediately, then at an interval that
                // grows with the recording (1/10th of elapsed time, min 0.1s).
                if (_recordingStatus == TGVideoCameraRecordingStatusRecording && !repeatingFrames)
                {
                    NSTimeInterval currentTime = CMTimeGetSeconds(timestamp);
                    if (_previousThumbnailTime < DBL_EPSILON)
                    {
                        _firstThumbnailTime = currentTime;
                        _previousThumbnailTime = currentTime;

                        [self storeThumbnailWithSampleBuffer:sampleBuffer time:0.0 mirror:_renderer.mirror];
                    }
                    else
                    {
                        NSTimeInterval relativeThumbnailTime = _previousThumbnailTime - _firstThumbnailTime;
                        NSTimeInterval interval = MAX(0.1, relativeThumbnailTime / 10.0);

                        if (currentTime - _previousThumbnailTime >= interval)
                        {
                            [self storeThumbnailWithSampleBuffer:sampleBuffer time:relativeThumbnailTime mirror:_renderer.mirror];
                            _previousThumbnailTime = currentTime;
                        }
                    }
                }

                // Keep the latest real (non-repeated) frame for a future camera flip.
                if (!repeatingFrames)
                {
                    if (_previousPixelBuffer != NULL)
                    {
                        CFRelease(_previousPixelBuffer);
                        _previousPixelBuffer = NULL;
                    }

                    _previousPixelBuffer = sourcePixelBuffer;
                    CFRetain(sourcePixelBuffer);
                }
            }
        }
        else
        {
            return;
        }
    }

    if (renderedPixelBuffer)
    {
        @synchronized (self)
        {
            [self outputPreviewPixelBuffer:renderedPixelBuffer];

            if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
                [_recorder appendVideoPixelBuffer:renderedPixelBuffer withPresentationTime:timestamp];
        }

        CFRelease(renderedPixelBuffer);
    }
    else
    {
        // Renderer exhausted its buffer pool; notify so the client can drain.
        [self videoPipelineDidRunOutOfBuffers];
    }
}

// Publishes a rendered frame to the delegate for preview. Only the most recent
// frame is kept; the delegate callback swaps it out under @synchronized(self).
- (void)outputPreviewPixelBuffer:(CVPixelBufferRef)previewPixelBuffer
{
    if (_currentPreviewPixelBuffer != NULL)
    {
        CFRelease(_currentPreviewPixelBuffer);
        _currentPreviewPixelBuffer = NULL;
    }

    if (_previousPixelBuffer != NULL)
    {
        _currentPreviewPixelBuffer = previewPixelBuffer;
        CFRetain(_currentPreviewPixelBuffer);
    }

    [self invokeDelegateCallbackAsync:^
    {
        CVPixelBufferRef currentPreviewPixelBuffer = NULL;
        @synchronized (self)
        {
            // Take ownership of the pending frame (if any) and clear the slot.
            currentPreviewPixelBuffer = _currentPreviewPixelBuffer;
            if (currentPreviewPixelBuffer != NULL)
            {
                CFRetain(currentPreviewPixelBuffer);
                if (_currentPreviewPixelBuffer != NULL)
                {
                    CFRelease(_currentPreviewPixelBuffer);
                    _currentPreviewPixelBuffer = NULL;
                }
            }
        }

        if (currentPreviewPixelBuffer != NULL)
        {
            [_delegate capturePipeline:self previewPixelBufferReadyForDisplay:currentPreviewPixelBuffer];
            CFRelease(currentPreviewPixelBuffer);
        }
    }];
}

// Center-crops the frame to a square, scales it to 66x66 with vImage, and
// stores it in _thumbnails keyed by the recording-relative time.
- (void)storeThumbnailWithSampleBuffer:(CMSampleBufferRef)sampleBuffer time:(NSTimeInterval)time mirror:(bool)mirror
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Square crop centered horizontally (assumes landscape buffer, width >= height).
    size_t cropX = (size_t)((width - height) / 2.0);
    size_t cropY = 0;
    size_t cropWidth = height;
    size_t cropHeight = height;
    size_t outWidth = 66;
    size_t outHeight = 66;

    CVPixelBufferLockBaseAddress(imageBuffer,0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    vImage_Buffer inBuff;
    inBuff.height = cropHeight;
    inBuff.width = cropWidth;
    inBuff.rowBytes = bytesPerRow;

    // Offset the base pointer to the crop origin (4 bytes per BGRA pixel).
    unsigned long startpos = cropY * bytesPerRow + 4 * cropX;
    inBuff.data = baseAddress + startpos;

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreateWithData(NULL, outWidth, outHeight, 8, outWidth * 4, colorSpace, kCGImageByteOrder32Little | kCGImageAlphaPremultipliedFirst, NULL, nil);

    unsigned char *outImg = CGBitmapContextGetData(context);
    vImage_Buffer outBuff = {outImg, outHeight, outWidth, 4 * outWidth};

    vImage_Error err = vImageScale_ARGB8888(&inBuff, &outBuff, NULL, 0);
    if (err != kvImageNoError)
        TGLegacyLog(@"Video Message thumbnail generation error %ld", err);

    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // The orientation flag rotates (and un-mirrors) the thumbnail at display time.
    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0f orientation:mirror ? UIImageOrientationLeftMirrored : UIImageOrientationRight];
    CGImageRelease(cgImage);

    _thumbnails[@(time)] = image;
}

// Begins recording to `url`. If no audio format has been seen yet, defers
// itself until the first audio buffer arrives (see -captureOutput:…).
- (void)startRecording:(NSURL *)url preset:(TGMediaVideoConversionPreset)preset liveUpload:(bool)liveUpload
{
    _recordingURL = url;
    _preset = preset;
    _liveUpload = liveUpload;

    OSSpinLockLock(&_recordLock);
    if (self.outputAudioFormatDescription == NULL)
    {
        _startRecordAfterAudioBuffer = true;
        OSSpinLockUnlock(&_recordLock);
        return;
    }
    OSSpinLockUnlock(&_recordLock);

    @synchronized (self)
    {
        if (_recordingStatus != TGVideoCameraRecordingStatusIdle)
            return;

        [self transitionToRecordingStatus:TGVideoCameraRecordingStatusStartingRecording error:nil];
    }

    dispatch_queue_t callbackQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.recorder", DISPATCH_QUEUE_SERIAL);
    TGVideoCameraMovieRecorder *recorder = [[TGVideoCameraMovieRecorder alloc] initWithURL:_recordingURL delegate:self callbackQueue:callbackQueue];

    NSDictionary *audioSettings = [TGMediaVideoConversionPresetSettings audioSettingsForPreset:preset];
    [recorder addAudioTrackWithSourceFormatDescription:self.outputAudioFormatDescription settings:audioSettings];

    // _videoTransform is presumably a property declared in the header; note the
    // recorder track below is added with the identity transform, not this one.
    _videoTransform = [self transformForOrientation:self.orientation];

    CGSize size = [TGMediaVideoConversionPresetSettings maximumSizeForPreset:preset];
    NSDictionary *videoSettings = [TGMediaVideoConversionPresetSettings videoSettingsForPreset:preset dimensions:size];
    [recorder addVideoTrackWithSourceFormatDescription:self.outputVideoFormatDescription transform:CGAffineTransformIdentity settings:videoSettings];
    _recorder = recorder;

    [recorder prepareToRecord];
}

// Requests the recorder to finish; completion arrives via
// -movieRecorderDidFinishRecording:. No-op unless currently recording.
- (void)stopRecording
{
    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        @synchronized (self)
        {
            if (_recordingStatus != TGVideoCameraRecordingStatusRecording)
                return;

            [self transitionToRecordingStatus:TGVideoCameraRecordingStatusStoppingRecording error:nil];
        }

        _resultDuration = _recorder.videoDuration;
        [_recorder finishRecording];
    }];
}

- (bool)isRecording
{
    return _recorder != nil && !_recorder.paused;
}

#pragma mark - TGVideoCameraMovieRecorder delegate

- (void)movieRecorderDidFinishPreparing:(TGVideoCameraMovieRecorder *)__unused recorder
{
    @synchronized (self)
    {
        if (_recordingStatus != TGVideoCameraRecordingStatusStartingRecording)
            return;

        [self transitionToRecordingStatus:TGVideoCameraRecordingStatusRecording error:nil];

        if (_liveUpload)
        {
            _watcher = _liveUploadInterface;
            [_watcher setupWithFileURL:_recordingURL];
        }
    }
}

- (void)movieRecorder:(TGVideoCameraMovieRecorder *)__unused recorder didFailWithError:(NSError *)error
{
    @synchronized (self)
    {
        _recorder = nil;
        [self transitionToRecordingStatus:TGVideoCameraRecordingStatusIdle error:error];
    }
}

- (void)movieRecorderDidFinishRecording:(TGVideoCameraMovieRecorder *)__unused recorder
{
    @synchronized (self)
    {
        if (_recordingStatus != TGVideoCameraRecordingStatusStoppingRecording)
            return;
    }

    _recorder = nil;

    if (_watcher != nil)
        _liveUploadData = [_watcher fileUpdated:true];

    // NOTE(review): unlike the other delegate callbacks, this transition runs
    // outside @synchronized(self) — confirm this cannot race with startRecording.
    [self transitionToRecordingStatus:TGVideoCameraRecordingStatusIdle error:nil];
}

// Central status-transition point; maps (old, new) status pairs to the
// corresponding delegate notification, dispatched asynchronously.
- (void)transitionToRecordingStatus:(TGVideoCameraRecordingStatus)newStatus error:(NSError *)error
{
    TGVideoCameraRecordingStatus oldStatus = _recordingStatus;
    _recordingStatus = newStatus;

    if (newStatus != oldStatus)
    {
        dispatch_block_t delegateCallbackBlock = nil;

        if (error && newStatus == TGVideoCameraRecordingStatusIdle)
        {
            delegateCallbackBlock = ^{ [_delegate capturePipeline:self recordingDidFailWithError:error]; };
        }
        else
        {
            if ((oldStatus == TGVideoCameraRecordingStatusStartingRecording) && (newStatus == TGVideoCameraRecordingStatusRecording))
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStart:self]; };
            else if ((oldStatus == TGVideoCameraRecordingStatusRecording) && (newStatus == TGVideoCameraRecordingStatusStoppingRecording))
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingWillStop:self]; };
            else if ((oldStatus == TGVideoCameraRecordingStatusStoppingRecording) && (newStatus == TGVideoCameraRecordingStatusIdle))
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStop:self duration:_resultDuration liveUploadData:_liveUploadData thumbnailImage:_recordingThumbnail thumbnails:_thumbnails]; };
        }

        if (delegateCallbackBlock != nil)
            [self invokeDelegateCallbackAsync:delegateCallbackBlock];
    }
}

// All delegate notifications funnel through here onto the client-supplied queue.
- (void)invokeDelegateCallbackAsync:(dispatch_block_t)callbackBlock
{
    dispatch_async(_delegateCallbackQueue, ^
    {
        @autoreleasepool
        {
            callbackBlock();
        }
    });
}

// Rotation that maps the native buffer orientation to the requested one.
- (CGAffineTransform)transformForOrientation:(AVCaptureVideoOrientation)orientation
{
    CGAffineTransform transform = CGAffineTransformIdentity;

    CGFloat orientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation(orientation);
    CGFloat videoOrientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation(_videoBufferOrientation);

    CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
    transform = CGAffineTransformMakeRotation(angleOffset);

    return transform;
}

// Angle (radians) from portrait to the given capture orientation.
static CGFloat angleOffsetFromPortraitOrientationToOrientation(AVCaptureVideoOrientation orientation)
{
    CGFloat angle = 0.0;

    switch (orientation)
    {
        case AVCaptureVideoOrientationPortrait:
            angle = 0.0;
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            angle = M_PI;
            break;
        case AVCaptureVideoOrientationLandscapeRight:
            angle = -M_PI_2;
            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            angle = M_PI_2;
            break;
        default:
            break;
    }

    return angle;
}

- (NSTimeInterval)videoDuration
{
    return _recorder.videoDuration;
}

// Switches front/back camera. Pauses the recorder first so
// -renderVideoSampleBuffer: can cross-fade over the last pre-switch frame.
- (void)setCameraPosition:(AVCaptureDevicePosition)position
{
    @synchronized (self)
    {
        _recorder.paused = true;
    }

    [[TGVideoCameraPipeline cameraQueue] dispatch:^
    {
        NSError *error;

        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        AVCaptureDevice *deviceForTargetPosition = nil;
        for (AVCaptureDevice *device in devices)
        {
            if (device.position == position)
            {
                deviceForTargetPosition = device;
                break;
            }
        }

        _renderer.mirror = deviceForTargetPosition.position == AVCaptureDevicePositionFront;
        _renderer.orientation = _orientation;

        AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:deviceForTargetPosition error:&error];
        if (newVideoInput != nil)
        {
            [_captureSession beginConfiguration];

            [_captureSession removeInput:_videoInput];
            if ([_captureSession canAddInput:newVideoInput])
            {
                [_captureSession addInput:newVideoInput];
                _videoInput = newVideoInput;
            }
            else
            {
                // Could not use the new input — restore the previous one.
                [_captureSession addInput:_videoInput];
            }

            [_captureSession commitConfiguration];
        }

        _videoDevice = deviceForTargetPosition;

        // The connection object changes when inputs change; re-fetch and re-apply settings.
        _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];

        [self _configureFPS];

        [self _enableLowLightBoost];
        [self _enableVideoStabilization];

        _videoBufferOrientation = _videoConnection.videoOrientation;
    }];
}


- (void)_enableLowLightBoost
{
    [self _reconfigureDevice:_videoDevice withBlock:^(AVCaptureDevice *device)
    {
        if (device.isLowLightBoostSupported)
            device.automaticallyEnablesLowLightBoostWhenAvailable = true;
    }];
}

- (void)_enableVideoStabilization
{
    AVCaptureConnection *videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection.supportsVideoStabilization)
    {
        // Prefer the modern API (iOS 8+); fall back to the deprecated flag.
        if ([videoConnection respondsToSelector:@selector(setPreferredVideoStabilizationMode:)])
            videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        else
            videoConnection.enablesVideoStabilizationWhenAvailable = true;
    }
}

// Runs `block` with the device's configuration lock held.
- (void)_reconfigureDevice:(AVCaptureDevice *)device withBlock:(void (^)(AVCaptureDevice *device))block
{
    if (block == nil)
        return;

    NSError *error = nil;
    [device lockForConfiguration:&error];
    block(device);
    [device unlockForConfiguration];

    if (error != nil)
        TGLegacyLog(@"ERROR: failed to reconfigure camera: %@", error);
}

// Re-adds the audio input/output after -_removeAudioInput. No-op if present.
- (void)_addAudioInput
{
    if (_audioDevice != nil || _audioDataOutputQueue == NULL)
        return;

    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    NSError *error = nil;
    if (audioDevice != nil)
    {
        _audioDevice = audioDevice;
        AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioDevice error:&error];
        if ([_captureSession canAddInput:audioInput])
        {
            [_captureSession addInput:audioInput];
            _audioInput = audioInput;
        }
    }

    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([_captureSession canAddOutput:audioOutput])
    {
        [audioOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];
        [_captureSession addOutput:audioOutput];
        _audioOutput = audioOutput;
    }
}

- (void)_removeAudioInput
{
    if (_audioDevice == nil)
        return;

    [_captureSession removeInput:_audioInput];
    _audioInput = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_captureSession removeOutput:_audioOutput];
    _audioOutput = nil;

    _audioDevice = nil;
}

// Locks the capture frame rate to 30 fps (min == max frame duration).
- (void)_configureFPS
{
    CMTime frameDuration = CMTimeMake(1, 30);
    [self _reconfigureDevice:_videoDevice withBlock:^(AVCaptureDevice *device)
    {
        device.activeVideoMaxFrameDuration = frameDuration;
        device.activeVideoMinFrameDuration = frameDuration;
    }];
}

+ (bool)cameraPositionChangeAvailable
{
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1;
}

// Shared serial queue on which all session mutations are performed.
+ (SQueue *)cameraQueue
{
    static dispatch_once_t onceToken;
    static SQueue *queue = nil;
    dispatch_once(&onceToken, ^
    {
        queue = [[SQueue alloc] init];
    });

    return queue;
}

@end
diff --git a/LegacyComponents/TGVideoMessageCaptureController.h
b/LegacyComponents/TGVideoMessageCaptureController.h
new file mode 100644
index 0000000000..4cce5d7b92
--- /dev/null
+++ b/LegacyComponents/TGVideoMessageCaptureController.h
@@ -0,0 +1,27 @@
// NOTE(review): angle-bracket import target lost in extraction — restore from original.
#import

@class TGVideoEditAdjustments;

// Full-screen overlay controller that records round video messages using
// TGVideoCameraPipeline and reports results through the blocks below.
@interface TGVideoMessageCaptureController : TGOverlayController

// Returns an opaque "user is recording" activity token held for the session's duration.
@property (nonatomic, copy) id (^requestActivityHolder)();
// Periodic microphone level in [0, 1] for the level meter.
@property (nonatomic, copy) void (^micLevel)(CGFloat level);
// Delivered once when a finished recording is accepted.
@property (nonatomic, copy) void(^finishedWithVideo)(NSURL *videoURL, UIImage *previewImage, NSUInteger fileSize, NSTimeInterval duration, CGSize dimensions, id liveUploadData, TGVideoEditAdjustments *adjustments);
@property (nonatomic, copy) void(^onDismiss)(bool isAuto);
@property (nonatomic, copy) void(^onStop)(void);
@property (nonatomic, copy) void(^onCancel)(void);

- (instancetype)initWithContext:(id)context transitionInView:(UIView *(^)())transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id)liveUploadInterface;
- (void)buttonInteractionUpdate:(CGPoint)value;
- (void)setLocked;

- (void)complete;
- (void)dismiss;
- (void)stop;

+ (void)clearStartImage;

+ (void)requestCameraAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock;
+ (void)requestMicrophoneAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock;

@end
diff --git a/LegacyComponents/TGVideoMessageCaptureController.m b/LegacyComponents/TGVideoMessageCaptureController.m
new file mode 100644
index 0000000000..ca44c93743
--- /dev/null
+++ b/LegacyComponents/TGVideoMessageCaptureController.m
@@ -0,0 +1,1283 @@
#import "TGVideoMessageCaptureController.h"

#import "LegacyComponentsInternal.h"

// NOTE(review): the following angle-bracket import targets were lost in
// extraction — restore from the original file.
#import

#import
#import
#import
#import
#import

#import

#import
#import
#import "TGVideoCameraPipeline.h"
#import

#import
#import
#import
#import

#import "TGColor.h"
#import "TGImageUtils.h"

// Hard cap on a round video message's length.
const NSTimeInterval TGVideoMessageMaximumDuration = 60.0;

// Presentation style chosen once per app run based on iOS version / screen size
// (see -_transitionType).
typedef enum
{
    TGVideoMessageTransitionTypeUsual,
    TGVideoMessageTransitionTypeSimplified,
    TGVideoMessageTransitionTypeLegacy
} TGVideoMessageTransitionType;

@interface TGVideoMessageCaptureControllerWindow : TGOverlayControllerWindow

@property (nonatomic, assign) CGRect controlsFrame;
@property (nonatomic, assign) bool locked;

@end

@interface TGVideoMessageCaptureController ()
{
    SQueue *_queue;

    AVCaptureDevicePosition _preferredPosition;
    TGVideoCameraPipeline *_capturePipeline;
    NSURL *_url;

    PGCameraVolumeButtonHandler *_buttonHandler;
    bool _autorotationWasEnabled;
    bool _dismissed;
    bool _changing;
    // Cleared when the app backgrounds (GL rendering unavailable); triggers auto-dismiss.
    bool _gpuAvailable;
    bool _locked;
    bool _positionChangeLocked;
    bool _alreadyStarted;

    CGRect _controlsFrame;
    TGVideoMessageControls *_controlsView;
    TGModernButton *_switchButton;

    UIView *_wrapperView;

    UIView *_blurView;

    UIView *_fadeView;
    // Circular camera preview: wrapper > shadow + clipped circle > preview/placeholder.
    UIView *_circleWrapperView;
    UIImageView *_shadowView;
    UIView *_circleView;
    TGVideoCameraGLView *_previewView;
    TGVideoMessageRingView *_ringView;

    UIView *_separatorView;

    UIImageView *_placeholderView;

    bool _automaticDismiss;
    NSTimeInterval _startTimestamp;
    NSTimer *_recordingTimer;

    NSTimeInterval _previousDuration;
    NSUInteger _audioRecordingDurationSeconds;
    NSUInteger _audioRecordingDurationMilliseconds;

    id _activityHolder;
    SMetaDisposable *_activityDisposable;

    SMetaDisposable *_currentAudioSession;
    bool _otherAudioPlaying;

    id _didEnterBackgroundObserver;

    bool _stopped;
    id _liveUploadData;
    UIImage *_thumbnailImage;
    NSDictionary *_thumbnails;
    NSTimeInterval _duration;
    AVPlayer *_player;
    id _didPlayToEndObserver;

    TGModernGalleryVideoView *_videoView;
    UIImageView *_muteView;
    bool _muted;

    SMetaDisposable *_thumbnailsDisposable;
    id _context;
    UIView *(^_transitionInView)();
    id _liveUploadInterface;
}

@property (nonatomic, copy) bool(^isAlreadyLocked)(void);

@end

@implementation TGVideoMessageCaptureController

// Designated initializer. Creates the overlay window immediately; camera setup
// happens in -loadView / -configureCamera.
- (instancetype)initWithContext:(id)context transitionInView:(UIView *(^)())transitionInView parentController:(TGViewController *)parentController controlsFrame:(CGRect)controlsFrame isAlreadyLocked:(bool (^)(void))isAlreadyLocked liveUploadInterface:(id)liveUploadInterface
{
    self = [super init];
    if (self != nil)
    {
        _context = context;
        _transitionInView = [transitionInView copy];
        self.isAlreadyLocked = isAlreadyLocked;
        _liveUploadInterface = liveUploadInterface;

        _url = [TGVideoMessageCaptureController tempOutputPath];
        _queue = [[SQueue alloc] init];

        _previousDuration = 0.0;
        // Round messages default to the front (selfie) camera.
        _preferredPosition = AVCaptureDevicePositionFront;

        self.isImportant = true;
        _controlsFrame = controlsFrame;

        TGVideoMessageCaptureControllerWindow *window = [[TGVideoMessageCaptureControllerWindow alloc] initWithManager:[_context makeOverlayWindowManager] parentController:parentController contentController:self keepKeyboard:true];
        // Just below the absolute top so system overlays can still appear above.
        window.windowLevel = 1000000000.0f - 0.001f;
        window.hidden = false;
        window.controlsFrame = controlsFrame;

        _gpuAvailable = true;

        _activityDisposable = [[SMetaDisposable alloc] init];
        _currentAudioSession = [[SMetaDisposable alloc] init];

        // GL rendering is unavailable in background: auto-dismiss on backgrounding.
        __weak TGVideoMessageCaptureController *weakSelf = self;
        _didEnterBackgroundObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidEnterBackgroundNotification object:nil queue:nil usingBlock:^(__unused NSNotification *notification)
        {
            __strong TGVideoMessageCaptureController *strongSelf = weakSelf;
            if (strongSelf != nil && !strongSelf->_stopped)
            {
                strongSelf->_automaticDismiss = true;
                strongSelf->_gpuAvailable = false;
                [strongSelf dismiss:true];
            }
        }];

        _thumbnailsDisposable = [[SMetaDisposable alloc] init];
    }
    return self;
}

- (void)dealloc
{
    [_thumbnailsDisposable dispose];
    [[NSNotificationCenter defaultCenter] removeObserver:_didEnterBackgroundObserver];
    [_activityDisposable dispose];
}

// Random .mp4 path in the temporary directory for the recording output.
+ (NSURL *)tempOutputPath
{
    return [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[[NSString alloc] initWithFormat:@"cam_%x.mp4", (int)arc4random()]]];
}

// Builds the full view hierarchy: blur/fade backdrop, circular preview with
// ring, controls bar, separator, camera-switch button; then configures the camera.
- (void)loadView
{
    [super loadView];

    self.view.backgroundColor = [UIColor clearColor];

    CGRect wrapperFrame = TGIsPad() ? CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMaxY(_controlsFrame)): CGRectMake(0.0f, 0.0f, self.view.frame.size.width, CGRectGetMinY(_controlsFrame));

    _wrapperView = [[UIView alloc] initWithFrame:wrapperFrame];
    _wrapperView.clipsToBounds = true;
    [self.view addSubview:_wrapperView];

    // Backdrop depends on transition type: blur (usual/simplified) or plain fade (legacy).
    TGVideoMessageTransitionType type = [self _transitionType];
    CGRect fadeFrame = CGRectMake(0.0f, 0.0f, _wrapperView.frame.size.width, _wrapperView.frame.size.height);
    if (type != TGVideoMessageTransitionTypeLegacy)
    {
        UIBlurEffect *effect = nil;
        if (type == TGVideoMessageTransitionTypeSimplified)
            effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];

        _blurView = [[UIVisualEffectView alloc] initWithEffect:effect];
        [_wrapperView addSubview:_blurView];

        if (type == TGVideoMessageTransitionTypeSimplified)
        {
            _blurView.alpha = 0.0f;
        }
        else
        {
            _fadeView = [[UIView alloc] initWithFrame:fadeFrame];
            _fadeView.alpha = 0.0f;
            _fadeView.backgroundColor = UIColorRGBA(0xffffff, 0.4f);
            [_wrapperView addSubview:_fadeView];
        }
    }
    else
    {
        _fadeView = [[UIView alloc] initWithFrame:fadeFrame];
        _fadeView.alpha = 0.0f;
        _fadeView.backgroundColor = UIColorRGBA(0xffffff, 0.6f);
        [_wrapperView addSubview:_fadeView];
    }

    // 216pt circle + 38pt margin for the shadow; starts off-screen below and
    // invisible — the transition animates it into place.
    _circleWrapperView = [[UIView alloc] initWithFrame:CGRectMake((_wrapperView.frame.size.width - 216.0f - 38.0f) / 2.0f, _wrapperView.frame.size.height + 100.0f, 216.0f + 38.0f, 216.0f + 38.0f)];
    _circleWrapperView.alpha = 0.0f;
    _circleWrapperView.clipsToBounds = false;
    [_wrapperView addSubview:_circleWrapperView];

    _shadowView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"VideoMessageShadow"]];
    _shadowView.frame = _circleWrapperView.bounds;
    [_circleWrapperView addSubview:_shadowView];

    _circleView = [[UIView alloc] initWithFrame:CGRectInset(_circleWrapperView.bounds, 19.0f, 19.0f)];
    _circleView.clipsToBounds = true;
    _circleView.layer.cornerRadius = _circleView.frame.size.width / 2.0f;
    [_circleWrapperView addSubview:_circleView];

    // Placeholder shown until the live preview produces its first frame.
    _placeholderView = [[UIImageView alloc] initWithFrame:_circleView.bounds];
    _placeholderView.backgroundColor = [UIColor blackColor];
    _placeholderView.image = [TGVideoMessageCaptureController startImage];
    [_circleView addSubview:_placeholderView];

    _ringView = [[TGVideoMessageRingView alloc] initWithFrame:CGRectMake((_circleWrapperView.frame.size.width - 234.0f) / 2.0f, (_circleWrapperView.frame.size.height - 234.0f) / 2.0f, 234.0f, 234.0f)];
    [_circleWrapperView addSubview:_ringView];

    // Controls bar pinned to the bottom edge of the supplied controls frame.
    CGRect controlsFrame = _controlsFrame;
    CGFloat height = TGIsPad() ? 56.0f : 45.0f;
    controlsFrame.origin.y = CGRectGetMaxY(controlsFrame) - height;
    controlsFrame.size.height = height;

    _controlsView = [[TGVideoMessageControls alloc] initWithFrame:controlsFrame];
    _controlsView.clipsToBounds = true;
    _controlsView.parent = self;
    _controlsView.isAlreadyLocked = self.isAlreadyLocked;
    _controlsView.controlsHeight = _controlsFrame.size.height;

    __weak TGVideoMessageCaptureController *weakSelf = self;
    _controlsView.cancel = ^
    {
        __strong TGVideoMessageCaptureController *strongSelf = weakSelf;
        if (strongSelf != nil)
        {
            strongSelf->_automaticDismiss = true;
            [strongSelf dismiss:true];

            if (strongSelf.onCancel != nil)
                strongSelf.onCancel();
        }
    };
    _controlsView.deletePressed = ^
    {
        __strong TGVideoMessageCaptureController *strongSelf = weakSelf;
        if (strongSelf != nil)
        {
            strongSelf->_automaticDismiss = true;
            [strongSelf dismiss:true];

            if (strongSelf.onCancel != nil)
                strongSelf.onCancel();

        // NOTE(review): stray `;` after the closing brace (harmless empty statement)
        // preserved from the original.
        };
    };
    _controlsView.sendPressed = ^
    {
        __strong TGVideoMessageCaptureController *strongSelf = weakSelf;
        if (strongSelf != nil)
        {
            [strongSelf sendPressed];
        };
    };
    [self.view addSubview:_controlsView];

    // Hairline separator drawn just above the controls bar.
    _separatorView = [[UIView alloc] initWithFrame:CGRectMake(_controlsView.frame.origin.x, _controlsFrame.origin.y - TGScreenPixel, _controlsView.frame.size.width, TGScreenPixel)];
    _separatorView.backgroundColor = UIColorRGB(0xb2b2b2);
    _separatorView.userInteractionEnabled = false;
    [self.view addSubview:_separatorView];

    if ([TGVideoCameraPipeline cameraPositionChangeAvailable])
    {
        _switchButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 44.0f, 44.0f)];
        _switchButton.alpha = 0.0f;
        _switchButton.adjustsImageWhenHighlighted = false;
        _switchButton.adjustsImageWhenDisabled = false;
        [_switchButton setImage:[UIImage imageNamed:@"VideoRecordPositionSwitch"] forState:UIControlStateNormal];
        [_switchButton addTarget:self action:@selector(changeCameraPosition) forControlEvents:UIControlEventTouchUpInside];
        [self.view addSubview:_switchButton];
    }

    // Swallow hardware volume-button presses while the recorder UI is up.
    void (^voidBlock)(void) = ^{};
    _buttonHandler = [[PGCameraVolumeButtonHandler alloc] initWithUpButtonPressedBlock:voidBlock upButtonReleasedBlock:voidBlock downButtonPressedBlock:voidBlock downButtonReleasedBlock:voidBlock];

    [self configureCamera];
}

// Chooses the transition style once per process: legacy on iOS < 8 / 3.5-inch
// screens, simplified on iOS 8, full blur transition otherwise.
- (TGVideoMessageTransitionType)_transitionType
{
    static dispatch_once_t onceToken;
    static TGVideoMessageTransitionType type;
    dispatch_once(&onceToken, ^
    {
        CGSize screenSize = TGScreenSize();
        if (iosMajorVersion() < 8 || (NSInteger)screenSize.height == 480)
            type = TGVideoMessageTransitionTypeLegacy;
        else if (iosMajorVersion() == 8)
            type = TGVideoMessageTransitionTypeSimplified;
        else
            type = TGVideoMessageTransitionTypeUsual;
    });

    return type;
}

// Inserts the live GL preview beneath the placeholder (which is faded out later).
- (void)setupPreviewView
{
    _previewView = [[TGVideoCameraGLView alloc] initWithFrame:_circleView.bounds];
    [_circleView insertSubview:_previewView belowSubview:_placeholderView];

    [self captureStarted];
}

// Animates the backdrop in, per transition type.
- (void)_transitionIn
{
    TGVideoMessageTransitionType type = [self _transitionType];
    if (type == TGVideoMessageTransitionTypeUsual)
    {
        UIBlurEffect *effect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];

        UIView *rootView = _transitionInView();
        rootView.superview.backgroundColor = [UIColor whiteColor];

        // Animating the effect property blurs in gradually (iOS 9+ behavior).
        [UIView animateWithDuration:0.22 delay:0.0 options:UIViewAnimationOptionCurveEaseOut animations:^
        {
            ((UIVisualEffectView *)_blurView).effect = effect;
            _fadeView.alpha = 1.0f;
        } completion:nil];
    }
    else if (type == TGVideoMessageTransitionTypeSimplified)
    {
        [UIView animateWithDuration:0.22 delay:0.0 options:UIViewAnimationOptionCurveEaseOut animations:^
        {
            _blurView.alpha = 1.0f;
        } completion:nil];
    }
    else
    {
        [UIView animateWithDuration:0.25 animations:^
        {
            _fadeView.alpha = 1.0f;
        }];
    }
}

// (cut off at chunk boundary — body continues outside this view)
- (void)_transitionOut
{
TGVideoMessageTransitionType type = [self _transitionType]; + if (type == TGVideoMessageTransitionTypeUsual) + { + [UIView animateWithDuration:0.22 delay:0.0 options:UIViewAnimationOptionCurveEaseOut animations:^ + { + ((UIVisualEffectView *)_blurView).effect = nil; + _fadeView.alpha = 0.0f; + } completion:nil]; + } + else if (type == TGVideoMessageTransitionTypeSimplified) + { + [UIView animateWithDuration:0.22 delay:0.0 options:UIViewAnimationOptionCurveEaseOut animations:^ + { + _blurView.alpha = 0.0f; + } completion:nil]; + } + else + { + [UIView animateWithDuration:0.15 animations:^ + { + _fadeView.alpha = 0.0f; + }]; + } +} + +- (void)viewWillAppear:(BOOL)animated +{ + [super viewWillAppear:animated]; + + _capturePipeline.renderingEnabled = true; + + _startTimestamp = CFAbsoluteTimeGetCurrent(); + + [_controlsView setShowRecordingInterface:true velocity:0.0f]; + + [[[LegacyComponentsGlobals provider] applicationInstance] setIdleTimerDisabled:true]; + + [self _transitionIn]; + + [self _beginAudioSession:false]; + [_queue dispatch:^ + { + [_capturePipeline startRunning]; + }]; +} + +- (void)viewDidAppear:(BOOL)animated +{ + [super viewDidAppear:animated]; + + _autorotationWasEnabled = [TGViewController autorotationAllowed]; + [TGViewController disableAutorotation]; + + _circleWrapperView.transform = CGAffineTransformMakeScale(0.3f, 0.3f); + + CGPoint targetPosition = CGPointMake(_wrapperView.frame.size.width / 2.0f, _wrapperView.frame.size.height / 2.0f - _controlsView.frame.size.height); + switch (self.interfaceOrientation) + { + case UIInterfaceOrientationLandscapeLeft: + targetPosition.x = MIN(_wrapperView.frame.size.width - _circleWrapperView.bounds.size.width / 2.0f - 20.0f, _wrapperView.frame.size.width / 4.0f * 3.0f); + targetPosition.y = self.view.frame.size.height / 2.0f; + break; + + case UIInterfaceOrientationLandscapeRight: + targetPosition.x = MAX(_circleWrapperView.bounds.size.width / 2.0f + 20.0f, _wrapperView.frame.size.width / 4.0f); + 
targetPosition.y = self.view.frame.size.height / 2.0f; + break; + + default: + if (self.view.frame.size.height > self.view.frame.size.width && fabs(_wrapperView.frame.size.height - self.view.frame.size.height) < 50.0f) + targetPosition.y = _wrapperView.frame.size.height / 3.0f - 20.0f; + + targetPosition.y = MAX(_circleWrapperView.bounds.size.height / 2.0f + 40.0f, targetPosition.y); + + break; + } + + [UIView animateWithDuration:0.5 delay:0.0 usingSpringWithDamping:0.8f initialSpringVelocity:0.2f options:kNilOptions animations:^ + { + _circleWrapperView.center = targetPosition; + _circleWrapperView.transform = CGAffineTransformIdentity; + } completion:nil]; + + [UIView animateWithDuration:0.2 animations:^ + { + _circleWrapperView.alpha = 1.0f; + } completion:nil]; +} + +- (void)viewWillLayoutSubviews +{ + [super viewWillLayoutSubviews]; + + CGRect fadeFrame = TGIsPad() ? self.view.bounds : CGRectMake(0.0f, 0.0f, _wrapperView.frame.size.width, _wrapperView.frame.size.height); + _blurView.frame = fadeFrame; +} + +- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)__unused toInterfaceOrientation duration:(NSTimeInterval)__unused duration +{ + if (TGIsPad()) + { + _automaticDismiss = true; + [self dismiss:true]; + } +} + +- (void)dismissImmediately +{ + [super dismiss]; + + [self _endAudioSession]; + + [[[LegacyComponentsGlobals provider] applicationInstance] setIdleTimerDisabled:false]; + [self stopCapture]; + + if (_autorotationWasEnabled) + [TGViewController enableAutorotation]; +} + +- (void)dismiss +{ + [self dismiss:true]; +} + +- (void)dismiss:(bool)cancelled +{ + _dismissed = cancelled; + + if (self.onDismiss != nil) + self.onDismiss(_automaticDismiss); + + if (_player != nil) + [_player pause]; + + self.view.backgroundColor = [UIColor clearColor]; + self.view.userInteractionEnabled = false; + + [UIView animateWithDuration:0.15 animations:^ + { + _circleWrapperView.alpha = 0.0f; + _switchButton.alpha = 0.0f; + }]; + + [self _transitionOut]; + + 
[_controlsView setShowRecordingInterface:false velocity:0.0f]; + + TGDispatchAfter(0.3, dispatch_get_main_queue(), ^ + { + [self dismissImmediately]; + }); +} + +- (void)complete +{ + if (_stopped) + return; + + [_activityDisposable dispose]; + [self stopRecording]; + + [self dismiss:false]; +} + +- (void)buttonInteractionUpdate:(CGPoint)value +{ + [_controlsView buttonInteractionUpdate:value]; +} + +- (void)setLocked +{ + ((TGVideoMessageCaptureControllerWindow *)self.view.window).locked = true; + [_controlsView setLocked]; +} + +- (void)stop +{ + if (!_capturePipeline.isRecording) + return; + + ((TGVideoMessageCaptureControllerWindow *)self.view.window).locked = false; + _stopped = true; + _gpuAvailable = false; + _switchButton.userInteractionEnabled = false; + + [_activityDisposable dispose]; + [self stopRecording]; +} + +- (void)sendPressed +{ + _automaticDismiss = true; + [self dismiss:false]; + + [self finishWithURL:_url dimensions:CGSizeMake(240.0f, 240.0f) duration:_duration liveUploadData:_liveUploadData thumbnailImage:_thumbnailImage]; +} + +- (void)unmutePressed +{ + [self _updateMuted:false]; + + [[SQueue concurrentDefaultQueue] dispatch:^ + { + _player.muted = false; + + [self _seekToPosition:_controlsView.scrubberView.trimStartValue]; + }]; +} + +- (void)_stop +{ + [_controlsView setStopped]; + [UIView animateWithDuration:0.2 animations:^ + { + _switchButton.alpha = 0.0f; + _ringView.alpha = 0.0f; + } completion:^(__unused BOOL finished) + { + _ringView.hidden = true; + _switchButton.hidden = true; + }]; +} + +- (UIImage *)systemUnmuteButton { + static UIImage *image = nil; + if (image == nil) + { + UIGraphicsBeginImageContextWithOptions(CGSizeMake(24.0f, 24.0f), false, 0.0f); + CGContextRef context = UIGraphicsGetCurrentContext(); + + UIColor *color = UIColorRGBA(0x000000, 0.4f); + + CGContextSetFillColorWithColor(context, color.CGColor); + CGContextFillEllipseInRect(context, CGRectMake(0.0f, 0.0f, 24.0f, 24.0f)); + + UIImage *iconImage = 
TGComponentsImageNamed(@"VideoMessageMutedIcon.png"); + [iconImage drawAtPoint:CGPointMake(CGFloor((24.0f - iconImage.size.width) / 2.0f), CGFloor((24.0f - iconImage.size.height) / 2.0f))]; + + image = UIGraphicsGetImageFromCurrentImageContext(); + UIGraphicsEndImageContext(); + } + return image; +} + +- (void)setupVideoView +{ + _controlsView.scrubberView.trimStartValue = 0.0; + _controlsView.scrubberView.trimEndValue = _duration; + [_controlsView.scrubberView setTrimApplied:false]; + [_controlsView.scrubberView reloadData]; + + _player = [[AVPlayer alloc] initWithURL:_url]; + _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; + _player.muted = true; + + _didPlayToEndObserver = [[TGObserverProxy alloc] initWithTarget:self targetSelector:@selector(playerItemDidPlayToEndTime:) name:AVPlayerItemDidPlayToEndTimeNotification object:_player.currentItem]; + + _videoView = [[TGModernGalleryVideoView alloc] initWithFrame:_previewView.frame player:_player]; + [_previewView.superview insertSubview:_videoView belowSubview:_previewView]; + + UITapGestureRecognizer *gestureRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(unmutePressed)]; + [_videoView addGestureRecognizer:gestureRecognizer]; + + _muted = true; + _muteView = [[UIImageView alloc] initWithImage:[self systemUnmuteButton]]; + _muteView.frame = CGRectMake(floor(CGRectGetMidX(_circleView.bounds) - 12.0f), CGRectGetMaxY(_circleView.bounds) - 24.0f - 8.0f, 24.0f, 24.0f); + [_previewView.superview addSubview:_muteView]; + + [_player play]; + + [UIView animateWithDuration:0.1 delay:0.1 options:kNilOptions animations:^ + { + _previewView.alpha = 0.0f; + } completion:nil]; +} + +- (void)_updateMuted:(bool)muted +{ + if (muted == _muted) + return; + + _muted = muted; + + UIView *muteButtonView = _muteView; + [muteButtonView.layer removeAllAnimations]; + + if ((muteButtonView.transform.a < 0.3f || muteButtonView.transform.a > 1.0f) || muteButtonView.alpha < FLT_EPSILON) + { + 
muteButtonView.transform = CGAffineTransformMakeScale(0.001f, 0.001f); + muteButtonView.alpha = 0.0f; + } + + [UIView animateWithDuration:0.3 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | 7 << 16 animations:^ + { + muteButtonView.transform = muted ? CGAffineTransformIdentity : CGAffineTransformMakeScale(0.001f, 0.001f); + } completion:nil]; + + [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState animations:^ + { + muteButtonView.alpha = muted ? 1.0f : 0.0f; + } completion:nil]; +} + +- (void)_seekToPosition:(NSTimeInterval)position +{ + CMTime targetTime = CMTimeMakeWithSeconds(MIN(position, _duration - 0.1), NSEC_PER_SEC); + [_player.currentItem seekToTime:targetTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero]; +} + +- (void)playerItemDidPlayToEndTime:(NSNotification *)__unused notification +{ + [self _seekToPosition:_controlsView.scrubberView.trimStartValue]; + + TGDispatchOnMainThread(^ + { + [self _updateMuted:true]; + + [[SQueue concurrentDefaultQueue] dispatch:^ + { + _player.muted = true; + }]; + }); +} + +#pragma mark - + +- (void)changeCameraPosition +{ + if (_positionChangeLocked) + return; + + _preferredPosition = (_preferredPosition == AVCaptureDevicePositionFront) ? 
AVCaptureDevicePositionBack : AVCaptureDevicePositionFront; + + _gpuAvailable = false; + [_previewView removeFromSuperview]; + _previewView = nil; + + _ringView.alpha = 0.0f; + + dispatch_async(dispatch_get_main_queue(), ^ + { + [UIView transitionWithView:_circleWrapperView duration:0.4f options:UIViewAnimationOptionTransitionFlipFromLeft | UIViewAnimationOptionCurveEaseOut animations:^ + { + _placeholderView.hidden = false; + } completion:^(__unused BOOL finished) + { + _ringView.alpha = 1.0f; + _gpuAvailable = true; + }]; + + [_capturePipeline setCameraPosition:_preferredPosition]; + + _positionChangeLocked = true; + TGDispatchAfter(1.0, dispatch_get_main_queue(), ^ + { + _positionChangeLocked = false; + }); + }); +} + +#pragma mark - + +- (void)startRecording +{ + [_buttonHandler ignoreEventsFor:1.0f andDisable:false]; + [_capturePipeline startRecording:_url preset:TGMediaVideoConversionPresetVideoMessage liveUpload:true]; + + [self startRecordingTimer]; +} + +- (void)stopRecording +{ + [_capturePipeline stopRecording]; + [_buttonHandler ignoreEventsFor:1.0f andDisable:true]; +} + +- (void)finishWithURL:(NSURL *)url dimensions:(CGSize)dimensions duration:(NSTimeInterval)duration liveUploadData:(id )liveUploadData thumbnailImage:(UIImage *)thumbnailImage +{ + if (duration < 1.0) + _dismissed = true; + + CGFloat minSize = MIN(thumbnailImage.size.width, thumbnailImage.size.height); + CGFloat maxSize = MAX(thumbnailImage.size.width, thumbnailImage.size.height); + + bool mirrored = true; + UIImageOrientation orientation = [self orientationForThumbnailWithTransform:_capturePipeline.videoTransform mirrored:mirrored]; + + UIImage *image = TGPhotoEditorCrop(thumbnailImage, nil, orientation, 0.0f, CGRectMake((maxSize - minSize) / 2.0f, 0.0f, minSize, minSize), mirrored, CGSizeMake(240.0f, 240.0f), thumbnailImage.size, true); + + NSDictionary *fileDictionary = [[NSFileManager defaultManager] attributesOfItemAtPath:url.path error:NULL]; + NSUInteger fileSize = 
(NSUInteger)[fileDictionary fileSize]; + + UIImage *startImage = TGSecretBlurredAttachmentImage(image, image.size, NULL, false); + [TGVideoMessageCaptureController saveStartImage:startImage]; + + TGVideoEditAdjustments *adjustments = nil; + if (_stopped) + { + NSTimeInterval trimStartValue = _controlsView.scrubberView.trimStartValue; + NSTimeInterval trimEndValue = _controlsView.scrubberView.trimEndValue; + + if (trimStartValue > DBL_EPSILON || trimEndValue < _duration - DBL_EPSILON) + { + adjustments = [TGVideoEditAdjustments editAdjustmentsWithOriginalSize:dimensions cropRect:CGRectMake(0.0f, 0.0f, dimensions.width, dimensions.height) cropOrientation:UIImageOrientationUp cropLockedAspectRatio:1.0 cropMirrored:false trimStartValue:trimStartValue trimEndValue:trimEndValue paintingData:nil sendAsGif:false preset:TGMediaVideoConversionPresetVideoMessage]; + + duration = trimEndValue - trimStartValue; + } + + if (trimStartValue > DBL_EPSILON) + { + NSArray *thumbnail = [self thumbnailsForTimestamps:@[@(trimStartValue)]]; + image = thumbnail.firstObject; + } + } + + if (!_dismissed && !_changing && self.finishedWithVideo != nil) + self.finishedWithVideo(url, image, fileSize, duration, dimensions, liveUploadData, adjustments); + else + [[NSFileManager defaultManager] removeItemAtURL:url error:NULL]; + + _changing = false; +} + +- (UIImageOrientation)orientationForThumbnailWithTransform:(CGAffineTransform)transform mirrored:(bool)mirrored +{ + CGFloat angle = atan2(transform.b, transform.a); + NSInteger degrees = (360 + (NSInteger)TGRadiansToDegrees(angle)) % 360; + + switch (degrees) + { + case 90: + return mirrored ? UIImageOrientationLeft : UIImageOrientationRight; + break; + + case 180: + return UIImageOrientationDown; + break; + + case 270: + return mirrored ? 
UIImageOrientationRight : UIImageOrientationLeft; /* fix: 270° is the inverse of the 90° rotation, so the mirrored/non-mirrored pair must be swapped relative to case 90; previously both cases returned the same orientations */ + + default: + break; + } + + return UIImageOrientationUp; +} + +#pragma mark - + +- (void)startRecordingTimer +{ + [_controlsView recordingStarted]; + [_controlsView setDurationString:@"0:00,00"]; + + _audioRecordingDurationSeconds = 0; + _audioRecordingDurationMilliseconds = 0.0; + _recordingTimer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(timerEvent) interval:2.0 / 60.0 repeat:false]; +} + +- (void)timerEvent +{ + if (_recordingTimer != nil) + { + [_recordingTimer invalidate]; + _recordingTimer = nil; + } + + NSTimeInterval recordingDuration = _capturePipeline.videoDuration; + if (isnan(recordingDuration)) + recordingDuration = 0.0; + + if (recordingDuration < _previousDuration) + recordingDuration = _previousDuration; + + _previousDuration = recordingDuration; + [_ringView setValue:recordingDuration / TGVideoMessageMaximumDuration]; + + CFAbsoluteTime currentTime = CACurrentMediaTime(); + NSUInteger currentDurationSeconds = (NSUInteger)recordingDuration; + NSUInteger currentDurationMilliseconds = (int)(recordingDuration * 100.0f) % 100; + if (currentDurationSeconds == _audioRecordingDurationSeconds && currentDurationMilliseconds == _audioRecordingDurationMilliseconds) + { + _recordingTimer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(timerEvent) interval:MAX(0.01, _audioRecordingDurationSeconds + 2.0 / 60.0 - currentTime) repeat:false]; + } + else + { + _audioRecordingDurationSeconds = currentDurationSeconds; + _audioRecordingDurationMilliseconds = currentDurationMilliseconds; + [_controlsView setDurationString:[[NSString alloc] initWithFormat:@"%d:%02d,%02d", (int)_audioRecordingDurationSeconds / 60, (int)_audioRecordingDurationSeconds % 60, (int)_audioRecordingDurationMilliseconds]]; + _recordingTimer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(timerEvent) interval:2.0 / 60.0 repeat:false]; + } + + if (recordingDuration 
>= TGVideoMessageMaximumDuration) + { + [_recordingTimer invalidate]; + _recordingTimer = nil; + + _automaticDismiss = true; + [self stop]; + + if (self.onStop != nil) + self.onStop(); + } +} + +- (void)stopRecordingTimer +{ + if (_recordingTimer != nil) + { + [_recordingTimer invalidate]; + _recordingTimer = nil; + } +} + +#pragma mark - + +- (void)captureStarted +{ + bool firstTime = !_alreadyStarted; + _alreadyStarted = true; + + _switchButton.frame = CGRectMake(11.0f, _controlsFrame.origin.y - _switchButton.frame.size.height - 7.0f, _switchButton.frame.size.width, _switchButton.frame.size.height); + + NSTimeInterval delay = firstTime ? 0.1 : 0.2; + [UIView animateWithDuration:0.3 delay:delay options:kNilOptions animations:^ + { + _placeholderView.alpha = 0.0f; + _switchButton.alpha = 1.0f; + } completion:^(__unused BOOL finished) + { + _placeholderView.hidden = true; + _placeholderView.alpha = 1.0f; + }]; + + if (firstTime) + { + TGDispatchAfter(0.2, dispatch_get_main_queue(), ^ + { + [self startRecording]; + }); + } +} + +- (void)stopCapture +{ + [_capturePipeline stopRunning]; +} + +- (void)configureCamera +{ + _capturePipeline = [[TGVideoCameraPipeline alloc] initWithDelegate:self position:_preferredPosition callbackQueue:dispatch_get_main_queue() liveUploadInterface:_liveUploadInterface]; + _capturePipeline.orientation = (AVCaptureVideoOrientation)self.interfaceOrientation; + + __weak TGVideoMessageCaptureController *weakSelf = self; + _capturePipeline.micLevel = ^(CGFloat level) + { + TGDispatchOnMainThread(^ + { + __strong TGVideoMessageCaptureController *strongSelf = weakSelf; + if (strongSelf != nil && strongSelf.micLevel != nil) + strongSelf.micLevel(level); + }); + }; +} + +#pragma mark - + +- (void)capturePipeline:(TGVideoCameraPipeline *)__unused capturePipeline didStopRunningWithError:(NSError *)__unused error +{ +} + +- (void)capturePipeline:(TGVideoCameraPipeline *)__unused capturePipeline 
previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer +{ + if (!_gpuAvailable) + return; + + if (!_previewView) + [self setupPreviewView]; + + [_previewView displayPixelBuffer:previewPixelBuffer]; +} + +- (void)capturePipelineDidRunOutOfPreviewBuffers:(TGVideoCameraPipeline *)__unused capturePipeline +{ + if (_gpuAvailable) + [_previewView flushPixelBufferCache]; +} + +- (void)capturePipelineRecordingDidStart:(TGVideoCameraPipeline *)__unused capturePipeline +{ + __weak TGVideoMessageCaptureController *weakSelf = self; + [_activityDisposable setDisposable:[[[SSignal complete] delay:0.3 onQueue:[SQueue mainQueue]] startWithNext:nil error:nil completed:^{ + __strong TGVideoMessageCaptureController *strongSelf = weakSelf; + if (strongSelf != nil && strongSelf->_requestActivityHolder) { + strongSelf->_activityHolder = strongSelf->_requestActivityHolder(); + } + }]]; +} + +- (void)capturePipelineRecordingWillStop:(TGVideoCameraPipeline *)__unused capturePipeline +{ +} + +- (void)capturePipelineRecordingDidStop:(TGVideoCameraPipeline *)__unused capturePipeline duration:(NSTimeInterval)duration liveUploadData:(id)liveUploadData thumbnailImage:(UIImage *)thumbnailImage thumbnails:(NSDictionary *)thumbnails +{ + if (_stopped && duration > 0.33) + { + _duration = duration; + _liveUploadData = liveUploadData; + _thumbnailImage = thumbnailImage; + _thumbnails = thumbnails; + TGDispatchOnMainThread(^ + { + [self _stop]; + [self setupVideoView]; + }); + } + else + { + [self finishWithURL:_url dimensions:CGSizeMake(240.0f, 240.0f) duration:duration liveUploadData:liveUploadData thumbnailImage:thumbnailImage]; + } +} + +- (void)capturePipeline:(TGVideoCameraPipeline *)__unused capturePipeline recordingDidFailWithError:(NSError *)__unused error +{ +} + +#pragma mark - + +- (void)_beginAudioSession:(bool)speaker +{ + [_queue dispatch:^ + { + _otherAudioPlaying = [[AVAudioSession sharedInstance] isOtherAudioPlaying]; + + __weak TGVideoMessageCaptureController 
*weakSelf = self; + [_currentAudioSession setDisposable:[[LegacyComponentsGlobals provider] requestAudioSession:speaker ? TGAudioSessionTypePlayAndRecordHeadphones : TGAudioSessionTypePlayAndRecord interrupted:^ + { + __strong TGVideoMessageCaptureController *strongSelf = weakSelf; + if (strongSelf != nil) + { + strongSelf->_automaticDismiss = true; + [strongSelf complete]; + } + }]]; + }]; +} + +- (void)_endAudioSession +{ + id currentAudioSession = _currentAudioSession; + [_queue dispatch:^ + { + [currentAudioSession dispose]; + }]; +} + +#pragma mark - + +static UIImage *startImage = nil; + ++ (NSString *)_startImagePath +{ + return [[[LegacyComponentsGlobals provider] dataCachePath] stringByAppendingPathComponent:@"startImage.jpg"]; +} + ++ (UIImage *)startImage +{ + if (startImage == nil) + startImage = [UIImage imageWithContentsOfFile:[self _startImagePath]] ? : [UIImage imageNamed:@"VideoMessagePlaceholder.jpg"]; + + return startImage; +} + ++ (void)saveStartImage:(UIImage *)image +{ + if (image == nil) + return; + + [self clearStartImage]; + + startImage = image; + + NSData *data = UIImageJPEGRepresentation(image, 0.8f); + [data writeToFile:[self _startImagePath] atomically:true]; +} + ++ (void)clearStartImage +{ + startImage = nil; + [[NSFileManager defaultManager] removeItemAtPath:[self _startImagePath] error:NULL]; +} + ++ (void)requestCameraAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock +{ + if (iosMajorVersion() < 7) + { + if (resultBlock != nil) + resultBlock(true, false); + return; + } + + bool wasNotDetermined = ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined); + [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) + { + TGDispatchOnMainThread(^ + { + if (resultBlock != nil) + resultBlock(granted, wasNotDetermined); + }); + }]; +} + ++ (void)requestMicrophoneAccess:(void (^)(bool granted, bool wasNotDetermined))resultBlock +{ + if 
(iosMajorVersion() < 7) + { + if (resultBlock != nil) + resultBlock(true, false); + return; + } + + bool wasNotDetermined = ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio] == AVAuthorizationStatusNotDetermined); + [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) + { + TGDispatchOnMainThread(^ + { + if (resultBlock != nil) + resultBlock(granted, wasNotDetermined); + }); + }]; +} + +#pragma mark - Scrubbing + +- (NSTimeInterval)videoScrubberDuration:(TGVideoMessageScrubber *)__unused videoScrubber +{ + return _duration; +} + +- (void)videoScrubberDidBeginScrubbing:(TGVideoMessageScrubber *)__unused videoScrubber +{ +} + +- (void)videoScrubberDidEndScrubbing:(TGVideoMessageScrubber *)__unused videoScrubber +{ +} + +- (void)videoScrubber:(TGVideoMessageScrubber *)__unused videoScrubber valueDidChange:(NSTimeInterval)__unused position +{ +} + +#pragma mark - Trimming + +- (void)videoScrubberDidBeginEditing:(TGVideoMessageScrubber *)__unused videoScrubber +{ + [_player pause]; +} + +- (void)videoScrubberDidEndEditing:(TGVideoMessageScrubber *)videoScrubber endValueChanged:(bool)endValueChanged +{ + [self updatePlayerRange:videoScrubber.trimEndValue]; + + if (endValueChanged) + [self _seekToPosition:videoScrubber.trimStartValue]; + + [_player play]; +} + +- (void)videoScrubber:(TGVideoMessageScrubber *)__unused videoScrubber editingStartValueDidChange:(NSTimeInterval)startValue +{ + [self _seekToPosition:startValue]; +} + +- (void)videoScrubber:(TGVideoMessageScrubber *)__unused videoScrubber editingEndValueDidChange:(NSTimeInterval)endValue +{ + [self _seekToPosition:endValue]; +} + +- (void)updatePlayerRange:(NSTimeInterval)trimEndValue +{ + _player.currentItem.forwardPlaybackEndTime = CMTimeMakeWithSeconds(trimEndValue, NSEC_PER_SEC); +} + +#pragma mark - Thumbnails + +- (CGFloat)videoScrubberThumbnailAspectRatio:(TGVideoMessageScrubber *)__unused videoScrubber +{ + return 1.0f; +} + +- (NSArray 
*)videoScrubber:(TGVideoMessageScrubber *)videoScrubber evenlySpacedTimestamps:(NSInteger)count startingAt:(NSTimeInterval)startTimestamp endingAt:(NSTimeInterval)endTimestamp +{ + if (endTimestamp < startTimestamp) + return nil; + + if (count == 0) + return nil; + + NSTimeInterval duration = [self videoScrubberDuration:videoScrubber]; + if (endTimestamp > duration) + endTimestamp = duration; + + NSTimeInterval interval = (endTimestamp - startTimestamp) / count; + + NSMutableArray *timestamps = [[NSMutableArray alloc] init]; + for (NSInteger i = 0; i < count; i++) + [timestamps addObject:@(startTimestamp + i * interval)]; + + return timestamps; +} + +- (NSArray *)thumbnailsForTimestamps:(NSArray *)timestamps +{ + NSArray *thumbnailTimestamps = [_thumbnails.allKeys sortedArrayUsingSelector:@selector(compare:)]; + NSMutableArray *thumbnails = [[NSMutableArray alloc] init]; + + __block NSUInteger i = 1; + [timestamps enumerateObjectsUsingBlock:^(NSNumber *timestampVal, __unused NSUInteger index, __unused BOOL *stop) + { + NSTimeInterval timestamp = timestampVal.doubleValue; + /* fix: closestTimestampForTimestamp reads timestamps[start - 1] and timestamps[start] unconditionally, which crashes out-of-bounds when fewer than two thumbnails exist; fall back to the single (or no) available key instead */ + NSNumber *closestTimestamp = thumbnailTimestamps.count > 1 ? [self closestTimestampForTimestamp:timestamp timestamps:thumbnailTimestamps start:i finalIndex:&i] : thumbnailTimestamps.firstObject; + + if (closestTimestamp != nil) + [thumbnails addObject:_thumbnails[closestTimestamp]]; + }]; + + return thumbnails; +} + +- (NSNumber *)closestTimestampForTimestamp:(NSTimeInterval)timestamp timestamps:(NSArray *)timestamps start:(NSUInteger)start finalIndex:(NSUInteger *)finalIndex +{ + NSTimeInterval leftTimestamp = [timestamps[start - 1] doubleValue]; + NSTimeInterval rightTimestamp = [timestamps[start] doubleValue]; + + if (fabs(leftTimestamp - timestamp) < fabs(rightTimestamp - timestamp)) + { + *finalIndex = start; + return timestamps[start - 1]; + } + else + { + if (start == timestamps.count - 1) + { + *finalIndex = start; + return timestamps[start]; + } + + return [self closestTimestampForTimestamp:timestamp timestamps:timestamps start:start + 1 finalIndex:finalIndex]; + } +} + +- 
(void)videoScrubber:(TGVideoMessageScrubber *)__unused videoScrubber requestThumbnailImagesForTimestamps:(NSArray *)timestamps size:(CGSize)__unused size isSummaryThumbnails:(bool)isSummaryThumbnails +{ + if (timestamps.count == 0) + return; + + NSArray *thumbnails = [self thumbnailsForTimestamps:timestamps]; + [thumbnails enumerateObjectsUsingBlock:^(UIImage *image, NSUInteger index, __unused BOOL *stop) + { + if (index < timestamps.count) + [_controlsView.scrubberView setThumbnailImage:image forTimestamp:[timestamps[index] doubleValue] isSummaryThubmnail:isSummaryThumbnails]; + }]; +} + +- (void)videoScrubberDidFinishRequestingThumbnails:(TGVideoMessageScrubber *)__unused videoScrubber +{ + [_controlsView showScrubberView]; +} + +- (void)videoScrubberDidCancelRequestingThumbnails:(TGVideoMessageScrubber *)__unused videoScrubber +{ +} + +- (CGSize)videoScrubberOriginalSize:(TGVideoMessageScrubber *)__unused videoScrubber cropRect:(CGRect *)cropRect cropOrientation:(UIImageOrientation *)cropOrientation cropMirrored:(bool *)cropMirrored +{ + if (cropRect != NULL) + *cropRect = CGRectMake(0.0f, 0.0f, 240.0f, 240.0f); + + if (cropOrientation != NULL) + { + if (UIInterfaceOrientationIsPortrait(self.interfaceOrientation)) + *cropOrientation = UIImageOrientationUp; + else if (self.interfaceOrientation == UIInterfaceOrientationLandscapeLeft) + *cropOrientation = UIImageOrientationRight; + else if (self.interfaceOrientation == UIInterfaceOrientationLandscapeRight) + *cropOrientation = UIImageOrientationLeft; + } + + if (cropMirrored != NULL) + *cropMirrored = false; + + return CGSizeMake(240.0f, 240.0f); +} + +@end + + +@implementation TGVideoMessageCaptureControllerWindow + +- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event +{ + bool flag = [super pointInside:point withEvent:event]; + if (_locked) + { + if (point.x >= self.frame.size.width - 60.0f && point.y >= self.controlsFrame.origin.y && point.y < CGRectGetMaxY(self.controlsFrame)) + return false; + } + 
return flag; +} + +@end diff --git a/LegacyComponents/TGVideoMessageControls.h b/LegacyComponents/TGVideoMessageControls.h new file mode 100644 index 0000000000..5db5dfc143 --- /dev/null +++ b/LegacyComponents/TGVideoMessageControls.h @@ -0,0 +1,31 @@ +#import +#import + +@interface TGVideoMessageControls : UIView + +@property (nonatomic, readonly) TGVideoMessageScrubber *scrubberView; + +@property (nonatomic, assign) CGFloat controlsHeight; +@property (nonatomic, copy) void (^positionChanged)(void); +@property (nonatomic, copy) void (^cancel)(void); +@property (nonatomic, copy) void (^deletePressed)(void); +@property (nonatomic, copy) void (^sendPressed)(void); + +@property (nonatomic, copy) bool(^isAlreadyLocked)(void); + +@property (nonatomic, assign) bool positionChangeAvailable; + +@property (nonatomic, weak) id parent; + +- (void)captureStarted; +- (void)recordingStarted; +- (void)setShowRecordingInterface:(bool)show velocity:(CGFloat)velocity; +- (void)buttonInteractionUpdate:(CGPoint)value; +- (void)setLocked; +- (void)setStopped; + +- (void)showScrubberView; + +- (void)setDurationString:(NSString *)string; + +@end diff --git a/LegacyComponents/TGVideoMessageControls.m b/LegacyComponents/TGVideoMessageControls.m new file mode 100644 index 0000000000..9d78941930 --- /dev/null +++ b/LegacyComponents/TGVideoMessageControls.m @@ -0,0 +1,471 @@ +#import "TGVideoMessageControls.h" + +#import + +#import +//#import "TGModernConversationInputMicButton.h" +#import + +#import "LegacyComponentsInternal.h" +#import "TGColor.h" + +static void setViewFrame(UIView *view, CGRect frame) +{ + CGAffineTransform transform = view.transform; + view.transform = CGAffineTransformIdentity; + if (!CGRectEqualToRect(view.frame, frame)) + view.frame = frame; + view.transform = transform; +} + +static CGRect viewFrame(UIView *view) +{ + CGAffineTransform transform = view.transform; + view.transform = CGAffineTransformIdentity; + CGRect result = view.frame; + view.transform = transform; 
+ + return result; +} + +@interface TGVideoMessageControls () // +{ + UIImageView *_slideToCancelArrow; + UILabel *_slideToCancelLabel; + + TGModernButton *_cancelButton; + + TGModernButton *_deleteButton; + TGModernButton *_sendButton; + + UIImageView *_recordIndicatorView; + UILabel *_recordDurationLabel; + + CFAbsoluteTime _recordingInterfaceShowTime; +} +@end + +@implementation TGVideoMessageControls + +- (void)captureStarted +{ + [UIView transitionWithView:_recordDurationLabel duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ + _recordDurationLabel.textColor = [UIColor whiteColor]; + } completion:nil]; + + [UIView transitionWithView:_slideToCancelLabel duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ + _slideToCancelLabel.textColor = [UIColor whiteColor]; + } completion:nil]; + + [UIView transitionWithView:_slideToCancelArrow duration:0.3 options:UIViewAnimationOptionTransitionCrossDissolve animations:^{ + _slideToCancelArrow.image = TGTintedImage(_slideToCancelArrow.image, [UIColor whiteColor]); + } completion:nil]; +} + +- (void)setShowRecordingInterface:(bool)show velocity:(CGFloat)velocity +{ + CGFloat hideLeftOffset = 400.0f; + + bool isAlreadyLocked = self.isAlreadyLocked(); + + if (show) + { + _recordingInterfaceShowTime = CFAbsoluteTimeGetCurrent(); + + if (_recordIndicatorView == nil) + { + static UIImage *indicatorImage = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^ + { + indicatorImage = TGCircleImage(9.0f, UIColorRGB(0xF33D2B)); + }); + _recordIndicatorView = [[UIImageView alloc] initWithImage:indicatorImage]; + } + + setViewFrame(_recordIndicatorView, CGRectMake(11.0f, CGFloor((self.frame.size.height - 9.0f) / 2.0f), 9.0f, 9.0f)); + _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-80.0f, 0.0f); + + if (_recordDurationLabel == nil) + { + _recordDurationLabel = [[UILabel alloc] init]; + _recordDurationLabel.backgroundColor = [UIColor clearColor]; 
+ _recordDurationLabel.textColor = [UIColor blackColor]; + _recordDurationLabel.font = TGSystemFontOfSize(15.0f); + _recordDurationLabel.text = @"0:00,00 "; + [_recordDurationLabel sizeToFit]; + _recordDurationLabel.alpha = 0.0f; + _recordDurationLabel.layer.anchorPoint = CGPointMake((26.0f - _recordDurationLabel.frame.size.width) / (2 * 26.0f), 0.5f); + _recordDurationLabel.textAlignment = NSTextAlignmentLeft; + _recordDurationLabel.userInteractionEnabled = false; + } + + setViewFrame(_recordDurationLabel, CGRectMake(26.0f, CGFloor((self.frame.size.height - _recordDurationLabel.frame.size.height) / 2.0f), _recordDurationLabel.frame.size.width, _recordDurationLabel.frame.size.height)); + + _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-80.0f, 0.0f); + + if (_slideToCancelLabel == nil) + { + _slideToCancelLabel = [[UILabel alloc] init]; + _slideToCancelLabel.backgroundColor = [UIColor clearColor]; + _slideToCancelLabel.textColor = UIColorRGB(0x9597a0); + _slideToCancelLabel.font = TGSystemFontOfSize(15.0f); + _slideToCancelLabel.text = TGLocalized(@"Conversation.SlideToCancel"); + _slideToCancelLabel.clipsToBounds = false; + _slideToCancelLabel.userInteractionEnabled = false; + [_slideToCancelLabel sizeToFit]; + setViewFrame(_slideToCancelLabel, CGRectMake(CGFloor((self.frame.size.width - _slideToCancelLabel.frame.size.width) / 2.0f), CGFloor((self.frame.size.height - _slideToCancelLabel.frame.size.height) / 2.0f), _slideToCancelLabel.frame.size.width, _slideToCancelLabel.frame.size.height)); + _slideToCancelLabel.alpha = 0.0f; + + _slideToCancelArrow = [[UIImageView alloc] initWithImage:TGTintedImage([UIImage imageNamed:@"ModernConversationAudioSlideToCancel.png"], UIColorRGB(0x9597a0))]; + CGRect slideToCancelArrowFrame = viewFrame(_slideToCancelArrow); + setViewFrame(_slideToCancelArrow, CGRectMake(CGFloor((self.frame.size.width - _slideToCancelLabel.frame.size.width) / 2.0f) - slideToCancelArrowFrame.size.width - 7.0f, 
CGFloor((self.frame.size.height - _slideToCancelLabel.frame.size.height) / 2.0f), slideToCancelArrowFrame.size.width, slideToCancelArrowFrame.size.height)); + _slideToCancelArrow.alpha = 0.0f; + [self addSubview:_slideToCancelArrow]; + + _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f); + _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(hideLeftOffset, 0.0f); + + _cancelButton = [[TGModernButton alloc] init]; + _cancelButton.titleLabel.font = TGSystemFontOfSize(17.0f); + [_cancelButton setTitle:TGLocalized(@"Common.Cancel") forState:UIControlStateNormal]; + [_cancelButton setTitleColor:TGAccentColor()]; + [_cancelButton addTarget:self action:@selector(cancelPressed) forControlEvents:UIControlEventTouchUpInside]; + [_cancelButton sizeToFit]; + [self addSubview:_cancelButton]; + + setViewFrame(_cancelButton, CGRectMake(CGFloor((self.frame.size.width - _cancelButton.frame.size.width) / 2.0f), CGFloor((self.frame.size.height - _cancelButton.frame.size.height) / 2.0f) - 1.0f, _cancelButton.frame.size.width, _cancelButton.frame.size.height)); + } + + if (!isAlreadyLocked) + { + _cancelButton.alpha = 0.0f; + _cancelButton.userInteractionEnabled = false; + } + + _recordDurationLabel.text = @"0:00,00"; + + if (_recordIndicatorView.superview == nil) + [self addSubview:_recordIndicatorView]; + [_recordIndicatorView.layer removeAllAnimations]; + + if (_recordDurationLabel.superview == nil) + [self addSubview:_recordDurationLabel]; + [_recordDurationLabel.layer removeAllAnimations]; + + _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(300.0f, 0.0f); + _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(300.0f, 0.0f); + + int animationCurveOption = iosMajorVersion() >= 7 ? 
(7 << 16) : 0;
+
+        [UIView animateWithDuration:0.25 delay:0.06 options:animationCurveOption animations:^
+        {
+            _recordIndicatorView.transform = CGAffineTransformIdentity;
+        } completion:nil];
+
+        [UIView animateWithDuration:0.25 delay:0.0 options:animationCurveOption animations:^
+        {
+            _recordDurationLabel.alpha = 1.0f;
+            _recordDurationLabel.transform = CGAffineTransformIdentity;
+        } completion:nil];
+
+        if (!isAlreadyLocked)
+        {
+            if (_slideToCancelLabel.superview == nil)
+                [self addSubview:_slideToCancelLabel];
+
+            [UIView animateWithDuration:0.18 delay:0.0 options:animationCurveOption animations:^
+            {
+                _slideToCancelArrow.alpha = 1.0f;
+                _slideToCancelArrow.transform = CGAffineTransformIdentity;
+
+                _slideToCancelLabel.alpha = 1.0f;
+                _slideToCancelLabel.transform = CGAffineTransformIdentity;
+            } completion:nil];
+        }
+    }
+    else
+    {
+        [self removeDotAnimation];
+        // Clamp the dismissal speed-up factor to [0.4, 1.0] based on gesture velocity.
+        // The previous MIN(0.4, MAX(1.0, ...)) always evaluated to 0.4, because
+        // MAX(1.0, x) >= 1.0 > 0.4 — so velocity never had any effect on the animation.
+        NSTimeInterval durationFactor = MAX(0.4, MIN(1.0, velocity / 1000.0));
+
+        int options = 0;
+
+        if (ABS(CFAbsoluteTimeGetCurrent() - _recordingInterfaceShowTime) < 0.2)
+        {
+            // NOTE(review): 'options' is computed here but never passed to the
+            // animations below, which hard-code BeginFromCurrentState — confirm intent.
+            options = UIViewAnimationOptionBeginFromCurrentState;
+        }
+
+        int animationCurveOption = iosMajorVersion() >= 7 ? 
(7 << 16) : 0; + [UIView animateWithDuration:0.25 * durationFactor delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f); + } completion:^(BOOL finished) + { + if (finished) + [_recordIndicatorView removeFromSuperview]; + }]; + + [UIView animateWithDuration:0.25 * durationFactor delay:0.05 * durationFactor options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _recordDurationLabel.alpha = 0.0f; + _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f); + } completion:^(BOOL finished) + { + if (finished) + [_recordDurationLabel removeFromSuperview]; + }]; + + [UIView animateWithDuration:0.2 * durationFactor delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _slideToCancelArrow.alpha = 0.0f; + _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(-300, 0.0f); + _slideToCancelLabel.alpha = 0.0f; + _slideToCancelLabel.transform = CGAffineTransformMakeTranslation(-200, 0.0f); + + CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f); + transform = CGAffineTransformScale(transform, 0.25f, 0.25f); + _cancelButton.transform = transform; + _cancelButton.alpha = 0.0f; + + _sendButton.transform = CGAffineTransformMakeScale(0.01, 0.01); + _sendButton.alpha = 0.0f; + + transform = CGAffineTransformMakeTranslation(0.0f, -44.0f); + transform = CGAffineTransformScale(transform, 0.25f, 0.25f); + + _deleteButton.transform = transform; + _deleteButton.alpha = 0.0f; + + _scrubberView.transform = transform; + _scrubberView.alpha = 0.0f; + } completion:nil]; + } +} + +- (void)buttonInteractionUpdate:(CGPoint)value +{ + CGFloat valueX = value.x; + CGFloat offset = valueX * 300.0f; + + offset = MAX(0.0f, offset - 5.0f); + + _slideToCancelArrow.transform = CGAffineTransformMakeTranslation(-offset, 0.0f); + + 
CGAffineTransform labelTransform = CGAffineTransformIdentity;
+    labelTransform = CGAffineTransformTranslate(labelTransform, -offset, 0.0f);
+    _slideToCancelLabel.transform = labelTransform;
+
+    CGAffineTransform indicatorTransform = CGAffineTransformIdentity;
+    CGAffineTransform durationTransform = CGAffineTransformIdentity;
+
+    // Computed once: the horizontal distance the "slide to cancel" hint can travel
+    // before it starts pushing the record indicator and duration label off-screen.
+    static CGFloat freeOffsetLimit = 35.0f;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^
+    {
+        // NOTE(review): the label is measured here with a 14pt font, but
+        // _slideToCancelLabel is rendered with TGSystemFontOfSize(15.0f) —
+        // confirm whether the 1pt mismatch is intentional.
+        // NOTE(review): -sizeWithFont: is deprecated since iOS 7; consider
+        // -sizeWithAttributes: when this file's deployment target allows it.
+        CGFloat labelWidth = [TGLocalized(@"Conversation.SlideToCancel") sizeWithFont:TGSystemFontOfSize(14.0f)].width;
+        CGFloat arrowOrigin = CGFloor((TGScreenSize().width - labelWidth) / 2.0f) - 9.0f - 6.0f;
+        CGFloat timerWidth = 90.0f;
+
+        freeOffsetLimit = MAX(0.0f, arrowOrigin - timerWidth);
+    });
+
+    // Past the free-travel limit, drag the indicator and duration label along.
+    if (offset > freeOffsetLimit)
+    {
+        indicatorTransform = CGAffineTransformMakeTranslation(freeOffsetLimit - offset, 0.0f);
+        durationTransform = CGAffineTransformMakeTranslation(freeOffsetLimit - offset, 0.0f);
+    }
+
+    // Avoid redundant transform writes (each one would invalidate the layer).
+    if (!CGAffineTransformEqualToTransform(indicatorTransform, _recordIndicatorView.transform))
+        _recordIndicatorView.transform = indicatorTransform;
+
+    if (!CGAffineTransformEqualToTransform(durationTransform, _recordDurationLabel.transform))
+        _recordDurationLabel.transform = durationTransform;
+}
+
+// Transitions the controls into the "locked" recording state: the slide-to-cancel
+// hint collapses away and the explicit Cancel button animates in.
+- (void)setLocked
+{
+    CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 22.0f);
+    transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
+    _cancelButton.alpha = 0.0f;
+    _cancelButton.transform = transform;
+    _cancelButton.userInteractionEnabled = true;
+
+    // 7 << 16 is the private "default" animation curve option (see the
+    // animationCurveOption pattern used elsewhere in this class).
+    [UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^
+    {
+        _cancelButton.transform = CGAffineTransformIdentity;
+
+        CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f);
+        transform = CGAffineTransformScale(transform, 0.25f, 0.25f);
+        _slideToCancelLabel.transform = transform;
+    } completion:^(__unused BOOL finished)
+    {
+        _slideToCancelLabel.transform = CGAffineTransformIdentity;
+    }];
+
+    [UIView 
animateWithDuration:0.25 animations:^ + { + _slideToCancelArrow.alpha = 0.0f; + _slideToCancelLabel.alpha = 0.0f; + _cancelButton.alpha = 1.0f; + }]; +} + +- (void)setStopped +{ + UIImage *deleteImage = [UIImage imageNamed:@"ModernConversationActionDelete.png"]; + + _deleteButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 45.0f, 45.0f)]; + [_deleteButton setImage:deleteImage forState:UIControlStateNormal]; + _deleteButton.adjustsImageWhenDisabled = false; + _deleteButton.adjustsImageWhenHighlighted = false; + [_deleteButton addTarget:self action:@selector(deleteButtonPressed) forControlEvents:UIControlEventTouchUpInside]; + [self addSubview:_deleteButton]; + + CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 45.0f); + transform = CGAffineTransformScale(transform, 0.88f, 0.88f); + _deleteButton.transform = transform; + + TGModernButton *sendButton = [[TGModernButton alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 45.0f, 45.0f)]; + sendButton.modernHighlight = true; + _sendButton = sendButton; + _sendButton.alpha = 0.0f; + _sendButton.exclusiveTouch = true; + [_sendButton setImage:[UIImage imageNamed:@"ModernConversationSend"] forState:UIControlStateNormal]; + _sendButton.adjustsImageWhenHighlighted = false; + [_sendButton addTarget:self action:@selector(sendButtonPressed) forControlEvents:UIControlEventTouchUpInside]; + [self addSubview:_sendButton]; + + _scrubberView = [[TGVideoMessageScrubber alloc] init]; + _scrubberView.dataSource = self.parent; + _scrubberView.delegate = self.parent; + [self addSubview:_scrubberView]; + + [self layoutSubviews]; + + transform = CGAffineTransformMakeTranslation(0.0f, 44.0f); + _scrubberView.transform = transform; + + int animationCurveOption = iosMajorVersion() >= 7 ? 
(7 << 16) : 0; + [UIView animateWithDuration:0.25 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _recordIndicatorView.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f); + _recordIndicatorView.alpha = 0.0f; + } completion:^(BOOL finished) + { + if (finished) + { + [self removeDotAnimation]; + [_recordIndicatorView removeFromSuperview]; + } + }]; + + [UIView animateWithDuration:0.25 delay:0.05 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _recordDurationLabel.alpha = 0.0f; + _recordDurationLabel.transform = CGAffineTransformMakeTranslation(-90.0f, 0.0f); + } completion:^(BOOL finished) + { + if (finished) + [_recordDurationLabel removeFromSuperview]; + }]; + + [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, -22.0f); + transform = CGAffineTransformScale(transform, 0.25f, 0.25f); + _cancelButton.transform = transform; + _cancelButton.alpha = 0.0f; + } completion:nil]; + + [UIView animateWithDuration:0.2 delay:0.07 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^ + { + _deleteButton.transform = CGAffineTransformMakeScale(0.88f, 0.88f); + } completion:nil]; + + [UIView animateWithDuration:0.3 animations:^ + { + _sendButton.alpha = 1.0f; + }]; +} + +- (void)showScrubberView +{ + int animationCurveOption = iosMajorVersion() >= 7 ? 
(7 << 16) : 0;
+    [UIView animateWithDuration:0.2 delay:0.0 options:UIViewAnimationOptionBeginFromCurrentState | animationCurveOption animations:^
+    {
+        // (Removed a dead local: a translate/scale transform was computed here
+        // but never used — the scrubber simply animates back to identity.)
+        _scrubberView.transform = CGAffineTransformIdentity;
+    } completion:nil];
+}
+
+- (void)deleteButtonPressed
+{
+    // Disable further taps so the delete callback cannot fire twice.
+    _deleteButton.userInteractionEnabled = false;
+
+    if (self.deletePressed != nil)
+        self.deletePressed();
+}
+
+- (void)sendButtonPressed
+{
+    // Disable further taps so the send callback cannot fire twice.
+    _sendButton.userInteractionEnabled = false;
+
+    if (self.sendPressed != nil)
+        self.sendPressed();
+}
+
+- (void)cancelPressed
+{
+    dispatch_async(dispatch_get_main_queue(), ^
+    {
+        if (self.cancel != nil)
+            self.cancel();
+    });
+}
+
+// Updates the recording timer text (formatted by the caller).
+- (void)setDurationString:(NSString *)string
+{
+    _recordDurationLabel.text = string;
+}
+
+- (void)recordingStarted
+{
+    [self addRecordingDotAnimation];
+}
+
+// Blinking red "recording" dot: hold full opacity for ~45% of the cycle, fade
+// out by ~91%, stay invisible for the tail; autoreverses and repeats forever.
+- (void)addRecordingDotAnimation {
+    CAKeyframeAnimation *animation = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
+    // keyTimes must contain one entry per value for linear keyframe animation;
+    // the original had 4 keyTimes for 3 values (keyTimes would be ignored), and
+    // set 'duration' twice. A trailing 0.0 value preserves the intended timing.
+    animation.values = @[@1.0f, @1.0f, @0.0f, @0.0f];
+    animation.keyTimes = @[@0.0, @0.4546, @0.9091, @1.0];
+    animation.duration = 0.5;
+    animation.autoreverses = true;
+    animation.repeatCount = INFINITY;
+
+    [_recordIndicatorView.layer addAnimation:animation forKey:@"opacity-dot"];
+}
+
+- (void)removeDotAnimation {
+    [_recordIndicatorView.layer removeAnimationForKey:@"opacity-dot"];
+}
+
+- (void)layoutSubviews
+{
+    // Lifecycle override: give UIKit its chance first.
+    [super layoutSubviews];
+
+    if (_slideToCancelLabel != nil)
+    {
+        CGRect slideToCancelLabelFrame = viewFrame(_slideToCancelLabel);
+        setViewFrame(_slideToCancelLabel, CGRectMake(CGFloor((self.frame.size.width - slideToCancelLabelFrame.size.width) / 2.0f), CGFloor((self.frame.size.height - slideToCancelLabelFrame.size.height) / 2.0f), slideToCancelLabelFrame.size.width, slideToCancelLabelFrame.size.height));
+
+        CGRect slideToCancelArrowFrame = viewFrame(_slideToCancelArrow);
+        
setViewFrame(_slideToCancelArrow, CGRectMake(CGFloor((self.frame.size.width - slideToCancelLabelFrame.size.width) / 2.0f) - slideToCancelArrowFrame.size.width - 7.0f, CGFloor((self.frame.size.height - slideToCancelLabelFrame.size.height) / 2.0f), slideToCancelArrowFrame.size.width, slideToCancelArrowFrame.size.height)); + } + + setViewFrame(_sendButton, CGRectMake(self.frame.size.width - _sendButton.frame.size.width, 0.0f, _sendButton.frame.size.width, self.frame.size.height)); + _deleteButton.center = CGPointMake(24.0f, 22.0f); + setViewFrame(_scrubberView, CGRectMake(46.0f, (self.frame.size.height - 33.0f) / 2.0f, self.frame.size.width - 46.0f * 2.0f, 33.0f)); +} + +@end diff --git a/LegacyComponents/TGVideoMessageRingView.h b/LegacyComponents/TGVideoMessageRingView.h new file mode 100644 index 0000000000..58df786776 --- /dev/null +++ b/LegacyComponents/TGVideoMessageRingView.h @@ -0,0 +1,7 @@ +#import + +@interface TGVideoMessageRingView : UIView + +- (void)setValue:(CGFloat)value; + +@end diff --git a/LegacyComponents/TGVideoMessageRingView.m b/LegacyComponents/TGVideoMessageRingView.m new file mode 100644 index 0000000000..f8b3723cbc --- /dev/null +++ b/LegacyComponents/TGVideoMessageRingView.m @@ -0,0 +1,52 @@ +#import "TGVideoMessageRingView.h" + +#import "TGColor.h" + +@interface TGVideoMessageRingView () +{ + CGFloat _value; +} +@end + +@implementation TGVideoMessageRingView + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super initWithFrame:frame]; + if (self != nil) + { + self.backgroundColor = [UIColor clearColor]; + } + return self; +} + +- (void)setValue:(CGFloat)value +{ + _value = value; + [self setNeedsDisplay]; +} + +- (void)drawRect:(CGRect)rect +{ + if (_value < DBL_EPSILON) + return; + + CGContextRef context = UIGraphicsGetCurrentContext(); + CGContextSetFillColorWithColor(context, TGAccentColor().CGColor); + + CGMutablePathRef path = CGPathCreateMutable(); + CGPoint centerPoint = CGPointMake(rect.size.width / 2.0f, rect.size.height 
/ 2.0f); + CGFloat lineWidth = 4.0f; + + CGPathAddArc(path, NULL, centerPoint.x, centerPoint.y, rect.size.width / 2.0f - lineWidth / 2.0f, -M_PI_2, -M_PI_2 + 2 * M_PI * _value, false); + + CGPathRef strokedArc = CGPathCreateCopyByStrokingPath(path, NULL, lineWidth, kCGLineCapRound, kCGLineJoinMiter, 10); + CGPathRelease(path); + + CGContextAddPath(context, strokedArc); + CGPathRelease(strokedArc); + + CGContextFillPath(context); +} + +@end diff --git a/LegacyComponents/TGVideoMessageScrubber.h b/LegacyComponents/TGVideoMessageScrubber.h new file mode 100644 index 0000000000..23f0759533 --- /dev/null +++ b/LegacyComponents/TGVideoMessageScrubber.h @@ -0,0 +1,68 @@ +#import + +@protocol TGVideoMessageScrubberDelegate; +@protocol TGVideoMessageScrubberDataSource; + +@interface TGVideoMessageScrubber : UIView + +@property (nonatomic, weak) id delegate; +@property (nonatomic, weak) id dataSource; + +@property (nonatomic, readonly) NSTimeInterval duration; + +@property (nonatomic, assign) bool allowsTrimming; +@property (nonatomic, readonly) bool hasTrimming; +@property (nonatomic, assign) NSTimeInterval trimStartValue; +@property (nonatomic, assign) NSTimeInterval trimEndValue; + +@property (nonatomic, assign) NSTimeInterval maximumLength; + + +@property (nonatomic, assign) bool isPlaying; +@property (nonatomic, assign) NSTimeInterval value; +- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition; + +- (void)setTrimApplied:(bool)trimApplied; + +- (void)resetToStart; + +- (void)reloadData; +- (void)reloadDataAndReset:(bool)reset; + +- (void)reloadThumbnails; +- (void)ignoreThumbnails; +- (void)resetThumbnails; + +- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)timestamp isSummaryThubmnail:(bool)isSummaryThumbnail; + +@end + +@protocol TGVideoMessageScrubberDelegate + +- (void)videoScrubberDidBeginScrubbing:(TGVideoMessageScrubber *)videoScrubber; +- (void)videoScrubberDidEndScrubbing:(TGVideoMessageScrubber *)videoScrubber; +- 
(void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber valueDidChange:(NSTimeInterval)position; + +- (void)videoScrubberDidBeginEditing:(TGVideoMessageScrubber *)videoScrubber; +- (void)videoScrubberDidEndEditing:(TGVideoMessageScrubber *)videoScrubber endValueChanged:(bool)endValueChanged; +- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber editingStartValueDidChange:(NSTimeInterval)startValue; +- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber editingEndValueDidChange:(NSTimeInterval)endValue; + +- (void)videoScrubberDidFinishRequestingThumbnails:(TGVideoMessageScrubber *)videoScrubber; +- (void)videoScrubberDidCancelRequestingThumbnails:(TGVideoMessageScrubber *)videoScrubber; + +@end + +@protocol TGVideoMessageScrubberDataSource + +- (NSTimeInterval)videoScrubberDuration:(TGVideoMessageScrubber *)videoScrubber; + +- (NSArray *)videoScrubber:(TGVideoMessageScrubber *)videoScrubber evenlySpacedTimestamps:(NSInteger)count startingAt:(NSTimeInterval)startTimestamp endingAt:(NSTimeInterval)endTimestamp; + +- (void)videoScrubber:(TGVideoMessageScrubber *)videoScrubber requestThumbnailImagesForTimestamps:(NSArray *)timestamps size:(CGSize)size isSummaryThumbnails:(bool)isSummaryThumbnails; + +- (CGFloat)videoScrubberThumbnailAspectRatio:(TGVideoMessageScrubber *)videoScrubber; + +- (CGSize)videoScrubberOriginalSize:(TGVideoMessageScrubber *)videoScrubber cropRect:(CGRect *)cropRect cropOrientation:(UIImageOrientation *)cropOrientation cropMirrored:(bool *)cropMirrored; + +@end diff --git a/LegacyComponents/TGVideoMessageScrubber.m b/LegacyComponents/TGVideoMessageScrubber.m new file mode 100644 index 0000000000..a1bda31be2 --- /dev/null +++ b/LegacyComponents/TGVideoMessageScrubber.m @@ -0,0 +1,883 @@ +#import "TGVideoMessageScrubber.h" + +#import "LegacyComponentsInternal.h" +#import "TGImageUtils.h" +#import "POPBasicAnimation.h" + +#import + +#import + +#import "TGVideoMessageScrubberThumbnailView.h" +#import "TGVideoMessageTrimView.h" + 
+const CGFloat TGVideoScrubberMinimumTrimDuration = 1.0f; +const CGFloat TGVideoScrubberZoomActivationInterval = 0.25f; +const CGFloat TGVideoScrubberTrimRectEpsilon = 3.0f; + +typedef enum +{ + TGMediaPickerGalleryVideoScrubberPivotSourceHandle, + TGMediaPickerGalleryVideoScrubberPivotSourceTrimStart, + TGMediaPickerGalleryVideoScrubberPivotSourceTrimEnd +} TGMediaPickerGalleryVideoScrubberPivotSource; + +@interface TGVideoMessageScrubber () +{ + UIControl *_wrapperView; + UIView *_summaryThumbnailSnapshotView; + UIView *_zoomedThumbnailWrapperView; + UIView *_summaryThumbnailWrapperView; + TGVideoMessageTrimView *_trimView; + UIView *_leftCurtainView; + UIView *_rightCurtainView; + UIControl *_scrubberHandle; + + UIPanGestureRecognizer *_panGestureRecognizer; + UILongPressGestureRecognizer *_pressGestureRecognizer; + + bool _beganInteraction; + bool _endedInteraction; + + bool _scrubbing; + CGFloat _scrubbingPosition; + + NSTimeInterval _duration; + NSTimeInterval _trimStartValue; + NSTimeInterval _trimEndValue; + + bool _ignoreThumbnailLoad; + bool _fadingThumbnailViews; + CGFloat _thumbnailAspectRatio; + NSArray *_summaryTimestamps; + NSMutableArray *_summaryThumbnailViews; + + CGSize _originalSize; + CGRect _cropRect; + UIImageOrientation _cropOrientation; + bool _cropMirrored; + + UIImageView *_leftMaskView; + UIImageView *_rightMaskView; +} +@end + +@implementation TGVideoMessageScrubber + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super initWithFrame:frame]; + if (self != nil) + { + _allowsTrimming = true; + + _wrapperView = [[UIControl alloc] initWithFrame:CGRectMake(0, 0, 0, 33)]; + _wrapperView.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10); + [self addSubview:_wrapperView]; + + _zoomedThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)]; + [_wrapperView addSubview:_zoomedThumbnailWrapperView]; + + _summaryThumbnailWrapperView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 0, 33)]; + 
_summaryThumbnailWrapperView.clipsToBounds = true; + [_wrapperView addSubview:_summaryThumbnailWrapperView]; + + _leftMaskView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"VideoMessageScrubberLeftMask"]]; + [_wrapperView addSubview:_leftMaskView]; + + _rightMaskView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"VideoMessageScrubberRightMask"]]; + [_wrapperView addSubview:_rightMaskView]; + + _leftCurtainView = [[UIView alloc] init]; + _leftCurtainView.backgroundColor = [UIColorRGB(0xf7f7f7) colorWithAlphaComponent:0.8f]; + [_wrapperView addSubview:_leftCurtainView]; + + _rightCurtainView = [[UIView alloc] init]; + _rightCurtainView.backgroundColor = [UIColorRGB(0xf7f7f7) colorWithAlphaComponent:0.8f]; + [_wrapperView addSubview:_rightCurtainView]; + + __weak TGVideoMessageScrubber *weakSelf = self; + _trimView = [[TGVideoMessageTrimView alloc] initWithFrame:CGRectZero]; + _trimView.exclusiveTouch = true; + _trimView.trimmingEnabled = _allowsTrimming; + _trimView.didBeginEditing = ^(__unused bool start) + { + __strong TGVideoMessageScrubber *strongSelf = weakSelf; + if (strongSelf == nil) + return; + + id delegate = strongSelf.delegate; + if ([delegate respondsToSelector:@selector(videoScrubberDidBeginEditing:)]) + [delegate videoScrubberDidBeginEditing:strongSelf]; + + [strongSelf->_trimView setTrimming:true animated:true]; + + [strongSelf setScrubberHandleHidden:true animated:false]; + }; + _trimView.didEndEditing = ^(bool start) + { + __strong TGVideoMessageScrubber *strongSelf = weakSelf; + if (strongSelf == nil) + return; + + id delegate = strongSelf.delegate; + if ([delegate respondsToSelector:@selector(videoScrubberDidEndEditing:endValueChanged:)]) + [delegate videoScrubberDidEndEditing:strongSelf endValueChanged:!start]; + + CGRect newTrimRect = strongSelf->_trimView.frame; + CGRect trimRect = [strongSelf _scrubbingRect]; + CGRect normalScrubbingRect = [strongSelf _scrubbingRect]; + CGFloat maxWidth = trimRect.size.width + 
normalScrubbingRect.origin.x * 2; + + CGFloat leftmostPosition = trimRect.origin.x - normalScrubbingRect.origin.x; + if (newTrimRect.origin.x < leftmostPosition + TGVideoScrubberTrimRectEpsilon) + { + CGFloat delta = leftmostPosition - newTrimRect.origin.x; + + newTrimRect.origin.x += delta; + newTrimRect.size.width = MIN(maxWidth, newTrimRect.size.width - delta); + } + + CGFloat rightmostPosition = maxWidth; + if (CGRectGetMaxX(newTrimRect) > maxWidth - TGVideoScrubberTrimRectEpsilon) + { + CGFloat delta = rightmostPosition - CGRectGetMaxX(newTrimRect); + + newTrimRect.size.width = MIN(maxWidth, newTrimRect.size.width + delta); + } + + strongSelf->_trimView.frame = newTrimRect; + + NSTimeInterval trimStartPosition = 0.0; + NSTimeInterval trimEndPosition = 0.0; + + [strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:newTrimRect duration:strongSelf.duration]; + + strongSelf->_trimStartValue = trimStartPosition; + strongSelf->_trimEndValue = trimEndPosition; + + bool isTrimmed = (strongSelf->_trimStartValue > FLT_EPSILON || fabs(strongSelf->_trimEndValue - strongSelf->_duration) > FLT_EPSILON); + + [strongSelf->_trimView setTrimming:isTrimmed animated:true]; + + [strongSelf setScrubberHandleHidden:false animated:true]; + }; + _trimView.startHandleMoved = ^(CGPoint translation) + { + __strong TGVideoMessageScrubber *strongSelf = weakSelf; + if (strongSelf == nil) + return; + + UIView *trimView = strongSelf->_trimView; + + CGRect availableTrimRect = [strongSelf _scrubbingRect]; + CGRect normalScrubbingRect = [strongSelf _scrubbingRect]; + CGFloat originX = MAX(0, trimView.frame.origin.x + translation.x); + CGFloat delta = originX - trimView.frame.origin.x; + CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - originX; + + CGRect trimViewRect = CGRectMake(originX, trimView.frame.origin.y, MIN(maxWidth, trimView.frame.size.width - delta), trimView.frame.size.height); + + NSTimeInterval 
trimStartPosition = 0.0; + NSTimeInterval trimEndPosition = 0.0; + [strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration]; + + NSTimeInterval duration = trimEndPosition - trimStartPosition; + + if (trimEndPosition - trimStartPosition < TGVideoScrubberMinimumTrimDuration) + return; + + if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength) + { + trimViewRect = CGRectMake(trimView.frame.origin.x + delta, + trimView.frame.origin.y, + trimView.frame.size.width, + trimView.frame.size.height); + + [strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration]; + } + + trimView.frame = trimViewRect; + + [strongSelf _layoutTrimCurtainViews]; + + strongSelf->_trimStartValue = trimStartPosition; + strongSelf->_trimEndValue = trimEndPosition; + + [strongSelf setValue:strongSelf->_trimStartValue]; + + UIView *handle = strongSelf->_scrubberHandle; + handle.center = CGPointMake(trimView.frame.origin.x + 12 + handle.frame.size.width / 2, handle.center.y); + + id delegate = strongSelf.delegate; + if ([delegate respondsToSelector:@selector(videoScrubber:editingStartValueDidChange:)]) + [delegate videoScrubber:strongSelf editingStartValueDidChange:trimStartPosition]; + }; + _trimView.endHandleMoved = ^(CGPoint translation) + { + __strong TGVideoMessageScrubber *strongSelf = weakSelf; + if (strongSelf == nil) + return; + + UIView *trimView = strongSelf->_trimView; + + CGRect availableTrimRect = [strongSelf _scrubbingRect]; + CGRect normalScrubbingRect = [strongSelf _scrubbingRect]; + CGFloat localOriginX = trimView.frame.origin.x - availableTrimRect.origin.x + normalScrubbingRect.origin.x; + CGFloat maxWidth = availableTrimRect.size.width + normalScrubbingRect.origin.x * 2 - localOriginX; + + CGRect trimViewRect = CGRectMake(trimView.frame.origin.x, trimView.frame.origin.y, MIN(maxWidth, 
trimView.frame.size.width + translation.x), trimView.frame.size.height); + + NSTimeInterval trimStartPosition = 0.0; + NSTimeInterval trimEndPosition = 0.0; + [strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration]; + + NSTimeInterval duration = trimEndPosition - trimStartPosition; + + if (trimEndPosition - trimStartPosition < TGVideoScrubberMinimumTrimDuration) + return; + + if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength) + { + trimViewRect = CGRectMake(trimView.frame.origin.x + translation.x, trimView.frame.origin.y, trimView.frame.size.width, trimView.frame.size.height); + [strongSelf _trimStartPosition:&trimStartPosition trimEndPosition:&trimEndPosition forTrimFrame:trimViewRect duration:strongSelf.duration]; + } + + trimView.frame = trimViewRect; + + [strongSelf _layoutTrimCurtainViews]; + + strongSelf->_trimStartValue = trimStartPosition; + strongSelf->_trimEndValue = trimEndPosition; + + [strongSelf setValue:strongSelf->_trimEndValue]; + + UIView *handle = strongSelf->_scrubberHandle; + handle.center = CGPointMake(CGRectGetMaxX(trimView.frame) - 12 - handle.frame.size.width / 2, handle.center.y); + + id delegate = strongSelf.delegate; + if ([delegate respondsToSelector:@selector(videoScrubber:editingEndValueDidChange:)]) + [delegate videoScrubber:strongSelf editingEndValueDidChange:trimEndPosition]; + }; + [_wrapperView addSubview:_trimView]; + + _scrubberHandle = [[UIControl alloc] initWithFrame:CGRectMake(0, -1, 8, 33.0f)]; + _scrubberHandle.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -10, -5, -10); + //[_wrapperView addSubview:_scrubberHandle]; + + static UIImage *handleViewImage = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^ + { + UIGraphicsBeginImageContextWithOptions(CGSizeMake(_scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height), false, 0.0f); + CGContextRef context = 
UIGraphicsGetCurrentContext(); + CGContextSetShadowWithColor(context, CGSizeMake(0, 0.0f), 0.5f, [UIColor colorWithWhite:0.0f alpha:0.65f].CGColor); + CGContextSetFillColorWithColor(context, [UIColor whiteColor].CGColor); + + UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:CGRectMake(1.0f, 1.5f, _scrubberHandle.frame.size.width - 2.0f, _scrubberHandle.frame.size.height - 2.0f) cornerRadius:2]; + [path fill]; + + handleViewImage = UIGraphicsGetImageFromCurrentImageContext(); + UIGraphicsEndImageContext(); + }); + + UIImageView *scrubberImageView = [[UIImageView alloc] initWithFrame:_scrubberHandle.bounds]; + scrubberImageView.image = handleViewImage; + [_scrubberHandle addSubview:scrubberImageView]; + + _pressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePress:)]; + _pressGestureRecognizer.delegate = self; + _pressGestureRecognizer.minimumPressDuration = 0.1f; + //[_scrubberHandle addGestureRecognizer:_pressGestureRecognizer]; + + _panGestureRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)]; + _panGestureRecognizer.delegate = self; + //[_scrubberHandle addGestureRecognizer:_panGestureRecognizer]; + } + return self; +} + +- (void)reloadThumbnails +{ + [self resetThumbnails]; + + id dataSource = self.dataSource; + + _summaryThumbnailViews = [[NSMutableArray alloc] init]; + + if ([dataSource respondsToSelector:@selector(videoScrubberOriginalSize:cropRect:cropOrientation:cropMirrored:)]) + _originalSize = [dataSource videoScrubberOriginalSize:self cropRect:&_cropRect cropOrientation:&_cropOrientation cropMirrored:&_cropMirrored]; + + CGFloat originalAspectRatio = 1.0f; + CGFloat frameAspectRatio = 1.0f; + if ([dataSource respondsToSelector:@selector(videoScrubberThumbnailAspectRatio:)]) + originalAspectRatio = [dataSource videoScrubberThumbnailAspectRatio:self]; + + if (!CGRectEqualToRect(_cropRect, CGRectZero)) + frameAspectRatio = _cropRect.size.width / 
// --- continuation of -[TGVideoMessageScrubber reloadThumbnails] ---
// NOTE(review): the opening lines of this method precede this chunk; the
// fragment below completes the statement "frameAspectRatio = _cropRect.size.width /".
_cropRect.size.height;
    else
        frameAspectRatio = originalAspectRatio;

    _thumbnailAspectRatio = frameAspectRatio;

    // Enough thumbnails to tile the full width of the summary strip.
    NSInteger thumbnailCount = (NSInteger)CGCeil(_summaryThumbnailWrapperView.frame.size.width / [self _thumbnailSizeWithAspectRatio:frameAspectRatio orientation:_cropOrientation].width);

    if ([dataSource respondsToSelector:@selector(videoScrubber:evenlySpacedTimestamps:startingAt:endingAt:)])
        _summaryTimestamps = [dataSource videoScrubber:self evenlySpacedTimestamps:thumbnailCount startingAt:0 endingAt:_duration];

    // Request images at up to 2x scale; higher scales gain nothing for 33pt-tall thumbnails.
    CGSize thumbnailImageSize = [self _thumbnailSizeWithAspectRatio:originalAspectRatio orientation:UIImageOrientationUp];
    CGFloat scale = MIN(2.0f, TGScreenScaling());
    thumbnailImageSize = CGSizeMake(thumbnailImageSize.width * scale, thumbnailImageSize.height * scale);

    if ([dataSource respondsToSelector:@selector(videoScrubber:requestThumbnailImagesForTimestamps:size:isSummaryThumbnails:)])
        [dataSource videoScrubber:self requestThumbnailImagesForTimestamps:_summaryTimestamps size:thumbnailImageSize isSummaryThumbnails:true];
}

/// Suppresses the "did finish requesting thumbnails" delegate callback for the
/// batch currently being loaded (reset again once the batch completes).
- (void)ignoreThumbnails
{
    _ignoreThumbnailLoad = true;
}

/// Removes all thumbnail views and clears cached timestamps. If a request was
/// still in flight, notifies the delegate that it was cancelled.
- (void)resetThumbnails
{
    _ignoreThumbnailLoad = false;

    if (_summaryThumbnailViews.count < _summaryTimestamps.count)
    {
        id delegate = self.delegate;
        if ([delegate respondsToSelector:@selector(videoScrubberDidCancelRequestingThumbnails:)])
            [delegate videoScrubberDidCancelRequestingThumbnails:self];
    }

    for (UIView *view in _summaryThumbnailWrapperView.subviews)
        [view removeFromSuperview];

    for (UIView *view in _zoomedThumbnailWrapperView.subviews)
        [view removeFromSuperview];

    _summaryThumbnailViews = nil;
    _summaryTimestamps = nil;
}

- (void)reloadData
{
    [self reloadDataAndReset:true];
}

/// Re-queries the data source. When reset is false and thumbnails already
/// exist, keeps a snapshot of the old strip on screen to cross-fade from once
/// the new thumbnails arrive.
- (void)reloadDataAndReset:(bool)reset
{
    id dataSource = self.dataSource;
    if ([dataSource respondsToSelector:@selector(videoScrubberDuration:)])
        _duration = [dataSource videoScrubberDuration:self];
    else
        return;

    if (!reset && _summaryThumbnailViews.count > 0 && _summaryThumbnailSnapshotView == nil)
    {
        _summaryThumbnailSnapshotView = [_summaryThumbnailWrapperView snapshotViewAfterScreenUpdates:false];
        _summaryThumbnailSnapshotView.frame = _summaryThumbnailWrapperView.frame;
        [_summaryThumbnailWrapperView.superview insertSubview:_summaryThumbnailSnapshotView aboveSubview:_summaryThumbnailWrapperView];
    }
    else if (reset)
    {
        [_summaryThumbnailSnapshotView removeFromSuperview];
        _summaryThumbnailSnapshotView = nil;
    }

    [self _layoutTrimView];

    [self reloadThumbnails];
}

/// Adds one incoming thumbnail. When the final thumbnail of the summary batch
/// arrives, lays the strip out and cross-fades away any stale snapshot.
/// NOTE(review): the selector misspelling "Thubmnail" is preserved on purpose —
/// it is public API and callers elsewhere use this exact selector.
- (void)setThumbnailImage:(UIImage *)image forTimestamp:(NSTimeInterval)__unused timestamp isSummaryThubmnail:(bool)isSummaryThumbnail
{
    TGVideoMessageScrubberThumbnailView *thumbnailView = [[TGVideoMessageScrubberThumbnailView alloc] initWithImage:image originalSize:_originalSize cropRect:_cropRect cropOrientation:_cropOrientation cropMirrored:_cropMirrored];

    if (isSummaryThumbnail)
    {
        [_summaryThumbnailWrapperView addSubview:thumbnailView];
        [_summaryThumbnailViews addObject:thumbnailView];
    }

    if (isSummaryThumbnail && _summaryThumbnailViews.count == _summaryTimestamps.count)
    {
        if (!_ignoreThumbnailLoad)
        {
            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidFinishRequestingThumbnails:)])
                [delegate videoScrubberDidFinishRequestingThumbnails:self];
        }
        _ignoreThumbnailLoad = false;

        // (Was a redundant nested `if (isSummaryThumbnail)` — always true here.)
        [self _layoutSummaryThumbnailViews];

        UIView *snapshotView = _summaryThumbnailSnapshotView;
        _summaryThumbnailSnapshotView = nil;

        if (snapshotView != nil)
        {
            _fadingThumbnailViews = true;
            [UIView animateWithDuration:0.3f animations:^
            {
                snapshotView.alpha = 0.0f;
            } completion:^(__unused BOOL finished)
            {
                _fadingThumbnailViews = false;
                [snapshotView removeFromSuperview];
            }];
        }
    }
}

- (CGSize)_thumbnailSize
{
    return [self _thumbnailSizeWithAspectRatio:_thumbnailAspectRatio orientation:_cropOrientation];
}

// Fixed 33x33 tiles for the round video-message scrubber regardless of aspect.
- (CGSize)_thumbnailSizeWithAspectRatio:(CGFloat)__unused aspectRatio orientation:(UIImageOrientation)__unused orientation
{
    return CGSizeMake(33, 33);
}

// Centers the row of summary thumbnails inside the wrapper.
- (void)_layoutSummaryThumbnailViews
{
    if (_summaryThumbnailViews.count == 0)
        return;

    CGSize thumbnailViewSize = [self _thumbnailSize];
    CGFloat totalWidth = thumbnailViewSize.width * _summaryThumbnailViews.count;
    CGFloat originX = (_summaryThumbnailWrapperView.frame.size.width - totalWidth) / 2;

    [_summaryThumbnailViews enumerateObjectsUsingBlock:^(UIView *view, NSUInteger index, __unused BOOL *stop)
    {
        view.frame = CGRectMake(originX + thumbnailViewSize.width * index, 0, thumbnailViewSize.width, thumbnailViewSize.height);
    }];
}

/// Starts or stops the linear handle animation that tracks playback.
- (void)setIsPlaying:(bool)isPlaying
{
    _isPlaying = isPlaying;

    if (_isPlaying)
        [self _updateScrubberAnimationsAndResetCurrentPosition:false];
    else
        [self removeHandleAnimation];
}

- (void)setValue:(NSTimeInterval)value
{
    [self setValue:value resetPosition:false];
}

/// Sets the current playback position (clamped to the duration). When
/// resetPosition is true the handle is snapped/re-animated immediately.
- (void)setValue:(NSTimeInterval)value resetPosition:(bool)resetPosition
{
    if (_duration < FLT_EPSILON)
        return;

    if (value > _duration)
        value = _duration;

    _value = value;

    if (resetPosition)
        [self _updateScrubberAnimationsAndResetCurrentPosition:true];
}

/// Positions the handle for the current value and, while playing, installs a
/// linear POP animation that carries it to the (possibly trimmed) end.
- (void)_updateScrubberAnimationsAndResetCurrentPosition:(bool)resetCurrentPosition
{
    if (_duration < FLT_EPSILON)
        return;

    CGPoint point = [self _scrubberPositionForPosition:_value duration:_duration];
    CGRect frame = CGRectMake(CGFloor(point.x) - _scrubberHandle.frame.size.width / 2, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);

    // Snap exactly onto the trim handles when the value sits on a trim edge.
    if (_trimStartValue > DBL_EPSILON && fabs(_value - _trimStartValue) < 0.01)
    {
        frame = CGRectMake(_trimView.frame.origin.x + [self _scrubbingRect].origin.x, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);
    }
    else if (fabs(_value - _trimEndValue) < 0.01)
    {
        frame = CGRectMake(_trimView.frame.origin.x + _trimView.frame.size.width - [self _scrubbingRect].origin.x - _scrubberHandle.frame.size.width, _scrubberHandle.frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);
    }

    if (_isPlaying)
    {
        if (resetCurrentPosition)
            _scrubberHandle.frame = frame;

        CGRect scrubbingRect = [self _scrubbingRect];
        CGFloat maxPosition = scrubbingRect.origin.x + scrubbingRect.size.width - _scrubberHandle.frame.size.width / 2;
        NSTimeInterval duration = _duration;
        NSTimeInterval value = _value;

        if (self.allowsTrimming)
        {
            maxPosition = MIN(maxPosition, CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x - _scrubberHandle.frame.size.width / 2);
            duration = _trimEndValue - _trimStartValue;
            value = _value - _trimStartValue;
        }

        CGRect endFrame = CGRectMake(maxPosition - _scrubberHandle.frame.size.width / 2, frame.origin.y, _scrubberHandle.frame.size.width, _scrubberHandle.frame.size.height);

        [self addHandleAnimationFromFrame:_scrubberHandle.frame toFrame:endFrame duration:MAX(0.0, duration - value)];
    }
    else
    {
        [self removeHandleAnimation];
        _scrubberHandle.frame = frame;
    }
}

/// Installs a linear, frame-interpolating POP animation on the handle,
/// replacing any previous one (keyed "progress").
- (void)addHandleAnimationFromFrame:(CGRect)fromFrame toFrame:(CGRect)toFrame duration:(NSTimeInterval)duration
{
    [self removeHandleAnimation];

    POPBasicAnimation *animation = [POPBasicAnimation animationWithPropertyNamed:kPOPViewFrame];
    animation.fromValue = [NSValue valueWithCGRect:fromFrame];
    animation.toValue = [NSValue valueWithCGRect:toFrame];
    animation.duration = duration;
    animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    animation.clampMode = kPOPAnimationClampBoth;
    animation.roundingFactor = 0.5f;

    [_scrubberHandle pop_addAnimation:animation forKey:@"progress"];
}

- (void)removeHandleAnimation
{
    [_scrubberHandle pop_removeAnimationForKey:@"progress"];
}

/// Jumps the handle back to the trim start (no animation).
- (void)resetToStart
{
    _value = _trimStartValue;

    [self removeHandleAnimation];
    _scrubberHandle.center = CGPointMake(_trimView.frame.origin.x + [self _scrubbingRect].origin.x + _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
}

#pragma mark - Scrubber Handle

// Press + pan on the same view must recognize together.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    if (gestureRecognizer.view != otherGestureRecognizer.view)
        return false;

    return true;
}

/// Long-press begins/ends a scrubbing session. The _beganInteraction /
/// _endedInteraction flags de-duplicate callbacks when press and pan both fire.
- (void)handlePress:(UILongPressGestureRecognizer *)gestureRecognizer
{
    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _scrubbing = true;

            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidBeginScrubbing:)])
                [delegate videoScrubberDidBeginScrubbing:self];

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _scrubbing = false;

            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidEndScrubbing:)])
                [delegate videoScrubberDidEndScrubbing:self];

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

/// Pan drags the handle within the scrubbing rect (clamped to the trim range
/// when trimming is enabled) and reports the new value to the delegate.
- (void)handlePan:(UIPanGestureRecognizer *)gestureRecognizer
{
    CGPoint translation = [gestureRecognizer translationInView:self];
    [gestureRecognizer setTranslation:CGPointZero inView:self];

    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _scrubbing = true;

            [self removeHandleAnimation];

            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidBeginScrubbing:)])
                [delegate videoScrubberDidBeginScrubbing:self];

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateChanged:
        {
            CGRect scrubbingRect = [self _scrubbingRect];
            CGRect normalScrubbingRect = [self _scrubbingRect];
            CGFloat minPosition = scrubbingRect.origin.x + _scrubberHandle.frame.size.width / 2;
            CGFloat maxPosition = scrubbingRect.origin.x + scrubbingRect.size.width - _scrubberHandle.frame.size.width / 2;
            if (self.allowsTrimming)
            {
                minPosition = MAX(minPosition, _trimView.frame.origin.x + normalScrubbingRect.origin.x + _scrubberHandle.frame.size.width / 2);
                maxPosition = MIN(maxPosition, CGRectGetMaxX(_trimView.frame) - normalScrubbingRect.origin.x - _scrubberHandle.frame.size.width / 2);
            }

            _scrubberHandle.center = CGPointMake(MIN(MAX(_scrubberHandle.center.x + translation.x, minPosition), maxPosition), _scrubberHandle.center.y);

            NSTimeInterval position = [self _positionForScrubberPosition:_scrubberHandle.center duration:_duration];

            // At the clamped edges, report the exact trim values instead of
            // a position recomputed from pixels.
            if (self.allowsTrimming)
            {
                if (ABS(_scrubberHandle.center.x - minPosition) < FLT_EPSILON)
                    position = _trimStartValue;
                else if (ABS(_scrubberHandle.center.x - maxPosition) < FLT_EPSILON)
                    position = _trimEndValue;
            }

            _value = position;

            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubber:valueDidChange:)])
                [delegate videoScrubber:self valueDidChange:position];
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _scrubbing = false;

            id delegate = self.delegate;
            if ([delegate respondsToSelector:@selector(videoScrubberDidEndScrubbing:)])
                [delegate videoScrubberDidEndScrubbing:self];

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

/// Shows/hides the handle; the animated path fades alpha first so the hide is
/// not applied if the animation is interrupted.
- (void)setScrubberHandleHidden:(bool)hidden animated:(bool)animated
{
    if (animated)
    {
        _scrubberHandle.hidden = false;
        [UIView animateWithDuration:0.25f animations:^
        {
            _scrubberHandle.alpha = hidden ? 0.0f : 1.0f;
        } completion:^(BOOL finished)
        {
            if (finished)
                _scrubberHandle.hidden = hidden;
        }];
    }
    else
    {
        _scrubberHandle.hidden = hidden;
        _scrubberHandle.alpha = hidden ? 0.0f : 1.0f;
    }
}

// Maps a time position to the handle's center point inside the scrubbing rect.
- (CGPoint)_scrubberPositionForPosition:(NSTimeInterval)position duration:(NSTimeInterval)duration
{
    CGRect scrubbingRect = [self _scrubbingRect];

    if (duration < FLT_EPSILON)
    {
        position = 0.0;
        duration = 1.0;
    }

    return CGPointMake(_scrubberHandle.frame.size.width / 2 + scrubbingRect.origin.x + (CGFloat)(position / duration) * (scrubbingRect.size.width - _scrubberHandle.frame.size.width), CGRectGetMidY([self _scrubbingRect]));
}

// Inverse of _scrubberPositionForPosition:duration:.
- (NSTimeInterval)_positionForScrubberPosition:(CGPoint)scrubberPosition duration:(NSTimeInterval)duration
{
    CGRect scrubbingRect = [self _scrubbingRect];
    return (scrubberPosition.x - _scrubberHandle.frame.size.width / 2 - scrubbingRect.origin.x) / (scrubbingRect.size.width - _scrubberHandle.frame.size.width) * duration;
}

// Horizontal band available for scrubbing; insets leave room for trim handles
// (16pt) or just the hairline border (2pt) when trimming is disabled.
- (CGRect)_scrubbingRect
{
    CGFloat width = self.frame.size.width;
    CGFloat origin = 0;
    if (self.allowsTrimming)
    {
        width = width - 16 * 2;
        origin = 16;
    }
    else
    {
        width = width - 2 * 2;
        origin = 2;
    }

    return CGRectMake(origin, 0, width, 33);
}

#pragma mark - Trimming

/// True when the user has actually narrowed the trim range.
- (bool)hasTrimming
{
    return (_allowsTrimming && (_trimStartValue > FLT_EPSILON || _trimEndValue < _duration));
}

- (void)setAllowsTrimming:(bool)allowsTrimming
{
    _allowsTrimming = allowsTrimming;
    _trimView.trimmingEnabled = allowsTrimming;
}

- (NSTimeInterval)trimStartValue
{
    return MAX(0.0, _trimStartValue);
}

- (void)setTrimStartValue:(NSTimeInterval)trimStartValue
{
    _trimStartValue = trimStartValue;

    [self _layoutTrimView];

    if (_value < _trimStartValue)
    {
        [self setValue:_trimStartValue];
        _scrubberHandle.center = CGPointMake(_trimView.frame.origin.x + 12 + _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
    }
}

- (NSTimeInterval)trimEndValue
{
    return MIN(_duration, _trimEndValue);
}

- (void)setTrimEndValue:(NSTimeInterval)trimEndValue
{
    _trimEndValue = trimEndValue;

    [self _layoutTrimView];

    if (_value > _trimEndValue)
    {
        [self setValue:_trimEndValue];
        _scrubberHandle.center = CGPointMake(CGRectGetMaxX(_trimView.frame) - 12 - _scrubberHandle.frame.size.width / 2, _scrubberHandle.center.y);
    }
}

- (void)setTrimApplied:(bool)trimApplied
{
    [_trimView setTrimming:trimApplied animated:false];
}

/// Converts a trim-view frame to (start, end) times. The 12pt insets appear
/// to account for the trim handle chrome — TODO confirm against the handle art.
- (void)_trimStartPosition:(NSTimeInterval *)trimStartPosition trimEndPosition:(NSTimeInterval *)trimEndPosition forTrimFrame:(CGRect)trimFrame duration:(NSTimeInterval)duration
{
    if (trimStartPosition == NULL || trimEndPosition == NULL)
        return;

    CGRect trimRect = [self _scrubbingRect];

    *trimStartPosition = (CGRectGetMinX(trimFrame) + 12 - trimRect.origin.x) / trimRect.size.width * duration;
    *trimEndPosition = (CGRectGetMaxX(trimFrame) - 12 - trimRect.origin.x) / trimRect.size.width * duration;
}

// Inverse mapping: (start, end) times to the trim view's frame.
- (CGRect)_trimFrameForStartPosition:(NSTimeInterval)startPosition endPosition:(NSTimeInterval)endPosition duration:(NSTimeInterval)duration
{
    CGRect trimRect = [self _scrubbingRect];
    CGRect normalScrubbingRect = [self _scrubbingRect];

    CGFloat minX = (CGFloat)startPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x - normalScrubbingRect.origin.x;
    CGFloat maxX = (CGFloat)endPosition * trimRect.size.width / (CGFloat)duration + trimRect.origin.x + normalScrubbingRect.origin.x;

    return CGRectMake(minX, 0, maxX - minX, 33);
}

- (void)_layoutTrimView
{
    if (_duration > DBL_EPSILON)
    {
        NSTimeInterval endPosition = _trimEndValue;
        if (endPosition < DBL_EPSILON)
            endPosition = _duration;

        // FIX: previously passed _trimEndValue here, ignoring the fallback
        // computed above — an unset (0) trim end collapsed the trim view.
        _trimView.frame = [self _trimFrameForStartPosition:_trimStartValue endPosition:endPosition duration:_duration];
    }
    else
    {
        _trimView.frame = _wrapperView.bounds;
    }

    [self _layoutTrimCurtainViews];
}

// Dims the areas left and right of the trim view while trimming is enabled.
- (void)_layoutTrimCurtainViews
{
    _leftCurtainView.hidden = !self.allowsTrimming;
    _rightCurtainView.hidden = !self.allowsTrimming;

    if (self.allowsTrimming)
    {
        CGRect scrubbingRect = [self _scrubbingRect];
        CGRect normalScrubbingRect = [self _scrubbingRect];

        _leftCurtainView.frame = CGRectMake(scrubbingRect.origin.x - 16.0f, 0.0f, _trimView.frame.origin.x - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 16.0f, 33);
        _rightCurtainView.frame = CGRectMake(CGRectGetMaxX(_trimView.frame) - 16.0f, 0.0f, scrubbingRect.origin.x + scrubbingRect.size.width - CGRectGetMaxX(_trimView.frame) - scrubbingRect.origin.x + normalScrubbingRect.origin.x + 32.0f, 33);
    }
}

#pragma mark - Layout

- (void)setFrame:(CGRect)frame
{
    [super setFrame:frame];

    _summaryThumbnailWrapperView.frame = CGRectMake(0.0f, 0.0f, frame.size.width, 33);
    _zoomedThumbnailWrapperView.frame = _summaryThumbnailWrapperView.frame;

    _leftMaskView.frame = CGRectMake(0.0f, 0.0f, 16.0f, 33.0f);
    _rightMaskView.frame = CGRectMake(frame.size.width - 16.0f, 0.0f, 16.0f, 33.0f);
}

- (void)layoutSubviews
{
    _wrapperView.frame = CGRectMake(0, 0, self.frame.size.width, 33);
    [self _layoutTrimView];

    [self _updateScrubberAnimationsAndResetCurrentPosition:true];
}

@end

// ---------------------------------------------------------------------------
// diff metadata (reconstructed from the mangled patch):
// diff --git a/LegacyComponents/TGVideoMessageScrubberThumbnailView.h
//          b/LegacyComponents/TGVideoMessageScrubberThumbnailView.h
// new file mode 100644
// ---------------------------------------------------------------------------

#import <UIKit/UIKit.h> // NOTE(review): original system import target lost in extraction — confirm

@interface TGVideoMessageScrubberThumbnailView : UIView

/// Designated initializer; the crop parameters describe how the source frame
/// was cropped/rotated so the thumbnail can be rendered with the same transform.
- (instancetype)initWithImage:(UIImage *)image originalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored;

@end

// diff --git a/LegacyComponents/TGVideoMessageScrubberThumbnailView.m
// ---------------------------------------------------------------------------
// b/LegacyComponents/TGVideoMessageScrubberThumbnailView.m  (new file 100644)
// Reconstructed with conventional formatting from the mangled patch.
// ---------------------------------------------------------------------------

#import "TGVideoMessageScrubberThumbnailView.h"

#import "LegacyComponentsInternal.h"
#import "TGImageUtils.h"

#import <UIKit/UIKit.h> // NOTE(review): original system import target lost in extraction — confirm

@interface TGVideoMessageScrubberThumbnailView ()
{
    CGSize _originalSize;
    CGRect _cropRect;
    UIImageOrientation _cropOrientation;
    bool _cropMirrored;

    UIImageView *_imageView;
    UIView *_stripeView;
}
@end

@implementation TGVideoMessageScrubberThumbnailView

/// Builds the clipped image view plus a thin dark separator stripe on the
/// trailing edge.
- (instancetype)initWithImage:(UIImage *)image
{
    self = [super initWithFrame:CGRectZero];
    if (self != nil)
    {
        self.clipsToBounds = true;

        _imageView = [[UIImageView alloc] initWithFrame:CGRectZero];
        _imageView.image = image;
        [self addSubview:_imageView];

        _stripeView = [[UIView alloc] init];
        _stripeView.backgroundColor = [UIColor colorWithWhite:0.0f alpha:0.3f];
        [self addSubview:_stripeView];
    }
    return self;
}

/// Designated initializer: remembers the crop geometry used to present the
/// thumbnail with the same crop/rotation/mirroring as the source video.
- (instancetype)initWithImage:(UIImage *)image originalSize:(CGSize)originalSize cropRect:(CGRect)cropRect cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored
{
    self = [self initWithImage:image];
    if (self != nil)
    {
        _originalSize = originalSize;
        _cropRect = cropRect;
        _cropOrientation = cropOrientation;
        _cropMirrored = cropMirrored;
    }
    return self;
}

/// Lays out the image view so that the crop rect (remapped for the stored
/// orientation) exactly fills this view's bounds.
- (void)setFrame:(CGRect)frame
{
    [super setFrame:frame];

    if (_imageView == nil)
        return;

    // Apply the video's rotation, and a horizontal flip when mirrored.
    CGAffineTransform transform = CGAffineTransformMakeRotation(TGRotationForOrientation(_cropOrientation));
    if (_cropMirrored)
        transform = CGAffineTransformScale(transform, -1.0f, 1.0f);
    _imageView.transform = transform;

    CGRect effectiveCropRect = _cropRect;
    CGSize effectiveSize = _originalSize;

    // Remap the crop rect into the rotated coordinate space.
    switch (_cropOrientation)
    {
        case UIImageOrientationLeft:
            effectiveCropRect = CGRectMake(effectiveCropRect.origin.y, effectiveSize.width - effectiveCropRect.size.width - effectiveCropRect.origin.x, effectiveCropRect.size.height, effectiveCropRect.size.width);
            effectiveSize = CGSizeMake(effectiveSize.height, effectiveSize.width);
            break;

        case UIImageOrientationRight:
            effectiveCropRect = CGRectMake(effectiveSize.height - effectiveCropRect.size.height - effectiveCropRect.origin.y, effectiveCropRect.origin.x, effectiveCropRect.size.height, effectiveCropRect.size.width);
            effectiveSize = CGSizeMake(effectiveSize.height, effectiveSize.width);
            break;

        case UIImageOrientationDown:
            effectiveCropRect = CGRectMake(effectiveSize.width - effectiveCropRect.size.width - effectiveCropRect.origin.x, effectiveSize.height - effectiveCropRect.size.height - effectiveCropRect.origin.y, effectiveCropRect.size.width, effectiveCropRect.size.height);
            break;

        default:
            break;
    }

    // Scale the full image so the crop region spans the thumbnail width,
    // then offset it so the crop origin lands at (0, 0).
    CGFloat widthRatio = frame.size.width / effectiveCropRect.size.width;
    _imageView.frame = CGRectMake(-effectiveCropRect.origin.x * widthRatio, -effectiveCropRect.origin.y * widthRatio, effectiveSize.width * widthRatio, effectiveSize.height * widthRatio);

    // Hairline separator on the trailing edge.
    CGFloat stripeWidth = 1.0f - TGRetinaPixel;
    _stripeView.frame = CGRectMake(frame.size.width - stripeWidth, 0, stripeWidth, frame.size.height);
}

@end

// ---------------------------------------------------------------------------
// diff --git a/LegacyComponents/TGVideoMessageTrimView.h
//          b/LegacyComponents/TGVideoMessageTrimView.h  (new file 100644)
// ---------------------------------------------------------------------------

#import <UIKit/UIKit.h> // NOTE(review): original system import target lost in extraction — confirm

@interface TGVideoMessageTrimView : UIControl

// Callbacks report which handle is involved: true = start (left) handle.
@property (nonatomic, copy) void(^didBeginEditing)(bool start);
@property (nonatomic, copy) void(^startHandleMoved)(CGPoint translation);
@property (nonatomic, copy) void(^endHandleMoved)(CGPoint translation);
@property (nonatomic, copy) void(^didEndEditing)(bool start);

@property (nonatomic, assign) bool trimmingEnabled;

- (void)setTrimming:(bool)trimming animated:(bool)animated;

@end

// ---------------------------------------------------------------------------
// diff --git a/LegacyComponents/TGVideoMessageTrimView.m
//          b/LegacyComponents/TGVideoMessageTrimView.m  (new file 100644)
// ---------------------------------------------------------------------------

#import "TGVideoMessageTrimView.h"

#import <UIKit/UIKit.h> // NOTE(review): original system import target lost in extraction — confirm

@interface TGVideoMessageTrimView ()
{
    UIButton *_leftSegmentView;
    UIButton *_rightSegmentView;

    UILongPressGestureRecognizer *_startHandlePressGestureRecognizer;
    UILongPressGestureRecognizer *_endHandlePressGestureRecognizer;

    UIPanGestureRecognizer *_startHandlePanGestureRecognizer;
    UIPanGestureRecognizer *_endHandlePanGestureRecognizer;

    bool _beganInteraction;
    bool _endedInteraction;

    bool _isTracking;
}
@end

@implementation TGVideoMessageTrimView

/// Builds the two draggable trim handles, each with a long-press recognizer
/// (for begin/end editing callbacks) and a pan recognizer (for movement).
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self == nil)
        return nil;

    self.hitTestEdgeInsets = UIEdgeInsetsMake(-5, -25, -5, -25);

    _leftSegmentView = [self _makeSegmentViewWithImageName:@"VideoMessageLeftHandle"
                                         hitTestEdgeInsets:UIEdgeInsetsMake(-5, -25, -5, -10)];
    _rightSegmentView = [self _makeSegmentViewWithImageName:@"VideoMessageRightHandle"
                                          hitTestEdgeInsets:UIEdgeInsetsMake(-5, -10, -5, -25)];

    _startHandlePressGestureRecognizer = [self _attachPressRecognizerToView:_leftSegmentView];
    _endHandlePressGestureRecognizer = [self _attachPressRecognizerToView:_rightSegmentView];

    _startHandlePanGestureRecognizer = [self _attachPanRecognizerToView:_leftSegmentView];
    _endHandlePanGestureRecognizer = [self _attachPanRecognizerToView:_rightSegmentView];

    return self;
}

// Creates one 16x33 handle button and adds it as a subview.
- (UIButton *)_makeSegmentViewWithImageName:(NSString *)imageName hitTestEdgeInsets:(UIEdgeInsets)hitTestEdgeInsets
{
    UIButton *segmentView = [[UIButton alloc] initWithFrame:CGRectMake(0, 0, 16, 33)];
    segmentView.exclusiveTouch = true;
    segmentView.adjustsImageWhenHighlighted = false;
    [segmentView setBackgroundImage:[UIImage imageNamed:imageName] forState:UIControlStateNormal];
    segmentView.hitTestEdgeInsets = hitTestEdgeInsets;
    [self addSubview:segmentView];
    return segmentView;
}

// Installs a 0.1s long-press recognizer on the given handle.
- (UILongPressGestureRecognizer *)_attachPressRecognizerToView:(UIView *)view
{
    UILongPressGestureRecognizer *recognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePress:)];
    recognizer.delegate = self;
    recognizer.minimumPressDuration = 0.1f;
    [view addGestureRecognizer:recognizer];
    return recognizer;
}

// Installs a pan recognizer on the given handle.
- (UIPanGestureRecognizer *)_attachPanRecognizerToView:(UIView *)view
{
    UIPanGestureRecognizer *recognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handleHandlePan:)];
    recognizer.delegate = self;
    [view addGestureRecognizer:recognizer];
    return recognizer;
}

- (void)setTrimmingEnabled:(bool)trimmingEnabled
{
    _trimmingEnabled = trimmingEnabled;

    _leftSegmentView.userInteractionEnabled = trimmingEnabled;
    _rightSegmentView.userInteractionEnabled = trimmingEnabled;

    [self setNeedsLayout];
}

// Intentionally a no-op in this implementation (declared for API symmetry).
- (void)setTrimming:(bool)__unused trimming animated:(bool)__unused animated
{
}

/// Press begin/end brackets an editing session; the interaction flags
/// de-duplicate callbacks when press and pan fire for the same touch.
- (void)handleHandlePress:(UILongPressGestureRecognizer *)gestureRecognizer
{
    bool isStartHandle = (gestureRecognizer.view == _leftSegmentView);

    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _isTracking = true;

            if (self.didBeginEditing != nil)
                self.didBeginEditing(isStartHandle);

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _isTracking = false;

            if (self.didEndEditing != nil)
                self.didEndEditing(isStartHandle);

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

/// Pan movement is forwarded (as an incremental translation) to the matching
/// start/end handle callback; translation is reset each event.
- (void)handleHandlePan:(UIPanGestureRecognizer *)gestureRecognizer
{
    CGPoint translation = [gestureRecognizer translationInView:self];
    [gestureRecognizer setTranslation:CGPointZero inView:self];

    bool isStartHandle = (gestureRecognizer.view == _leftSegmentView);

    switch (gestureRecognizer.state)
    {
        case UIGestureRecognizerStateBegan:
        {
            if (_beganInteraction)
                return;

            _isTracking = true;

            if (self.didBeginEditing != nil)
                self.didBeginEditing(isStartHandle);

            _endedInteraction = false;
            _beganInteraction = true;
        }
            break;

        case UIGestureRecognizerStateChanged:
        {
            if (gestureRecognizer == _startHandlePanGestureRecognizer && self.startHandleMoved != nil)
                self.startHandleMoved(translation);
            else if (gestureRecognizer == _endHandlePanGestureRecognizer && self.endHandleMoved != nil)
                self.endHandleMoved(translation);
        }
            break;

        case UIGestureRecognizerStateEnded:
        case UIGestureRecognizerStateCancelled:
        {
            _beganInteraction = false;

            if (_endedInteraction)
                return;

            _isTracking = false;

            if (self.didEndEditing != nil)
                self.didEndEditing(isStartHandle);

            _endedInteraction = true;
        }
            break;

        default:
            break;
    }
}

// Press + pan on the same handle must recognize together.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
    return gestureRecognizer.view == otherGestureRecognizer.view;
}

// Handles shrink to a 2pt sliver when trimming is disabled.
- (void)layoutSubviews
{
    CGFloat handleWidth = self.trimmingEnabled ? 16.0f : 2.0f;

    _leftSegmentView.frame = CGRectMake(0, 0, handleWidth, self.frame.size.height);
    _rightSegmentView.frame = CGRectMake(self.frame.size.width - handleWidth, 0, handleWidth, self.frame.size.height);
}

@end