import Foundation
import UIKit
// NOTE(review): AVFoundation was missing from the original import list even though
// AVComposition/AVAudioSession are used below — presumably lost in extraction.
import AVFoundation
import Display
import AsyncDisplayKit
import ComponentFlow
import SwiftSignalKit
import ViewControllerComponent
import ComponentDisplayAdapters
import TelegramPresentationData
import AccountContext
import Postbox
import TelegramCore
import PresentationDataUtils
import Camera
import MultilineTextComponent
import BlurredBackgroundComponent
import PlainButtonComponent
import Photos
import TooltipUI
import BundleIconComponent
import CameraButtonComponent
import TelegramNotices
import DeviceAccess
import MediaEditor
import MediaResources
import LocalMediaResources
import ImageCompression

/// Immutable snapshot of the round-video-message camera's user-visible state.
/// All mutations go through the `updatedX(_:)` copy helpers so state changes
/// stay value-typed and diffable via `Equatable`.
struct CameraState: Equatable {
    /// How the current recording was initiated / is being held.
    enum Recording: Equatable {
        /// Not recording.
        case none
        /// User is physically holding the record button down.
        case holding
        /// Recording is locked ("hands-free") and continues without touch.
        case handsFree
    }

    /// Which physical camera is active (front/back).
    let position: Camera.Position
    /// Current recording mode.
    let recording: Recording
    /// Accumulated recording duration in seconds (across resumed segments).
    let duration: Double
    /// Whether simultaneous front+back capture is available on this device.
    let isDualCameraEnabled: Bool
    /// Whether the resulting message should auto-delete after one viewing.
    let isViewOnceEnabled: Bool

    /// Returns a copy with a different camera position.
    func updatedPosition(_ position: Camera.Position) -> CameraState {
        return CameraState(position: position, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled)
    }

    /// Returns a copy with a different recording mode.
    func updatedRecording(_ recording: Recording) -> CameraState {
        return CameraState(position: self.position, recording: recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled)
    }

    /// Returns a copy with a different duration.
    func updatedDuration(_ duration: Double) -> CameraState {
        return CameraState(position: self.position, recording: self.recording, duration: duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: self.isViewOnceEnabled)
    }

    /// Returns a copy with the view-once flag toggled to the given value.
    func updatedIsViewOnceEnabled(_ isViewOnceEnabled: Bool) -> CameraState {
        return CameraState(position: self.position, recording: self.recording, duration: self.duration, isDualCameraEnabled: self.isDualCameraEnabled, isViewOnceEnabled: isViewOnceEnabled)
    }
}

/// State of the post-recording preview: the stitched-together composition of all
/// recorded segments plus an optional trim range (in seconds).
struct PreviewState: Equatable {
    let composition: AVComposition
    // NOTE(review): the generic parameter was stripped by extraction; `Range<Double>`
    // matches `updateTrimRange`'s `start ..< end` usage over seconds — confirm.
    let trimRange: Range<Double>?
}

/// Transition phases of the camera screen's appearance animation.
enum CameraScreenTransition {
    case animateIn
    case animateOut
    case finishedAnimateIn
}

/// Tag used to locate the view-once button view for tooltip anchoring.
private let viewOnceButtonTag = GenericComponentViewTag()

/// The component rendering the overlay controls (flip camera, view-once,
/// record-more) above the camera preview.
private final class CameraScreenComponent: CombinedComponent {
    typealias EnvironmentType = ViewControllerComponentContainer.Environment

    let context: AccountContext
    let cameraState: CameraState
    let isPreviewing: Bool
    let getController: () -> VideoMessageCameraScreen?
    let present: (ViewController) -> Void
    let push: (ViewController) -> Void
    // NOTE(review): generic parameters stripped by extraction — restored from the
    // `invoke(Void())` / `invoke(.video(...))` call sites.
    let startRecording: ActionSlot<Void>
    let stopRecording: ActionSlot<Void>
    let completion: ActionSlot<VideoMessageCameraScreen.CaptureResult>

    init(
        context: AccountContext,
        cameraState: CameraState,
        isPreviewing: Bool,
        getController: @escaping () -> VideoMessageCameraScreen?,
        present: @escaping (ViewController) -> Void,
        push: @escaping (ViewController) -> Void,
        startRecording: ActionSlot<Void>,
        stopRecording: ActionSlot<Void>,
        completion: ActionSlot<VideoMessageCameraScreen.CaptureResult>
    ) {
        self.context = context
        self.cameraState = cameraState
        self.isPreviewing = isPreviewing
        self.getController = getController
        self.present = present
        self.push = push
        self.startRecording = startRecording
        self.stopRecording = stopRecording
        self.completion = completion
    }

    static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
        if lhs.context !== rhs.context {
            return false
        }
        if lhs.cameraState != rhs.cameraState {
            return false
        }
        if lhs.isPreviewing != rhs.isPreviewing {
            return false
        }
        return true
    }

    final class State: ComponentState {
        enum ImageKey: Hashable {
            case flip
            case buttonBackground
        }
        // Cache for generated button images, keyed by purpose; avoids re-rendering
        // on every layout pass. Invalidation on theme change is not handled here —
        // NOTE(review): presumably the state is recreated when the theme changes; verify.
        private var cachedImages: [ImageKey: UIImage] = [:]
        func image(_ key: ImageKey, theme: PresentationTheme) -> UIImage {
            if let image = self.cachedImages[key] {
                return image
            } else {
                var image: UIImage
                switch key {
                case .flip:
                    image = UIImage(bundleImageName: "Camera/VideoMessageFlip")!.withRenderingMode(.alwaysTemplate)
                case .buttonBackground:
                    let innerSize = CGSize(width: 40.0, height: 40.0)
                    image = generateFilledCircleImage(diameter: innerSize.width, color: theme.rootController.navigationBar.opaqueBackgroundColor, strokeColor: theme.chat.inputPanel.panelSeparatorColor, strokeWidth: 0.5, backgroundColor: nil)!
                }
                cachedImages[key] = image
                return image
            }
        }

        private let context: AccountContext
        private let present: (ViewController) -> Void
        private let startRecording: ActionSlot<Void>
        private let stopRecording: ActionSlot<Void>
        private let completion: ActionSlot<VideoMessageCameraScreen.CaptureResult>
        private let getController: () -> VideoMessageCameraScreen?

        private var resultDisposable = MetaDisposable()

        var cameraState: CameraState?

        private let hapticFeedback = HapticFeedback()

        init(
            context: AccountContext,
            present: @escaping (ViewController) -> Void,
            startRecording: ActionSlot<Void>,
            stopRecording: ActionSlot<Void>,
            completion: ActionSlot<VideoMessageCameraScreen.CaptureResult>,
            getController: @escaping () -> VideoMessageCameraScreen? = { return nil }
        ) {
            self.context = context
            self.present = present
            self.startRecording = startRecording
            self.stopRecording = stopRecording
            self.completion = completion
            self.getController = getController

            super.init()

            // `scheduledLock` is set when the user locked recording before the camera
            // was ready; consume it here so the recording starts hands-free.
            self.startRecording.connect({ [weak self] _ in
                if let self, let controller = getController() {
                    self.startVideoRecording(pressing: !controller.scheduledLock)
                    controller.scheduledLock = false
                }
            })
            self.stopRecording.connect({ [weak self] _ in
                self?.stopVideoRecording()
            })
        }

        deinit {
            self.resultDisposable.dispose()
        }

        /// Toggles the view-once flag on the owning controller's camera state.
        func toggleViewOnce() {
            guard let controller = self.getController() else {
                return
            }
            controller.updateCameraState({ $0.updatedIsViewOnceEnabled(!$0.isViewOnceEnabled) }, transition: .easeInOut(duration: 0.2))
        }

        // Timestamp of the last camera flip, used to debounce rapid toggles.
        private var lastFlipTimestamp: Double?
/// Flips between front and back cameras, debounced to at most once per second.
func togglePosition() {
    guard let controller = self.getController(), let camera = controller.camera else {
        return
    }
    let currentTimestamp = CACurrentMediaTime()
    if let lastFlipTimestamp = self.lastFlipTimestamp, currentTimestamp - lastFlipTimestamp < 1.0 {
        return
    }
    self.lastFlipTimestamp = currentTimestamp
    camera.togglePosition()
    self.hapticFeedback.impact(.veryLight)
}

/// Starts (or resumes) video recording.
/// - Parameter pressing: `true` when the user is holding the button (`.holding`),
///   `false` for a hands-free/locked start (`.handsFree`).
func startVideoRecording(pressing: Bool) {
    guard let controller = self.getController(), let camera = controller.camera else {
        return
    }
    guard case .none = controller.cameraState.recording else {
        return
    }
    // When resuming after a previous segment, continue the duration counter from
    // the already-recorded composition's length.
    let initialDuration = controller.node.previewState?.composition.duration.seconds ?? 0.0

    controller.updatePreviewState({ _ in return nil }, transition: .spring(duration: 0.4))
    controller.node.dismissAllTooltips()
    controller.updateCameraState({ $0.updatedRecording(pressing ? .holding : .handsFree).updatedDuration(initialDuration) }, transition: .spring(duration: 0.4))

    let isFirstTime = !controller.node.cameraIsActive
    controller.node.resumeCameraCapture()
    controller.node.withReadyCamera(isFirstTime: isFirstTime) {
        self.resultDisposable.set((camera.startRecording()
        |> deliverOnMainQueue).start(next: { [weak self] duration in
            let duration = initialDuration + duration
            if let self, let controller = self.getController() {
                controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
                // Round video messages are capped at one minute.
                if duration > 59.0 {
                    self.stopVideoRecording()
                }
            }
        }))
    }

    if initialDuration > 0.0 {
        controller.onResume()
    }
}

/// Stops recording and forwards the finished segment to the completion slot.
func stopVideoRecording() {
    guard let controller = self.getController(), let camera = controller.camera else {
        return
    }
    self.resultDisposable.set((camera.stopRecording()
    |> deliverOnMainQueue).start(next: { [weak self] result in
        if let self, let controller = self.getController(), case let .finished(mainResult, _, duration, _, _) = result {
            self.completion.invoke(
                .video(VideoMessageCameraScreen.CaptureResult.Video(
                    videoPath: mainResult.path,
                    dimensions: PixelDimensions(mainResult.dimensions),
                    duration: duration,
                    thumbnail: mainResult.thumbnail
                ))
            )
            controller.updateCameraState({ $0.updatedRecording(.none) }, transition: .spring(duration: 0.4))
        }
    }))
}

/// Switches an in-progress recording into hands-free (locked) mode.
func lockVideoRecording() {
    guard let controller = self.getController() else {
        return
    }
    controller.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
}

/// Applies an absolute zoom level (0...1 fraction) to the camera.
func updateZoom(fraction: CGFloat) {
    guard let camera = self.getController()?.camera else {
        return
    }
    camera.setZoomLevel(fraction)
}
} // end of State

func makeState() -> State {
    return State(context: self.context, present: self.present, startRecording: self.startRecording, stopRecording: self.stopRecording, completion: self.completion, getController: self.getController)
}

static var body: Body {
    let flipButton = Child(CameraButton.self)
    let viewOnceButton = Child(PlainButtonComponent.self)
    let recordMoreButton = Child(PlainButtonComponent.self)

    return { context in
        let environment = context.environment[ViewControllerComponentContainer.Environment.self].value
        let component = context.component
        let state = context.state
        let availableSize = context.availableSize

        state.cameraState = component.cameraState

        // View-once button sits higher while previewing (above the record-more button).
        var viewOnceOffset: CGFloat = 102.0

        var showViewOnce = false
        var showRecordMore = false
        if component.isPreviewing {
            showViewOnce = true
            showRecordMore = true
            viewOnceOffset = 67.0
        } else if case .handsFree = component.cameraState.recording {
            showViewOnce = true
        }

        if !component.isPreviewing {
            let flipButton = flipButton.update(
                component: CameraButton(
                    content: AnyComponentWithIdentity(
                        id: "flip",
                        component: AnyComponent(
                            Image(
                                image: state.image(.flip, theme: environment.theme),
                                tintColor: environment.theme.list.itemAccentColor,
                                size: CGSize(width: 30.0, height: 30.0)
                            )
                        )
                    ),
                    minSize: CGSize(width: 44.0, height: 44.0),
                    action: { [weak state] in
                        if let state {
                            state.togglePosition()
                        }
                    }
                ),
                availableSize: availableSize,
                transition: context.transition
            )
            context.add(flipButton
                .position(CGPoint(x: flipButton.size.width / 2.0 + 8.0, y: availableSize.height - flipButton.size.height / 2.0 - 8.0))
                .appear(.default(scale: true, alpha: true))
                .disappear(.default(scale: true, alpha: true))
            )
        }

        if showViewOnce {
            let viewOnceButton = viewOnceButton.update(
                component: PlainButtonComponent(
                    content: AnyComponent(
                        ZStack([
                            AnyComponentWithIdentity(
                                id: "background",
                                component: AnyComponent(
                                    Image(
                                        image: state.image(.buttonBackground, theme: environment.theme),
                                        size: CGSize(width: 40.0, height: 40.0)
                                    )
                                )
                            ),
                            AnyComponentWithIdentity(
                                id: "icon",
                                component: AnyComponent(
                                    BundleIconComponent(
                                        name: component.cameraState.isViewOnceEnabled ? "Media Gallery/ViewOnceEnabled" : "Media Gallery/ViewOnce",
                                        tintColor: environment.theme.list.itemAccentColor
                                    )
                                )
                            )
                        ])
                    ),
                    effectAlignment: .center,
                    action: { [weak state] in
                        if let state {
                            state.toggleViewOnce()
                        }
                    },
                    animateAlpha: false,
                    tag: viewOnceButtonTag
                ),
                availableSize: availableSize,
                transition: context.transition
            )
            context.add(viewOnceButton
                .position(CGPoint(x: availableSize.width - viewOnceButton.size.width / 2.0 - 2.0 - UIScreenPixel, y: availableSize.height - viewOnceButton.size.height / 2.0 - 8.0 - viewOnceOffset))
                .appear(.default(scale: true, alpha: true))
                .disappear(.default(scale: true, alpha: true))
            )
        }

        if showRecordMore {
            let recordMoreButton = recordMoreButton.update(
                component: PlainButtonComponent(
                    content: AnyComponent(
                        ZStack([
                            AnyComponentWithIdentity(
                                id: "background",
                                component: AnyComponent(
                                    Image(
                                        image: state.image(.buttonBackground, theme: environment.theme),
                                        size: CGSize(width: 40.0, height: 40.0)
                                    )
                                )
                            ),
                            AnyComponentWithIdentity(
                                id: "icon",
                                component: AnyComponent(
                                    BundleIconComponent(
                                        name: "Chat/Input/Text/IconVideo",
                                        tintColor: environment.theme.list.itemAccentColor
                                    )
                                )
                            )
                        ])
                    ),
                    effectAlignment: .center,
                    action: { [weak state] in
                        state?.startVideoRecording(pressing: false)
                    }
                ),
                availableSize: availableSize,
                transition: context.transition
            )
            context.add(recordMoreButton
                .position(CGPoint(x: availableSize.width - recordMoreButton.size.width / 2.0 - 2.0 - UIScreenPixel, y: availableSize.height - recordMoreButton.size.height / 2.0 - 22.0))
                .appear(.default(scale: true, alpha: true))
                .disappear(.default(scale: true, alpha: true))
            )
        }

        return availableSize
    }
}
} // end of CameraScreenComponent

/// Modal screen for recording round video messages: camera capture, segmented
/// recording, trimming preview, and final message construction.
public class VideoMessageCameraScreen: ViewController {
    public enum CaptureResult {
        public struct Video {
            public let videoPath: String
            public let dimensions: PixelDimensions
            public let duration: Double
            public let thumbnail: UIImage
        }
        case video(Video)
    }

    fileprivate final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
        private weak var controller: VideoMessageCameraScreen?
        private let context: AccountContext
        fileprivate var camera: Camera?
        // NOTE(review): generic parameter lost in extraction; `CameraState` is
        // assumed from the slot's name — verify against the original file.
        private let updateState: ActionSlot<CameraState>

        fileprivate let backgroundView: UIVisualEffectView
        fileprivate let containerView: UIView
        fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>

        fileprivate let previewContainerView: UIView
        fileprivate var mainPreviewView: CameraSimplePreviewView
        fileprivate var additionalPreviewView: CameraSimplePreviewView
        private var progressView: RecordingProgressView
        private var resultPreviewView: ResultPreviewView?

        private var cameraStateDisposable: Disposable?
        private var changingPositionDisposable: Disposable?
        private let idleTimerExtensionDisposable = MetaDisposable()

        // While the camera is active, keep the device from auto-locking.
        fileprivate var cameraIsActive = true {
            didSet {
                if self.cameraIsActive {
                    self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())
                } else {
                    self.idleTimerExtensionDisposable.set(nil)
                }
            }
        }

        private var presentationData: PresentationData
        private var validLayout: ContainerViewLayout?
fileprivate var didAppear: () -> Void = {}

// NOTE(review): generic parameters below were stripped by extraction and are
// restored from the `invoke(Void())`/`invoke(.video(...))` call sites.
fileprivate let startRecording = ActionSlot<Void>()
fileprivate let stopRecording = ActionSlot<Void>()
private let completion = ActionSlot<VideoMessageCameraScreen.CaptureResult>()

var cameraState: CameraState {
    didSet {
        // Surface a tooltip when view-once is switched on; hide it when off.
        if self.cameraState.isViewOnceEnabled != oldValue.isViewOnceEnabled {
            if self.cameraState.isViewOnceEnabled {
                let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
                self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: false)
            } else {
                self.dismissAllTooltips()
            }
        }
    }
}

var previewState: PreviewState? {
    didSet {
        self.previewStatePromise.set(.single(self.previewState))
    }
}
var previewStatePromise = Promise<PreviewState?>()

var transitioningToPreview = false

init(controller: VideoMessageCameraScreen) {
    self.controller = controller
    self.context = controller.context
    self.updateState = ActionSlot<CameraState>()

    self.presentationData = controller.updatedPresentationData?.initial ?? self.context.sharedContext.currentPresentationData.with { $0 }

    self.backgroundView = UIVisualEffectView(effect: UIBlurEffect(style: self.presentationData.theme.overallDarkAppearance ? .dark : .light))

    self.containerView = UIView()
    self.containerView.clipsToBounds = true

    self.componentHost = ComponentView<ViewControllerComponentContainer.Environment>()

    self.previewContainerView = UIView()
    self.previewContainerView.clipsToBounds = true

    let isDualCameraEnabled = Camera.isDualCameraSupported
    // Always true; video messages default to the front camera.
    let isFrontPosition = "".isEmpty

    self.mainPreviewView = CameraSimplePreviewView(frame: .zero, main: true)
    self.additionalPreviewView = CameraSimplePreviewView(frame: .zero, main: false)

    self.progressView = RecordingProgressView(frame: .zero)

    if isDualCameraEnabled {
        self.mainPreviewView.resetPlaceholder(front: false)
        self.additionalPreviewView.resetPlaceholder(front: true)
    } else {
        self.mainPreviewView.resetPlaceholder(front: isFrontPosition)
    }

    self.cameraState = CameraState(
        position: isFrontPosition ? .front : .back,
        recording: .none,
        duration: 0.0,
        isDualCameraEnabled: isDualCameraEnabled,
        isViewOnceEnabled: false
    )

    self.previewState = nil

    super.init()

    self.backgroundColor = .clear

    self.view.addSubview(self.backgroundView)
    self.view.addSubview(self.containerView)

    self.containerView.addSubview(self.previewContainerView)
    self.previewContainerView.addSubview(self.mainPreviewView)
    self.previewContainerView.addSubview(self.additionalPreviewView)
    self.previewContainerView.addSubview(self.progressView)

    self.completion.connect { [weak self] result in
        if let self {
            self.addCaptureResult(result)
        }
    }

    self.mainPreviewView.removePlaceholder(delay: 0.0)
    // NOTE(review): this closure captures `self` strongly until the camera is
    // ready (or the 0.35s fallback fires) — presumably acceptable for this
    // short-lived screen; verify.
    self.withReadyCamera(isFirstTime: true, {
        self.additionalPreviewView.removePlaceholder(delay: 0.35)
        self.startRecording.invoke(Void())
    })

    self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())

    self.setupCamera()
}

deinit {
    self.cameraStateDisposable?.dispose()
    self.changingPositionDisposable?.dispose()
    self.idleTimerExtensionDisposable.dispose()
}

/// Runs `f` once the (secondary) camera preview is actually rendering frames;
/// on iOS < 13 falls back to a fixed delay.
func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
    if #available(iOS 13.0, *) {
        let _ = (self.additionalPreviewView.isPreviewing
        |> filter { $0 }
        |> take(1)).startStandalone(next: { _ in
            f()
        })
    } else {
        Queue.mainQueue().after(0.35) {
            f()
        }
    }
}

override func didLoad() {
    super.didLoad()

    self.view.disablesInteractiveModalDismiss = true
    self.view.disablesInteractiveKeyboardGestureRecognizer = true

    let pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(self.handlePinch(_:)))
    self.view.addGestureRecognizer(pinchGestureRecognizer)
}

private func setupCamera() {
    guard self.camera == nil else {
        return
    }
    let camera = Camera(
        configuration: Camera.Configuration(
            preset: .hd1920x1080,
            position: self.cameraState.position,
            isDualEnabled: self.cameraState.isDualCameraEnabled,
            audio: true,
            photo: true,
            metadata: false,
            isRoundVideo: true
        ),
        previewView: self.mainPreviewView,
        secondaryPreviewView: self.additionalPreviewView
    )

    self.cameraStateDisposable = (camera.position
    |> deliverOnMainQueue).start(next: { [weak self] position in
        guard let self else {
            return
        }
        self.cameraState = self.cameraState.updatedPosition(position)
        self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
    })

    // Subscribed only to keep the signal alive; the value is intentionally unused.
    self.changingPositionDisposable = (camera.modeChange
    |> deliverOnMainQueue).start(next: { [weak self] modeChange in
        if let self {
            let _ = self
        }
    })

    camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true)
    camera.startCapture()

    self.camera = camera
}

@objc private func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) {
    guard let camera = self.camera else {
        return
    }
    switch gestureRecognizer.state {
    case .changed:
        let scale = gestureRecognizer.scale
        camera.setZoomDelta(scale)
        gestureRecognizer.scale = 1.0
    case .ended, .cancelled:
        // Spring the zoom back to 1x when the pinch ends.
        camera.rampZoom(1.0, rate: 8.0)
    default:
        break
    }
}

private var animatingIn = false

/// Slides the preview container up from below while fading in the blur backdrop.
func animateIn() {
    self.animatingIn = true

    self.backgroundView.alpha = 0.0
    UIView.animate(withDuration: 0.4, animations: {
        self.backgroundView.alpha = 1.0
    })

    let targetPosition = self.previewContainerView.center
    self.previewContainerView.center = CGPoint(x: targetPosition.x, y: self.frame.height + self.previewContainerView.frame.height / 2.0)

    UIView.animate(withDuration: 0.5, delay: 0.0, usingSpringWithDamping: 0.8, initialSpringVelocity: 0.2, animations: {
        self.previewContainerView.center = targetPosition
    }, completion: { _ in
        self.animatingIn = false
    })

    if let view = self.componentHost.view {
        view.layer.animateAlpha(from: 0.1, to: 1.0, duration: 0.25)
    }
}

/// Fades everything out, then calls `completion`.
func animateOut(completion: @escaping () -> Void) {
    self.camera?.stopCapture(invalidate: true)

    UIView.animate(withDuration: 0.25, animations: {
        self.backgroundView.alpha = 0.0
    }, completion: { _ in
        completion()
    })

    self.componentHost.view?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
    self.previewContainerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
}

func pauseCameraCapture() {
    self.mainPreviewView.isEnabled = false
    self.additionalPreviewView.isEnabled = false
    self.camera?.stopCapture()
    self.cameraIsActive = false
    self.requestUpdateLayout(transition: .immediate)
}

func resumeCameraCapture() {
    if !self.mainPreviewView.isEnabled {
        self.mainPreviewView.isEnabled = true
        self.additionalPreviewView.isEnabled = true
        self.camera?.startCapture()
        self.cameraIsActive = true
        self.requestUpdateLayout(transition: .immediate)
    }
}

// Finished recording segments, in order, and a pipe notifying listeners of each
// newly appended segment.
fileprivate var results: [VideoMessageCameraScreen.CaptureResult] = []
fileprivate var resultsPipe = ValuePipe<VideoMessageCameraScreen.CaptureResult>()

/// Appends a finished segment, pauses capture, and switches into preview mode
/// with the stitched composition of all segments so far.
func addCaptureResult(_ result: VideoMessageCameraScreen.CaptureResult) {
    guard let controller = self.controller else {
        return
    }
    self.pauseCameraCapture()

    self.results.append(result)
    self.resultsPipe.putNext(result)

    self.transitioningToPreview = false

    let composition = composition(with: self.results)
    controller.updatePreviewState({ _ in
        return PreviewState(composition: composition, trimRange: nil)
    }, transition: .spring(duration: 0.4))
}

/// Debug helper: sends the recorded file to Saved Messages as a plain video.
private func debugSaveResult(path: String) {
    guard let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: .mappedIfSafe) else {
        return
    }
    let id = Int64.random(in: Int64.min ... Int64.max)
    let fileResource = LocalFileReferenceMediaResource(localFilePath: path, randomId: id)
    let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: Int64(data.count), attributes: [.FileName(fileName: "video.mp4")])
    let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
    let _ = enqueueMessages(account: self.context.engine.account, peerId: self.context.engine.account.peerId, messages: [message]).start()
}

// Let touches in the input-panel area pass through to the chat below.
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
    let result = super.hitTest(point, with: event)
    if let controller = self.controller, point.y > self.frame.height - controller.inputPanelFrame.height - 34.0 {
        return nil
    }
    return result
}

/// Shows the "tap to play once" tooltip at most three times per account.
fileprivate func maybePresentViewOnceTooltip() {
    let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
    let _ = (ApplicationSpecificNotice.getVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager)
    |> deliverOnMainQueue).startStandalone(next: { [weak self] counter in
        guard let self else {
            return
        }
        if counter >= 3 {
            return
        }
        Queue.mainQueue().after(0.3) {
            self.displayViewOnceTooltip(text: presentationData.strings.Chat_TapToPlayVideoMessageOnceTooltip, hasIcon: true)
        }
        let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager).startStandalone()
    })
}

private func displayViewOnceTooltip(text: String, hasIcon: Bool) {
    guard let controller = self.controller, let sourceView = self.componentHost.findTaggedView(tag: viewOnceButtonTag) else {
        return
    }

    self.dismissAllTooltips()

    let absoluteFrame = sourceView.convert(sourceView.bounds, to: self.view)
    let location = CGRect(origin: CGPoint(x: absoluteFrame.midX - 20.0, y: absoluteFrame.midY), size: CGSize())

    let tooltipController = TooltipScreen(
        account: self.context.account,
        sharedContext: self.context.sharedContext,
        text: .markdown(text: text),
        balancedTextLayout: true,
        constrainWidth: 240.0,
        style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
        arrowStyle: .small,
        icon: hasIcon ? .animation(name: "anim_autoremove_on", delay: 0.1, tintColor: nil) : nil,
        location: .point(location, .right),
        displayDuration: .default,
        inset: 8.0,
        cornerRadius: 8.0,
        shouldDismissOnTouch: { _, _ in
            return .ignore
        }
    )
    controller.present(tooltipController, in: .window(.root))
}

fileprivate func dismissAllTooltips() {
    guard let controller = self.controller else {
        return
    }
    controller.window?.forEachController({ controller in
        if let controller = controller as? TooltipScreen {
            controller.dismiss()
        }
    })
    controller.forEachController({ controller in
        if let controller = controller as? TooltipScreen {
            controller.dismiss()
        }
        return true
    })
}

/// Applies a trim range (seconds) to the result preview and the preview state.
func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    self.resultPreviewView?.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
    self.controller?.updatePreviewState({ state in
        if let state {
            return PreviewState(composition: state.composition, trimRange: start ..< end)
        }
        return nil
    }, transition: .immediate)
}

// NOTE(review): the extracted source was corrupted here — an HTML-stripping
// artifact swallowed everything between `start..<end` above and a later `>`,
// which contained the remainder of `updateTrimRange`, `requestUpdateLayout`,
// this node's `containerLayoutUpdated`, the controller's `node` accessor and
// its stored `context`/`updatedPresentationData` declarations. The members
// below are reconstructed from their call sites; the layout body in
// particular is a placeholder. TODO: restore from the original file.

/// Re-runs the layout pass with the last known layout, if any.
func requestUpdateLayout(transition: Transition) {
    if let layout = self.validLayout {
        self.containerLayoutUpdated(layout: layout, transition: transition)
    }
}

func containerLayoutUpdated(layout: ContainerViewLayout, transition: Transition) {
    self.validLayout = layout
    // TODO(review): original implementation lost in extraction — it presumably
    // updated `componentHost` with a CameraScreenComponent and laid out
    // `backgroundView`/`containerView`/`previewContainerView`/`progressView`.
}
} // end of Node

fileprivate var node: Node {
    return self.displayNode as! Node
}

private let context: AccountContext
private let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
private let inputPanelFrame: CGRect
fileprivate let completion: (EnqueueMessage) -> Void

private var audioSessionDisposable: Disposable?
private let hapticFeedback = HapticFeedback()

private var validLayout: ContainerViewLayout?

fileprivate var camera: Camera? {
    return self.node.camera
}

fileprivate var cameraState: CameraState {
    return self.node.cameraState
}

fileprivate func updateCameraState(_ f: (CameraState) -> CameraState, transition: Transition) {
    self.node.cameraState = f(self.node.cameraState)
    self.node.requestUpdateLayout(transition: transition)
    self.durationValue.set(self.cameraState.duration)
}

fileprivate func updatePreviewState(_ f: (PreviewState?) -> PreviewState?, transition: Transition) {
    self.node.previewState = f(self.node.previewState)
    self.node.requestUpdateLayout(transition: transition)
}

/// Live mic level and recording duration, consumed by the chat input panel.
public final class RecordingStatus {
    public let micLevel: Signal<Float, NoError>
    public let duration: Signal<Double, NoError>

    public init(micLevel: Signal<Float, NoError>, duration: Signal<Double, NoError>) {
        self.micLevel = micLevel
        self.duration = duration
    }
}

private let micLevelValue = ValuePromise<Float>(0.0)
private let durationValue = ValuePromise<Double>(0.0)
public let recordingStatus: RecordingStatus

public var onDismiss: (Bool) -> Void = { _ in }
public var onStop: () -> Void = {}
public var onResume: () -> Void = {}

/// Snapshot of the recording used by the trim/scrub UI.
public struct RecordedVideoData {
    public let duration: Double
    public let frames: [UIImage]
    public let framesUpdateTimestamp: Double
    public let trimRange: Range<Double>?
}

// Emits the current list of finished segments; when a stop is pending
// (`waitingForNextResult`), waits for the next segment to land first.
// NOTE(review): reading this getter clears `waitingForNextResult` as a side
// effect — callers rely on read-once semantics.
private var currentResults: Signal<[VideoMessageCameraScreen.CaptureResult], NoError> {
    var results: Signal<[VideoMessageCameraScreen.CaptureResult], NoError> = .single(self.node.results)
    if self.waitingForNextResult {
        results = results
        |> mapToSignal { initial in
            return self.node.resultsPipe.signal()
            |> take(1)
            |> map { next in
                var updatedResults = initial
                updatedResults.append(next)
                return updatedResults
            }
        }
    }
    self.waitingForNextResult = false
    return results
}

/// Produces scrubber data: an immediate placeholder (thumbnail or camera
/// transition image), then the real frames for the stitched composition.
public func takenRecordedData() -> Signal<RecordedVideoData?, NoError> {
    let previewState = self.node.previewStatePromise.get()

    let count = 12

    let initialPlaceholder: Signal<UIImage?, NoError>
    if let firstResult = self.node.results.first {
        if case let .video(video) = firstResult {
            initialPlaceholder = .single(video.thumbnail)
        } else {
            initialPlaceholder = .single(nil)
        }
    } else {
        initialPlaceholder = self.camera?.transitionImage ?? .single(nil)
    }

    let immediateResult: Signal<RecordedVideoData?, NoError> = initialPlaceholder
    |> take(1)
    |> mapToSignal { initialPlaceholder in
        return videoFrames(asset: nil, count: count, initialPlaceholder: initialPlaceholder)
        |> map { framesAndUpdateTimestamp in
            return RecordedVideoData(
                duration: 1.0,
                frames: framesAndUpdateTimestamp.0,
                framesUpdateTimestamp: framesAndUpdateTimestamp.1,
                trimRange: nil
            )
        }
    }

    return immediateResult
    |> mapToSignal { immediateResult in
        return .single(immediateResult)
        |> then(
            self.currentResults
            |> take(1)
            |> mapToSignal { results in
                var totalDuration: Double = 0.0
                for result in results {
                    if case let .video(video) = result {
                        totalDuration += video.duration
                    }
                }
                let composition = composition(with: results)
                return combineLatest(
                    queue: Queue.mainQueue(),
                    videoFrames(asset: composition, count: count, initialTimestamp: immediateResult?.framesUpdateTimestamp),
                    previewState
                )
                |> map { framesAndUpdateTimestamp, previewState in
                    return RecordedVideoData(
                        duration: totalDuration,
                        frames: framesAndUpdateTimestamp.0,
                        framesUpdateTimestamp: framesAndUpdateTimestamp.1,
                        trimRange: previewState?.trimRange
                    )
                }
            }
        )
    }
}

public init(
    context: AccountContext,
    updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?,
    inputPanelFrame: CGRect,
    completion: @escaping (EnqueueMessage) -> Void
) {
    self.context = context
    self.updatedPresentationData = updatedPresentationData
    self.inputPanelFrame = inputPanelFrame
    self.completion = completion

    self.recordingStatus = RecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())

    super.init(navigationBarPresentationData: nil)

    self.statusBar.statusBarStyle = .Ignore

    self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)

    self.navigationPresentation = .flatModal

    self.requestAudioSession()
}

required public init(coder: NSCoder) {
    preconditionFailure()
}

deinit {
    self.audioSessionDisposable?.dispose()
    if #available(iOS 13.0, *) {
        try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(false)
    }
}

override public func loadDisplayNode() {
    self.displayNode = Node(controller: self)

    super.displayNodeDidLoad()
}

/// Stops any in-progress recording, then builds and enqueues the final round
/// video message (stitching segments, trimming, thumbnail and first-frame
/// representations, optional view-once attribute).
public func sendVideoRecording() {
    if case .none = self.cameraState.recording {
        // Already stopped — the last segment is in `results`.
    } else {
        self.waitingForNextResult = true
        self.node.stopRecording.invoke(Void())
    }

    let _ = (self.currentResults
    |> take(1)
    |> deliverOnMainQueue).startStandalone(next: { [weak self] results in
        guard let self, let firstResult = results.first, case let .video(video) = firstResult else {
            return
        }

        var videoPaths: [String] = []
        var duration: Double = 0.0
        for result in results {
            if case let .video(video) = result {
                videoPaths.append(video.videoPath)
                duration += video.duration
            }
        }

        let finalDuration: Double
        if let trimRange = self.node.previewState?.trimRange {
            finalDuration = trimRange.upperBound - trimRange.lowerBound
        } else {
            finalDuration = duration
        }

        var resourceAdjustments: VideoMediaResourceAdjustments? = nil
        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
        if let valuesData = try? JSONEncoder().encode(values) {
            let data = MemoryBuffer(data: valuesData)
            let digest = MemoryBuffer(data: data.md5Digest())
            resourceAdjustments = VideoMediaResourceAdjustments(data: data, digest: digest, isStory: false)
        }

        let resource: TelegramMediaResource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), paths: videoPaths, adjustments: resourceAdjustments)

        var previewRepresentations: [TelegramMediaImageRepresentation] = []

        let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
        let thumbnailSize = video.dimensions.cgSize.aspectFitted(CGSize(width: 320.0, height: 320.0))
        let thumbnailImage = scaleImageToPixelSize(image: video.thumbnail, size: thumbnailSize)
        if let thumbnailData = thumbnailImage?.jpegData(compressionQuality: 0.4) {
            self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: thumbnailData)
            previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(thumbnailSize), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
        }

        let tempFile = TempBox.shared.tempFile(fileName: "file")
        defer {
            TempBox.shared.dispose(tempFile)
        }
        if let data = compressImageToJPEG(video.thumbnail, quality: 0.7, tempFilePath: tempFile.path) {
            self.context.account.postbox.mediaBox.storeCachedResourceRepresentation(resource, representation: CachedVideoFirstFrameRepresentation(), data: data)
        }

        let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.FileName(fileName: "video.mp4"), .Video(duration: finalDuration, size: video.dimensions, flags: [.instantRoundVideo], preloadSize: nil)])

        var attributes: [MessageAttribute] = []
        if self.cameraState.isViewOnceEnabled {
            // `viewOnceTimeout` is declared elsewhere in the module.
            attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
        }

        self.completion(.message(
            text: "",
            attributes: attributes,
            inlineStickers: [:],
            mediaReference: .standalone(media: media),
            threadId: nil,
            replyToMessageId: nil,
            replyToStoryId: nil,
            localGroupingKey: nil,
            correlationId: nil,
            bubbleUpEmojiOrStickersets: []
        ))
    })
}

private var waitingForNextResult = false

/// Stops the current recording and transitions to preview; `true` on request.
public func stopVideoRecording() -> Bool {
    self.waitingForNextResult = true
    self.node.transitioningToPreview = true
    self.node.requestUpdateLayout(transition: .spring(duration: 0.4))

    self.node.stopRecording.invoke(Void())
    return true
}

// Set when the user locks recording before capture has actually started; the
// component's `startRecording` handler consumes it.
fileprivate var scheduledLock = false

public func lockVideoRecording() {
    if case .none = self.cameraState.recording {
        self.scheduledLock = true
    } else {
        self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
    }
    self.node.maybePresentViewOnceTooltip()
}

public func discardVideo() {
    self.requestDismiss(animated: true)
}

/// Returns a snapshot of the preview for the send-transition animation, placed
/// in window coordinates.
public func extractVideoSnapshot() -> UIView? {
    if let snapshotView = self.node.previewContainerView.snapshotView(afterScreenUpdates: false) {
        snapshotView.frame = self.node.previewContainerView.convert(self.node.previewContainerView.bounds, to: nil)
        return snapshotView
    }
    return nil
}

public func hideVideoSnapshot() {
    // Near-zero rather than zero so the view keeps participating in layout.
    self.node.previewContainerView.alpha = 0.02
}

public func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    self.node.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}

private func requestAudioSession() {
    self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
        if #available(iOS 13.0, *) {
            try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
        }
    }, deactivate: { _ in
        return .single(Void())
    })
}

private var isDismissed = false

fileprivate func requestDismiss(animated: Bool) {
    guard !self.isDismissed else {
        return
    }

    self.node.dismissAllTooltips()
    self.node.camera?.stopCapture(invalidate: true)

    self.isDismissed = true
    if animated {
        self.node.animateOut(completion: {
            self.dismiss(animated: false)
        })
    } else {
        self.dismiss(animated: false)
    }
}

override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
    self.validLayout = layout
    super.containerLayoutUpdated(layout, transition: transition)

    if !self.isDismissed {
        (self.displayNode as! Node).containerLayoutUpdated(layout: layout, transition: Transition(transition))
    }
}
} // end of VideoMessageCameraScreen

/// Stitches all finished video segments into a single sequential composition.
/// Segments that fail to insert are skipped silently (best effort).
private func composition(with results: [VideoMessageCameraScreen.CaptureResult]) -> AVComposition {
    let composition = AVMutableComposition()
    var currentTime = CMTime.zero
    for result in results {
        guard case let .video(video) = result else {
            continue
        }
        let asset = AVAsset(url: URL(fileURLWithPath: video.videoPath))
        let duration = asset.duration
        do {
            try composition.insertTimeRange(
                CMTimeRangeMake(start: .zero, duration: duration),
                of: asset,
                at: currentTime
            )
            currentTime = CMTimeAdd(currentTime, duration)
        } catch {
        }
    }
    return composition
}