Video message recording improvements

Ilya Laktyushin
2024-01-14 16:57:52 +04:00
parent 40c4378bde
commit f34d0913df
3 changed files with 290 additions and 78 deletions

View File

@@ -0,0 +1,115 @@
import Foundation
import UIKit
import HierarchyTrackingLayer
import ComponentFlow
import Display
private let shadowImage: UIImage? = {
    UIImage(named: "Stories/PanelGradient")
}()

final class LoadingEffectView: UIView {
    private let duration: Double
    private let hierarchyTrackingLayer: HierarchyTrackingLayer
    private let gradientWidth: CGFloat
    
    private let backgroundView: UIImageView
    private let borderGradientView: UIImageView
    private let borderContainerView: UIView
    let borderMaskLayer: SimpleShapeLayer
    
    init(effectAlpha: CGFloat, borderAlpha: CGFloat, gradientWidth: CGFloat = 200.0, duration: Double) {
        self.hierarchyTrackingLayer = HierarchyTrackingLayer()
        self.duration = duration
        self.gradientWidth = gradientWidth
        self.backgroundView = UIImageView()
        self.borderGradientView = UIImageView()
        self.borderContainerView = UIView()
        self.borderMaskLayer = SimpleShapeLayer()
        
        super.init(frame: .zero)
        
        self.layer.addSublayer(self.hierarchyTrackingLayer)
        self.hierarchyTrackingLayer.didEnterHierarchy = { [weak self] in
            guard let self, self.bounds.width != 0.0 else {
                return
            }
            self.updateAnimations(size: self.bounds.size)
        }
        
        let generateGradient: (CGFloat) -> UIImage? = { baseAlpha in
            return generateImage(CGSize(width: self.gradientWidth, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
                context.clear(CGRect(origin: CGPoint(), size: size))
                
                let foregroundColor = UIColor(white: 1.0, alpha: min(1.0, baseAlpha * 4.0))
                
                if let shadowImage {
                    UIGraphicsPushContext(context)
                    
                    for i in 0 ..< 2 {
                        let shadowFrame = CGRect(origin: CGPoint(x: CGFloat(i) * (size.width * 0.5), y: 0.0), size: CGSize(width: size.width * 0.5, height: size.height))
                        
                        context.saveGState()
                        context.translateBy(x: shadowFrame.midX, y: shadowFrame.midY)
                        context.rotate(by: CGFloat(i == 0 ? 1.0 : -1.0) * CGFloat.pi * 0.5)
                        
                        let adjustedRect = CGRect(origin: CGPoint(x: -shadowFrame.height * 0.5, y: -shadowFrame.width * 0.5), size: CGSize(width: shadowFrame.height, height: shadowFrame.width))
                        context.clip(to: adjustedRect, mask: shadowImage.cgImage!)
                        context.setFillColor(foregroundColor.cgColor)
                        context.fill(adjustedRect)
                        
                        context.restoreGState()
                    }
                    
                    UIGraphicsPopContext()
                }
            })
        }
        
        self.backgroundView.image = generateGradient(effectAlpha)
        self.addSubview(self.backgroundView)
        
        self.borderGradientView.image = generateGradient(borderAlpha)
        self.borderContainerView.addSubview(self.borderGradientView)
        self.addSubview(self.borderContainerView)
        self.borderContainerView.layer.mask = self.borderMaskLayer
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    private func updateAnimations(size: CGSize) {
        if self.backgroundView.layer.animation(forKey: "shimmer") != nil {
            return
        }
        
        let animation = self.backgroundView.layer.makeAnimation(from: 0.0 as NSNumber, to: (size.width + self.gradientWidth + size.width * 0.2) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: self.duration, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
        animation.repeatCount = Float.infinity
        self.backgroundView.layer.add(animation, forKey: "shimmer")
        self.borderGradientView.layer.add(animation, forKey: "shimmer")
    }
    
    func update(size: CGSize, transition: Transition) {
        if self.backgroundView.bounds.size != size {
            self.backgroundView.layer.removeAllAnimations()
            
            self.borderMaskLayer.fillColor = nil
            self.borderMaskLayer.strokeColor = UIColor.white.cgColor
            let lineWidth: CGFloat = 3.0
            self.borderMaskLayer.lineWidth = lineWidth
            self.borderMaskLayer.path = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size)).cgPath
            
            transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: size.height)))
            transition.setFrame(view: self.borderContainerView, frame: CGRect(origin: CGPoint(), size: size))
            transition.setFrame(view: self.borderGradientView, frame: CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: size.height)))
        }
        
        self.updateAnimations(size: size)
    }
}
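
For reference, a minimal usage sketch (not part of this commit) showing how this loading effect is meant to be driven by a host view; the container name is illustrative:

    // Hypothetical host code: place the shimmer over the circular camera preview
    // and keep it sized to the preview on every layout pass.
    let loadingView = LoadingEffectView(effectAlpha: 0.1, borderAlpha: 0.25, duration: 1.0)
    previewContainerView.addSubview(loadingView)

    func updatePreviewLayout(size: CGSize) {
        loadingView.frame = CGRect(origin: .zero, size: size)
        loadingView.update(size: size, transition: .immediate)
    }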

View File

@@ -76,6 +76,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
    let context: AccountContext
    let cameraState: CameraState
    let previewFrame: CGRect
    let isPreviewing: Bool
    let isMuted: Bool
    let getController: () -> VideoMessageCameraScreen?
@@ -88,6 +89,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
    init(
        context: AccountContext,
        cameraState: CameraState,
        previewFrame: CGRect,
        isPreviewing: Bool,
        isMuted: Bool,
        getController: @escaping () -> VideoMessageCameraScreen?,
@@ -99,6 +101,7 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
    ) {
        self.context = context
        self.cameraState = cameraState
        self.previewFrame = previewFrame
        self.isPreviewing = isPreviewing
        self.isMuted = isMuted
        self.getController = getController
@@ -113,6 +116,9 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
        if lhs.context !== rhs.context {
            return false
        }
        if lhs.previewFrame != rhs.previewFrame {
            return false
        }
        if lhs.cameraState != rhs.cameraState {
            return false
        }
@@ -244,12 +250,12 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
            let duration = initialDuration + recordingData.duration
            if let self, let controller = self.getController() {
                controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
                if duration > 59.0 {
                    self.stopVideoRecording()
                }
                if isFirstRecording {
                    controller.node.setupLiveUpload(filePath: recordingData.filePath)
                }
                if duration > 59.5 {
                    controller.onStop()
                }
            }
        }))
    }
@@ -326,8 +332,13 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                showViewOnce = true
            }
            
            if let controller = component.getController(), !controller.viewOnceAvailable {
                showViewOnce = false
            if let controller = component.getController() {
                if controller.isSendingImmediately || controller.scheduledLock {
                    showViewOnce = true
                }
                if !controller.viewOnceAvailable {
                    showViewOnce = false
                }
            }
            
            if !component.isPreviewing {
@@ -444,14 +455,6 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                )
            }
            
            // var isVideoRecording = false
            // if case .video = component.cameraState.mode {
            //     isVideoRecording = true
            // } else if component.cameraState.recording != .none {
            //     isVideoRecording = true
            // }
            
            return availableSize
        }
    }
@@ -483,10 +486,13 @@ public class VideoMessageCameraScreen: ViewController {
        fileprivate let containerView: UIView
        fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
        
        fileprivate let previewContainerView: UIView
        private var previewSnapshotView: UIView?
        private var previewBlurView: BlurView
        
        fileprivate var mainPreviewView: CameraSimplePreviewView
        fileprivate var additionalPreviewView: CameraSimplePreviewView
        private var progressView: RecordingProgressView
        private let loadingView: LoadingEffectView
        
        private var resultPreviewView: ResultPreviewView?
@@ -560,6 +566,11 @@ public class VideoMessageCameraScreen: ViewController {
            self.progressView = RecordingProgressView(frame: .zero)
            self.loadingView = LoadingEffectView(effectAlpha: 0.1, borderAlpha: 0.25, duration: 1.0)
            
            self.previewBlurView = BlurView()
            self.previewBlurView.isUserInteractionEnabled = false
            
            if isDualCameraEnabled {
                self.mainPreviewView.resetPlaceholder(front: false)
                self.additionalPreviewView.resetPlaceholder(front: true)
@@ -589,17 +600,27 @@ public class VideoMessageCameraScreen: ViewController {
            self.previewContainerView.addSubview(self.mainPreviewView)
            self.previewContainerView.addSubview(self.additionalPreviewView)
            self.previewContainerView.addSubview(self.progressView)
            self.previewContainerView.addSubview(self.previewBlurView)
            self.previewContainerView.addSubview(self.loadingView)
            
            self.completion.connect { [weak self] result in
                if let self {
                    self.addCaptureResult(result)
                }
            }
            
            self.mainPreviewView.removePlaceholder(delay: 0.0)
            if isDualCameraEnabled {
                self.mainPreviewView.removePlaceholder(delay: 0.0)
            }
            
            self.withReadyCamera(isFirstTime: true, {
                self.additionalPreviewView.removePlaceholder(delay: 0.35)
                self.startRecording.invoke(Void())
                if isDualCameraEnabled {
                    self.mainPreviewView.removePlaceholder(delay: 0.0)
                }
                self.loadingView.alpha = 0.0
                self.additionalPreviewView.removePlaceholder(delay: 0.0)
                Queue.mainQueue().after(0.15) {
                    self.startRecording.invoke(Void())
                }
            })
            
            self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())
@@ -744,10 +765,43 @@ public class VideoMessageCameraScreen: ViewController {
        func resumeCameraCapture() {
            if !self.mainPreviewView.isEnabled {
                if let snapshotView = self.previewContainerView.snapshotView(afterScreenUpdates: false) {
                    self.previewContainerView.insertSubview(snapshotView, belowSubview: self.previewBlurView)
                    self.previewSnapshotView = snapshotView
                }
                self.mainPreviewView.isEnabled = true
                self.additionalPreviewView.isEnabled = true
                self.camera?.startCapture()
                
                UIView.animate(withDuration: 0.25, animations: {
                    self.loadingView.alpha = 1.0
                    self.previewBlurView.effect = UIBlurEffect(style: .dark)
                })
                
                let action = { [weak self] in
                    guard let self else {
                        return
                    }
                    UIView.animate(withDuration: 0.4, animations: {
                        self.previewBlurView.effect = nil
                        self.previewSnapshotView?.alpha = 0.0
                    }, completion: { _ in
                        self.previewSnapshotView?.removeFromSuperview()
                        self.previewSnapshotView = nil
                    })
                }
                if #available(iOS 13.0, *) {
                    let _ = (self.mainPreviewView.isPreviewing
                    |> filter { $0 }
                    |> take(1)).startStandalone(next: { _ in
                        action()
                    })
                } else {
                    Queue.mainQueue().after(1.0) {
                        action()
                    }
                }
                
                self.cameraIsActive = true
                self.requestUpdateLayout(transition: .immediate)
            }
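
The resume path above blurs a snapshot of the stale preview and only fades it out once the main preview reports that frames are flowing again, falling back to a fixed delay on iOS 12. A simplified sketch of that gating pattern, using a hypothetical onFirstFrame callback in place of the app's signal plumbing:

    import Foundation

    // Sketch only (not the commit's code): run `reveal` once the preview reports
    // its first frame on iOS 13+, otherwise after a fixed 1-second delay.
    // `PreviewReadiness`/`onFirstFrame` stand in for the real isPreviewing signal.
    final class PreviewReadiness {
        var onFirstFrame: (() -> Void)?
    }

    func revealWhenReady(_ readiness: PreviewReadiness, reveal: @escaping () -> Void) {
        if #available(iOS 13.0, *) {
            readiness.onFirstFrame = {
                reveal()
            }
        } else {
            DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
                reveal()
            }
        }
    }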
@@ -776,10 +830,12 @@ public class VideoMessageCameraScreen: ViewController {
            self.transitioningToPreview = false
            
            let composition = composition(with: self.results)
            controller.updatePreviewState({ _ in
                return PreviewState(composition: composition, trimRange: nil, isMuted: true)
            }, transition: .spring(duration: 0.4))
            if !controller.isSendingImmediately {
                let composition = composition(with: self.results)
                controller.updatePreviewState({ _ in
                    return PreviewState(composition: composition, trimRange: nil, isMuted: true)
                }, transition: .spring(duration: 0.4))
            }
        }
        
        private func debugSaveResult(path: String) {
@@ -797,8 +853,16 @@ public class VideoMessageCameraScreen: ViewController {
        override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
            let result = super.hitTest(point, with: event)
            
            if let resultPreviewView = self.resultPreviewView {
                if resultPreviewView.bounds.contains(self.view.convert(point, to: resultPreviewView)) {
                    return resultPreviewView
                }
            }
            
            if let controller = self.controller, let layout = self.validLayout {
                if point.y > layout.size.height - controller.inputPanelFrame.height - 34.0 {
                let insets = layout.insets(options: .input)
                if point.y > layout.size.height - insets.bottom - controller.inputPanelFrame.height {
                    if layout.metrics.isTablet {
                        if point.x < layout.size.width * 0.33 {
                            return result
@@ -807,6 +871,7 @@ public class VideoMessageCameraScreen: ViewController {
                    return nil
                }
            }
            
            return result
        }
@@ -916,8 +981,6 @@ public class VideoMessageCameraScreen: ViewController {
            let isFirstTime = self.validLayout == nil
            self.validLayout = layout
            
            // let isTablet = layout.metrics.isTablet
            
            let environment = ViewControllerComponentContainer.Environment(
                statusBarHeight: layout.statusBarHeight ?? 0.0,
                navigationHeight: 0.0,
@@ -946,44 +1009,6 @@ public class VideoMessageCameraScreen: ViewController {
            let backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: controller.inputPanelFrame.minY))
            
            let componentSize = self.componentHost.update(
                transition: transition,
                component: AnyComponent(
                    VideoMessageCameraScreenComponent(
                        context: self.context,
                        cameraState: self.cameraState,
                        isPreviewing: self.previewState != nil || self.transitioningToPreview,
                        isMuted: self.previewState?.isMuted ?? true,
                        getController: { [weak self] in
                            return self?.controller
                        },
                        present: { [weak self] c in
                            self?.controller?.present(c, in: .window(.root))
                        },
                        push: { [weak self] c in
                            self?.controller?.push(c)
                        },
                        startRecording: self.startRecording,
                        stopRecording: self.stopRecording,
                        completion: self.completion
                    )
                ),
                environment: {
                    environment
                },
                forceUpdate: forceUpdate,
                containerSize: backgroundFrame.size
            )
            if let componentView = self.componentHost.view {
                if componentView.superview == nil {
                    self.containerView.addSubview(componentView)
                    componentView.clipsToBounds = true
                }
                let componentFrame = CGRect(origin: .zero, size: componentSize)
                transition.setFrame(view: componentView, frame: componentFrame)
            }
            
            transition.setPosition(view: self.backgroundView, position: backgroundFrame.center)
            transition.setBounds(view: self.backgroundView, bounds: CGRect(origin: .zero, size: backgroundFrame.size))
@@ -1015,6 +1040,49 @@ public class VideoMessageCameraScreen: ViewController {
            transition.setAlpha(view: self.additionalPreviewView, alpha: self.cameraState.position == .front ? 1.0 : 0.0)
            
            self.previewBlurView.frame = previewInnerFrame
            self.previewSnapshotView?.frame = previewInnerFrame
            self.loadingView.update(size: previewInnerFrame.size, transition: .immediate)
            
            let componentSize = self.componentHost.update(
                transition: transition,
                component: AnyComponent(
                    VideoMessageCameraScreenComponent(
                        context: self.context,
                        cameraState: self.cameraState,
                        previewFrame: previewFrame,
                        isPreviewing: self.previewState != nil || self.transitioningToPreview,
                        isMuted: self.previewState?.isMuted ?? true,
                        getController: { [weak self] in
                            return self?.controller
                        },
                        present: { [weak self] c in
                            self?.controller?.present(c, in: .window(.root))
                        },
                        push: { [weak self] c in
                            self?.controller?.push(c)
                        },
                        startRecording: self.startRecording,
                        stopRecording: self.stopRecording,
                        completion: self.completion
                    )
                ),
                environment: {
                    environment
                },
                forceUpdate: forceUpdate,
                containerSize: backgroundFrame.size
            )
            if let componentView = self.componentHost.view {
                if componentView.superview == nil {
                    self.containerView.addSubview(componentView)
                    componentView.clipsToBounds = true
                }
                let componentFrame = CGRect(origin: .zero, size: componentSize)
                transition.setFrame(view: componentView, frame: componentFrame)
            }
            
            if let previewState = self.previewState {
                if previewState.composition !== self.resultPreviewView?.composition {
                    self.resultPreviewView?.removeFromSuperview()
@@ -1106,9 +1174,6 @@ public class VideoMessageCameraScreen: ViewController {
    private let durationValue = ValuePromise<TimeInterval>(0.0)
    public let recordingStatus: RecordingStatus
    
    public var onDismiss: (Bool) -> Void = { _ in
    }
    public var onStop: () -> Void = {
    }
@@ -1251,6 +1316,7 @@ public class VideoMessageCameraScreen: ViewController {
        super.displayNodeDidLoad()
    }
    
    fileprivate var isSendingImmediately = false
    public func sendVideoRecording() {
        if case .none = self.cameraState.recording, self.node.results.isEmpty {
            self.completion(nil)
@@ -1259,6 +1325,7 @@ public class VideoMessageCameraScreen: ViewController {
        if case .none = self.cameraState.recording {
        } else {
            self.isSendingImmediately = true
            self.waitingForNextResult = true
            self.node.stopRecording.invoke(Void())
        }
@@ -1362,6 +1429,8 @@ public class VideoMessageCameraScreen: ViewController {
    private var waitingForNextResult = false
    
    public func stopVideoRecording() -> Bool {
        self.node.dismissAllTooltips()
        
        self.waitingForNextResult = true
        self.node.transitioningToPreview = true
        self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
@@ -1376,6 +1445,7 @@ public class VideoMessageCameraScreen: ViewController {
    public func lockVideoRecording() {
        if case .none = self.cameraState.recording {
            self.scheduledLock = true
            self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
        } else {
            self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
        }
@@ -1465,3 +1535,46 @@ private func composition(with results: [VideoMessageCameraScreen.CaptureResult])
    }
    return composition
}

private class BlurView: UIVisualEffectView {
    private func setup() {
        for subview in self.subviews {
            if subview.description.contains("VisualEffectSubview") {
                subview.isHidden = true
            }
        }
        
        if let sublayer = self.layer.sublayers?[0], let filters = sublayer.filters {
            sublayer.backgroundColor = nil
            sublayer.isOpaque = false
            let allowedKeys: [String] = [
                "gaussianBlur"
            ]
            sublayer.filters = filters.filter { filter in
                guard let filter = filter as? NSObject else {
                    return true
                }
                let filterName = String(describing: filter)
                if !allowedKeys.contains(filterName) {
                    return false
                }
                return true
            }
        }
    }
    
    override var effect: UIVisualEffect? {
        get {
            return super.effect
        }
        set {
            super.effect = newValue
            self.setup()
        }
    }
    
    override func didAddSubview(_ subview: UIView) {
        super.didAddSubview(subview)
        self.setup()
    }
}
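
This private BlurView hides the effect view's tint subviews and keeps only the gaussianBlur backdrop filter, so animating its effect fades pure blur in and out without the usual dimming and saturation. A small usage sketch consistent with how the screen drives it above (the container name is illustrative):

    // Sketch: fade the blur in while the capture session restarts, then remove it.
    let blurView = BlurView()
    blurView.isUserInteractionEnabled = false
    previewContainerView.addSubview(blurView)

    UIView.animate(withDuration: 0.25) {
        blurView.effect = UIBlurEffect(style: .dark)
    }
    // ...later, once the preview is live again:
    UIView.animate(withDuration: 0.4) {
        blurView.effect = nil
    }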

View File

@@ -6195,19 +6195,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                if let videoRecorder = videoRecorder {
                    strongSelf.recorderFeedback?.impact(.light)
                    
                    videoRecorder.onDismiss = { [weak self] isCancelled in
                        self?.chatDisplayNode.updateRecordedMediaDeleted(isCancelled)
                        self?.beginMediaRecordingRequestId += 1
                        self?.lockMediaRecordingRequestId = nil
                        self?.videoRecorder.set(.single(nil))
                    }
                    videoRecorder.onStop = {
                        if let strongSelf = self {
                            strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                                $0.updatedInputTextPanelState { panelState in
                                    return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
                                }
                            })
                            strongSelf.dismissMediaRecorder(.pause)
                        }
                    }
                    strongSelf.present(videoRecorder, in: .window(.root))
@@ -15617,12 +15607,6 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                        }
                    }
                }))
                
                // self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                //     $0.updatedInputTextPanelState { panelState in
                //         return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
                //     }
                // })
            }
        default:
            self.recorderDataDisposable.set(nil)