Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-08-02 00:17:02 +00:00)

Commit ea35164d1f: Merge commit '3f6ac92df77e0f26115998191176f58ab38b060c'
@@ -1022,6 +1022,16 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
         return rects
     }
     
+    public func animateInstantVideoFromSnapshot(snapshotView: UIView, transition: CombinedTransition) {
+        for contentNode in self.contentNodes {
+            if let contentNode = contentNode as? ChatMessageInstantVideoBubbleContentNode {
+                snapshotView.frame = contentNode.interactiveVideoNode.view.convert(snapshotView.frame, from: self.view)
+                contentNode.interactiveVideoNode.animateFromSnapshot(snapshotView: snapshotView, transition: transition)
+                return
+            }
+        }
+    }
+    
     override public func didLoad() {
         super.didLoad()
@@ -1778,9 +1778,16 @@ public class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
                 if !self.animatedFadeIn {
                     self.animatedFadeIn = true
                     self.dateAndStatusNode.layer.animateAlpha(from: 0.0, to: self.dateAndStatusNode.alpha, duration: 0.15, delay: 0.18)
+                    
                     if let durationNode = self.durationNode {
                         durationNode.layer.animateAlpha(from: 0.0, to: durationNode.alpha, duration: 0.15, delay: 0.18)
                     }
+                    if let durationBackgroundNode = self.durationBackgroundNode {
+                        durationBackgroundNode.layer.animateAlpha(from: 0.0, to: durationBackgroundNode.alpha, duration: 0.15, delay: 0.18)
+                    }
+                    if let audioTranscriptionButton = self.audioTranscriptionButton {
+                        audioTranscriptionButton.layer.animateAlpha(from: 0.0, to: audioTranscriptionButton.alpha, duration: 0.15, delay: 0.18)
+                    }
                 }
             }
             
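The added lines reuse the node's animateAlpha layer helper: each overlay (duration background, transcription button) fades from alpha 0 up to its current alpha over 0.15 s after a 0.18 s delay, matching the existing date/status fade. For readers outside this codebase, a minimal sketch of that delayed fade with plain Core Animation; the fadeIn helper name is illustrative, not part of the commit:

import UIKit

// Illustrative helper (not from the commit): the delayed fade-in that
// animateAlpha(from:to:duration:delay:) performs for the overlays above.
func fadeIn(_ layer: CALayer, duration: CFTimeInterval = 0.15, delay: CFTimeInterval = 0.18) {
    let animation = CABasicAnimation(keyPath: "opacity")
    animation.fromValue = 0.0
    animation.toValue = layer.opacity
    animation.duration = duration
    animation.beginTime = CACurrentMediaTime() + delay
    animation.fillMode = .backwards // keep the layer invisible during the delay
    layer.add(animation, forKey: "fadeIn")
}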
@@ -0,0 +1,115 @@
+import Foundation
+import UIKit
+import HierarchyTrackingLayer
+import ComponentFlow
+import Display
+
+private let shadowImage: UIImage? = {
+    UIImage(named: "Stories/PanelGradient")
+}()
+
+final class LoadingEffectView: UIView {
+    private let duration: Double
+    
+    private let hierarchyTrackingLayer: HierarchyTrackingLayer
+    
+    private let gradientWidth: CGFloat
+    private let backgroundView: UIImageView
+    
+    private let borderGradientView: UIImageView
+    private let borderContainerView: UIView
+    let borderMaskLayer: SimpleShapeLayer
+    
+    init(effectAlpha: CGFloat, borderAlpha: CGFloat, gradientWidth: CGFloat = 200.0, duration: Double) {
+        self.hierarchyTrackingLayer = HierarchyTrackingLayer()
+        
+        self.duration = duration
+        
+        self.gradientWidth = gradientWidth
+        self.backgroundView = UIImageView()
+        
+        self.borderGradientView = UIImageView()
+        self.borderContainerView = UIView()
+        self.borderMaskLayer = SimpleShapeLayer()
+        
+        super.init(frame: .zero)
+        
+        self.layer.addSublayer(self.hierarchyTrackingLayer)
+        self.hierarchyTrackingLayer.didEnterHierarchy = { [weak self] in
+            guard let self, self.bounds.width != 0.0 else {
+                return
+            }
+            self.updateAnimations(size: self.bounds.size)
+        }
+        
+        let generateGradient: (CGFloat) -> UIImage? = { baseAlpha in
+            return generateImage(CGSize(width: self.gradientWidth, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
+                context.clear(CGRect(origin: CGPoint(), size: size))
+                
+                let foregroundColor = UIColor(white: 1.0, alpha: min(1.0, baseAlpha * 4.0))
+                
+                if let shadowImage {
+                    UIGraphicsPushContext(context)
+                    
+                    for i in 0 ..< 2 {
+                        let shadowFrame = CGRect(origin: CGPoint(x: CGFloat(i) * (size.width * 0.5), y: 0.0), size: CGSize(width: size.width * 0.5, height: size.height))
+                        
+                        context.saveGState()
+                        context.translateBy(x: shadowFrame.midX, y: shadowFrame.midY)
+                        context.rotate(by: CGFloat(i == 0 ? 1.0 : -1.0) * CGFloat.pi * 0.5)
+                        let adjustedRect = CGRect(origin: CGPoint(x: -shadowFrame.height * 0.5, y: -shadowFrame.width * 0.5), size: CGSize(width: shadowFrame.height, height: shadowFrame.width))
+                        
+                        context.clip(to: adjustedRect, mask: shadowImage.cgImage!)
+                        context.setFillColor(foregroundColor.cgColor)
+                        context.fill(adjustedRect)
+                        
+                        context.restoreGState()
+                    }
+                    
+                    UIGraphicsPopContext()
+                }
+            })
+        }
+        self.backgroundView.image = generateGradient(effectAlpha)
+        self.addSubview(self.backgroundView)
+        
+        self.borderGradientView.image = generateGradient(borderAlpha)
+        self.borderContainerView.addSubview(self.borderGradientView)
+        self.addSubview(self.borderContainerView)
+        self.borderContainerView.layer.mask = self.borderMaskLayer
+    }
+    
+    required init?(coder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+    
+    private func updateAnimations(size: CGSize) {
+        if self.backgroundView.layer.animation(forKey: "shimmer") != nil {
+            return
+        }
+        
+        let animation = self.backgroundView.layer.makeAnimation(from: 0.0 as NSNumber, to: (size.width + self.gradientWidth + size.width * 0.2) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: self.duration, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
+        animation.repeatCount = Float.infinity
+        self.backgroundView.layer.add(animation, forKey: "shimmer")
+        self.borderGradientView.layer.add(animation, forKey: "shimmer")
+    }
+    
+    func update(size: CGSize, transition: Transition) {
+        if self.backgroundView.bounds.size != size {
+            self.backgroundView.layer.removeAllAnimations()
+            
+            self.borderMaskLayer.fillColor = nil
+            self.borderMaskLayer.strokeColor = UIColor.white.cgColor
+            let lineWidth: CGFloat = 3.0
+            self.borderMaskLayer.lineWidth = lineWidth
+            self.borderMaskLayer.path = UIBezierPath(ovalIn: CGRect(origin: .zero, size: size)).cgPath
+            
+            transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: size.height)))
+            
+            transition.setFrame(view: self.borderContainerView, frame: CGRect(origin: CGPoint(), size: size))
+            transition.setFrame(view: self.borderGradientView, frame: CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: size.height)))
+        }
+        
+        self.updateAnimations(size: size)
+    }
+}
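The new LoadingEffectView above leans on in-repo helpers (generateImage, SimpleShapeLayer, HierarchyTrackingLayer and the makeAnimation layer extension). A stripped-down sketch of the shimmer mechanic it implements, written against plain UIKit and Core Animation under the same assumptions: a narrow gradient strip parked off-screen and swept across the view by an additive, infinitely repeating position.x animation. ShimmerSweep is an illustrative name, not part of the commit:

import UIKit

// Illustrative sketch (not from the commit): a gradient strip that sweeps across
// the view, mirroring the additive "position.x" shimmer used by LoadingEffectView.
final class ShimmerSweep: UIView {
    private let gradientWidth: CGFloat = 200.0
    private let stripLayer = CAGradientLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        stripLayer.colors = [
            UIColor(white: 1.0, alpha: 0.0).cgColor,
            UIColor(white: 1.0, alpha: 0.4).cgColor,
            UIColor(white: 1.0, alpha: 0.0).cgColor
        ]
        stripLayer.startPoint = CGPoint(x: 0.0, y: 0.5)
        stripLayer.endPoint = CGPoint(x: 1.0, y: 0.5)
        self.layer.addSublayer(stripLayer)
        self.clipsToBounds = true
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Park the strip just off the left edge; the additive animation carries it across.
        stripLayer.frame = CGRect(x: -gradientWidth, y: 0.0, width: gradientWidth, height: bounds.height)
        if stripLayer.animation(forKey: "shimmer") == nil, bounds.width > 0.0 {
            let animation = CABasicAnimation(keyPath: "position.x")
            animation.fromValue = 0.0
            animation.toValue = bounds.width + gradientWidth + bounds.width * 0.2
            animation.isAdditive = true
            animation.duration = 1.0
            animation.timingFunction = CAMediaTimingFunction(name: .easeOut)
            animation.repeatCount = .infinity
            stripLayer.add(animation, forKey: "shimmer")
        }
    }
}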
@@ -76,8 +76,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
         
         let context: AccountContext
         let cameraState: CameraState
+        let previewFrame: CGRect
         let isPreviewing: Bool
         let isMuted: Bool
+        let totalDuration: Double
         let getController: () -> VideoMessageCameraScreen?
         let present: (ViewController) -> Void
         let push: (ViewController) -> Void
@@ -88,8 +90,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
         init(
             context: AccountContext,
             cameraState: CameraState,
+            previewFrame: CGRect,
             isPreviewing: Bool,
             isMuted: Bool,
+            totalDuration: Double,
             getController: @escaping () -> VideoMessageCameraScreen?,
             present: @escaping (ViewController) -> Void,
             push: @escaping (ViewController) -> Void,
@@ -99,8 +103,10 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
         ) {
             self.context = context
             self.cameraState = cameraState
+            self.previewFrame = previewFrame
             self.isPreviewing = isPreviewing
             self.isMuted = isMuted
+            self.totalDuration = totalDuration
             self.getController = getController
             self.present = present
             self.push = push
@@ -113,6 +119,9 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             if lhs.context !== rhs.context {
                 return false
             }
+            if lhs.previewFrame != rhs.previewFrame {
+                return false
+            }
             if lhs.cameraState != rhs.cameraState {
                 return false
             }
@@ -122,6 +131,9 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             if lhs.isMuted != rhs.isMuted {
                 return false
             }
+            if lhs.totalDuration != rhs.totalDuration {
+                return false
+            }
             return true
         }
         
@@ -244,12 +256,12 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                     let duration = initialDuration + recordingData.duration
                     if let self, let controller = self.getController() {
                         controller.updateCameraState({ $0.updatedDuration(duration) }, transition: .easeInOut(duration: 0.1))
-                        if duration > 59.0 {
-                            self.stopVideoRecording()
-                        }
                         if isFirstRecording {
                             controller.node.setupLiveUpload(filePath: recordingData.filePath)
                         }
+                        if duration > 59.5 {
+                            controller.onStop()
+                        }
                     }
                 }))
             }
@@ -305,6 +317,8 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
         let viewOnceButton = Child(PlainButtonComponent.self)
         let recordMoreButton = Child(PlainButtonComponent.self)
+        
+        let muteIcon = Child(ZStack<Empty>.self)
         
         return { context in
             let environment = context.environment[ViewControllerComponentContainer.Environment.self].value
             let component = context.component
@@ -319,16 +333,24 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
             var showRecordMore = false
             if component.isPreviewing {
                 showViewOnce = true
+                if component.totalDuration < 59.0 {
                     showRecordMore = true
                     viewOnceOffset = 67.0
+                } else {
+                    viewOnceOffset = 14.0
+                }
             } else if case .handsFree = component.cameraState.recording {
                 showViewOnce = true
             }
             
-            if let controller = component.getController(), !controller.viewOnceAvailable {
+            if let controller = component.getController() {
+                if controller.isSendingImmediately || controller.scheduledLock {
+                    showViewOnce = true
+                }
+                if !controller.viewOnceAvailable {
                     showViewOnce = false
                 }
+            }
             
             if !component.isPreviewing {
                 let flipButton = flipButton.update(
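The gating above combines three inputs: whether the screen is in preview, how much footage exists (the 59-second check leaves room before the one-minute cap), and controller flags. A small pure-function sketch of that decision, with illustrative names; the real code reads component and controller state directly, and the initial offset value is not shown in this hunk:

import CoreGraphics

// Illustrative sketch (not from the commit): the view-once / record-more gating
// expressed as a pure function over the same inputs the component reads.
struct RecordingControls {
    var showViewOnce = false
    var showRecordMore = false
    var viewOnceOffset: CGFloat = 0.0 // default offset not shown in the hunk
}

func controlsState(
    isPreviewing: Bool,
    isHandsFree: Bool,
    totalDuration: Double,
    isSendingImmediately: Bool,
    scheduledLock: Bool,
    viewOnceAvailable: Bool
) -> RecordingControls {
    var state = RecordingControls()
    if isPreviewing {
        state.showViewOnce = true
        if totalDuration < 59.0 {
            // Still room before the one-minute cap, so offer "record more".
            state.showRecordMore = true
            state.viewOnceOffset = 67.0
        } else {
            state.viewOnceOffset = 14.0
        }
    } else if isHandsFree {
        state.showViewOnce = true
    }
    if isSendingImmediately || scheduledLock {
        state.showViewOnce = true
    }
    if !viewOnceAvailable {
        state.showViewOnce = false
    }
    return state
}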
@@ -444,13 +466,34 @@ private final class VideoMessageCameraScreenComponent: CombinedComponent {
                 )
             }
             
-            // var isVideoRecording = false
-            // if case .video = component.cameraState.mode {
-            //     isVideoRecording = true
-            // } else if component.cameraState.recording != .none {
-            //     isVideoRecording = true
-            // }
+            if component.isPreviewing && component.isMuted {
+                let muteIcon = muteIcon.update(
+                    component: ZStack([
+                        AnyComponentWithIdentity(
+                            id: "background",
+                            component: AnyComponent(
+                                RoundedRectangle(color: UIColor(rgb: 0x000000, alpha: 0.3), cornerRadius: 24.0)
+                            )
+                        ),
+                        AnyComponentWithIdentity(
+                            id: "icon",
+                            component: AnyComponent(
+                                BundleIconComponent(
+                                    name: "Chat/Message/InstantVideoMute",
+                                    tintColor: .white
+                                )
+                            )
+                        )
+                    ]),
+                    availableSize: CGSize(width: 24.0, height: 24.0),
+                    transition: context.transition
+                )
+                context.add(muteIcon
+                    .position(CGPoint(x: component.previewFrame.midX, y: component.previewFrame.maxY - 24.0))
+                    .appear(.default(scale: true, alpha: true))
+                    .disappear(.default(scale: true, alpha: true))
+                )
+            }
             
             return availableSize
         }
@@ -483,10 +526,13 @@ public class VideoMessageCameraScreen: ViewController {
         fileprivate let containerView: UIView
         fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
         fileprivate let previewContainerView: UIView
+        private var previewSnapshotView: UIView?
+        private var previewBlurView: BlurView
        
         fileprivate var mainPreviewView: CameraSimplePreviewView
         fileprivate var additionalPreviewView: CameraSimplePreviewView
         private var progressView: RecordingProgressView
+        private let loadingView: LoadingEffectView
        
         private var resultPreviewView: ResultPreviewView?
        
@@ -560,6 +606,11 @@ public class VideoMessageCameraScreen: ViewController {
             
             self.progressView = RecordingProgressView(frame: .zero)
             
+            self.loadingView = LoadingEffectView(effectAlpha: 0.1, borderAlpha: 0.25, duration: 1.0)
+            
+            self.previewBlurView = BlurView()
+            self.previewBlurView.isUserInteractionEnabled = false
+            
             if isDualCameraEnabled {
                 self.mainPreviewView.resetPlaceholder(front: false)
                 self.additionalPreviewView.resetPlaceholder(front: true)
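For context, the loading overlay is created once with fixed effect and border alphas and then driven purely from layout. A minimal usage sketch; only the size update appears later in this diff, so the frame assignment and the helper name installLoadingEffect are assumptions:

import UIKit
import ComponentFlow

// Illustrative usage (not from the commit): install the shimmer overlay above the
// camera preview and size it on a layout pass.
func installLoadingEffect(in container: UIView, over previewInnerFrame: CGRect) -> LoadingEffectView {
    let loadingView = LoadingEffectView(effectAlpha: 0.1, borderAlpha: 0.25, duration: 1.0)
    container.addSubview(loadingView)
    loadingView.frame = previewInnerFrame // assumption: positioned over the preview
    loadingView.update(size: previewInnerFrame.size, transition: .immediate)
    return loadingView
}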
@@ -589,17 +640,27 @@ public class VideoMessageCameraScreen: ViewController {
             self.previewContainerView.addSubview(self.mainPreviewView)
             self.previewContainerView.addSubview(self.additionalPreviewView)
             self.previewContainerView.addSubview(self.progressView)
+            self.previewContainerView.addSubview(self.previewBlurView)
+            self.previewContainerView.addSubview(self.loadingView)
             
             self.completion.connect { [weak self] result in
                 if let self {
                     self.addCaptureResult(result)
                 }
             }
+            if isDualCameraEnabled {
                 self.mainPreviewView.removePlaceholder(delay: 0.0)
+            }
             self.withReadyCamera(isFirstTime: true, {
-                self.additionalPreviewView.removePlaceholder(delay: 0.35)
+                if isDualCameraEnabled {
+                    self.mainPreviewView.removePlaceholder(delay: 0.0)
+                }
+                self.loadingView.alpha = 0.0
+                self.additionalPreviewView.removePlaceholder(delay: 0.0)
+                
+                Queue.mainQueue().after(0.15) {
                     self.startRecording.invoke(Void())
+                }
             })
             
             self.idleTimerExtensionDisposable.set(self.context.sharedContext.applicationBindings.pushIdleTimerExtension())
@@ -744,10 +805,43 @@ public class VideoMessageCameraScreen: ViewController {
         
         func resumeCameraCapture() {
             if !self.mainPreviewView.isEnabled {
+                if let snapshotView = self.previewContainerView.snapshotView(afterScreenUpdates: false) {
+                    self.previewContainerView.insertSubview(snapshotView, belowSubview: self.previewBlurView)
+                    self.previewSnapshotView = snapshotView
+                }
                 self.mainPreviewView.isEnabled = true
                 self.additionalPreviewView.isEnabled = true
                 self.camera?.startCapture()
                 
+                UIView.animate(withDuration: 0.25, animations: {
+                    self.loadingView.alpha = 1.0
+                    self.previewBlurView.effect = UIBlurEffect(style: .dark)
+                })
+                
+                let action = { [weak self] in
+                    guard let self else {
+                        return
+                    }
+                    UIView.animate(withDuration: 0.4, animations: {
+                        self.previewBlurView.effect = nil
+                        self.previewSnapshotView?.alpha = 0.0
+                    }, completion: { _ in
+                        self.previewSnapshotView?.removeFromSuperview()
+                        self.previewSnapshotView = nil
+                    })
+                }
+                if #available(iOS 13.0, *) {
+                    let _ = (self.mainPreviewView.isPreviewing
+                    |> filter { $0 }
+                    |> take(1)).startStandalone(next: { _ in
+                        action()
+                    })
+                } else {
+                    Queue.mainQueue().after(1.0) {
+                        action()
+                    }
+                }
+                
                 self.cameraIsActive = true
                 self.requestUpdateLayout(transition: .immediate)
             }
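The resume path above freezes the last visible frame as a snapshot, blurs over it while capture restarts, and only removes the blur once the preview reports frames again (with a 1-second fallback on older iOS). A self-contained sketch of that freeze, blur, and cross-fade pattern in plain UIKit; FrozenPreviewOverlay is an illustrative name, and the real code gates the unfreeze on mainPreviewView.isPreviewing:

import UIKit

// Illustrative sketch (not from the commit): freeze the current contents of a view,
// blur over it while a camera restarts, then cross-fade back once frames arrive.
final class FrozenPreviewOverlay {
    private weak var container: UIView?
    private var snapshotView: UIView?
    private let blurView = UIVisualEffectView(effect: nil)

    init(container: UIView) {
        self.container = container
        blurView.isUserInteractionEnabled = false
    }

    func freeze() {
        guard let container else { return }
        blurView.frame = container.bounds
        container.addSubview(blurView)
        if let snapshot = container.snapshotView(afterScreenUpdates: false) {
            container.insertSubview(snapshot, belowSubview: blurView)
            snapshotView = snapshot
        }
        UIView.animate(withDuration: 0.25) {
            self.blurView.effect = UIBlurEffect(style: .dark)
        }
    }

    // Call when the restarted capture session is delivering frames again.
    func unfreeze() {
        UIView.animate(withDuration: 0.4, animations: {
            self.blurView.effect = nil
            self.snapshotView?.alpha = 0.0
        }, completion: { _ in
            self.snapshotView?.removeFromSuperview()
            self.snapshotView = nil
            self.blurView.removeFromSuperview()
        })
    }
}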
@@ -776,11 +870,13 @@ public class VideoMessageCameraScreen: ViewController {
             
             self.transitioningToPreview = false
             
+            if !controller.isSendingImmediately {
                 let composition = composition(with: self.results)
                 controller.updatePreviewState({ _ in
                     return PreviewState(composition: composition, trimRange: nil, isMuted: true)
                 }, transition: .spring(duration: 0.4))
             }
+        }
         
         private func debugSaveResult(path: String) {
             guard let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: .mappedIfSafe) else {
@@ -797,8 +893,16 @@ public class VideoMessageCameraScreen: ViewController {
         
         override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
             let result = super.hitTest(point, with: event)
+            
+            if let resultPreviewView = self.resultPreviewView {
+                if resultPreviewView.bounds.contains(self.view.convert(point, to: resultPreviewView)) {
+                    return resultPreviewView
+                }
+            }
+            
             if let controller = self.controller, let layout = self.validLayout {
-                if point.y > layout.size.height - controller.inputPanelFrame.height - 34.0 {
+                let insets = layout.insets(options: .input)
+                if point.y > layout.size.height - insets.bottom - controller.inputPanelFrame.height {
                     if layout.metrics.isTablet {
                         if point.x < layout.size.width * 0.33 {
                             return result
@@ -807,6 +911,7 @@ public class VideoMessageCameraScreen: ViewController {
                         return nil
                     }
                 }
+            }
             
             return result
         }
@@ -916,8 +1021,6 @@ public class VideoMessageCameraScreen: ViewController {
             let isFirstTime = self.validLayout == nil
             self.validLayout = layout
             
-            // let isTablet = layout.metrics.isTablet
-            
             let environment = ViewControllerComponentContainer.Environment(
                 statusBarHeight: layout.statusBarHeight ?? 0.0,
                 navigationHeight: 0.0,
@@ -944,44 +1047,9 @@ public class VideoMessageCameraScreen: ViewController {
                 self.didAppear()
             }
             
-            let backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: controller.inputPanelFrame.minY))
-            let componentSize = self.componentHost.update(
-                transition: transition,
-                component: AnyComponent(
-                    VideoMessageCameraScreenComponent(
-                        context: self.context,
-                        cameraState: self.cameraState,
-                        isPreviewing: self.previewState != nil || self.transitioningToPreview,
-                        isMuted: self.previewState?.isMuted ?? true,
-                        getController: { [weak self] in
-                            return self?.controller
-                        },
-                        present: { [weak self] c in
-                            self?.controller?.present(c, in: .window(.root))
-                        },
-                        push: { [weak self] c in
-                            self?.controller?.push(c)
-                        },
-                        startRecording: self.startRecording,
-                        stopRecording: self.stopRecording,
-                        completion: self.completion
-                    )
-                ),
-                environment: {
-                    environment
-                },
-                forceUpdate: forceUpdate,
-                containerSize: backgroundFrame.size
-            )
-            if let componentView = self.componentHost.view {
-                if componentView.superview == nil {
-                    self.containerView.addSubview(componentView)
-                    componentView.clipsToBounds = true
-                }
-                
-                let componentFrame = CGRect(origin: .zero, size: componentSize)
-                transition.setFrame(view: componentView, frame: componentFrame)
+            var backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: controller.inputPanelFrame.minY))
+            if backgroundFrame.maxY < layout.size.height - 100.0 && (layout.inputHeight ?? 0.0).isZero {
+                backgroundFrame = CGRect(origin: .zero, size: CGSize(width: layout.size.width, height: layout.size.height - layout.intrinsicInsets.bottom - controller.inputPanelFrame.height))
             }
             
             transition.setPosition(view: self.backgroundView, position: backgroundFrame.center)
@@ -1015,6 +1083,50 @@ public class VideoMessageCameraScreen: ViewController {
             
             transition.setAlpha(view: self.additionalPreviewView, alpha: self.cameraState.position == .front ? 1.0 : 0.0)
+            
+            self.previewBlurView.frame = previewInnerFrame
+            self.previewSnapshotView?.frame = previewInnerFrame
+            self.loadingView.update(size: previewInnerFrame.size, transition: .immediate)
+            
+            let componentSize = self.componentHost.update(
+                transition: transition,
+                component: AnyComponent(
+                    VideoMessageCameraScreenComponent(
+                        context: self.context,
+                        cameraState: self.cameraState,
+                        previewFrame: previewFrame,
+                        isPreviewing: self.previewState != nil || self.transitioningToPreview,
+                        isMuted: self.previewState?.isMuted ?? true,
+                        totalDuration: self.previewState?.composition.duration.seconds ?? 0.0,
+                        getController: { [weak self] in
+                            return self?.controller
+                        },
+                        present: { [weak self] c in
+                            self?.controller?.present(c, in: .window(.root))
+                        },
+                        push: { [weak self] c in
+                            self?.controller?.push(c)
+                        },
+                        startRecording: self.startRecording,
+                        stopRecording: self.stopRecording,
+                        completion: self.completion
+                    )
+                ),
+                environment: {
+                    environment
+                },
+                forceUpdate: forceUpdate,
+                containerSize: backgroundFrame.size
+            )
+            if let componentView = self.componentHost.view {
+                if componentView.superview == nil {
+                    self.containerView.addSubview(componentView)
+                    componentView.clipsToBounds = true
+                }
+                
+                let componentFrame = CGRect(origin: .zero, size: componentSize)
+                transition.setFrame(view: componentView, frame: componentFrame)
+            }
+            
             if let previewState = self.previewState {
                 if previewState.composition !== self.resultPreviewView?.composition {
                     self.resultPreviewView?.removeFromSuperview()
@@ -1106,9 +1218,6 @@ public class VideoMessageCameraScreen: ViewController {
     private let durationValue = ValuePromise<TimeInterval>(0.0)
     public let recordingStatus: RecordingStatus
     
-    public var onDismiss: (Bool) -> Void = { _ in
-    }
-    
     public var onStop: () -> Void = {
     }
     
@@ -1251,6 +1360,7 @@ public class VideoMessageCameraScreen: ViewController {
         super.displayNodeDidLoad()
     }
     
+    fileprivate var isSendingImmediately = false
     public func sendVideoRecording() {
         if case .none = self.cameraState.recording, self.node.results.isEmpty {
             self.completion(nil)
@@ -1259,6 +1369,7 @@ public class VideoMessageCameraScreen: ViewController {
         
         if case .none = self.cameraState.recording {
         } else {
+            self.isSendingImmediately = true
             self.waitingForNextResult = true
             self.node.stopRecording.invoke(Void())
         }
@@ -1362,6 +1473,8 @@ public class VideoMessageCameraScreen: ViewController {
     
     private var waitingForNextResult = false
     public func stopVideoRecording() -> Bool {
+        self.node.dismissAllTooltips()
+        
         self.waitingForNextResult = true
         self.node.transitioningToPreview = true
         self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
@@ -1376,6 +1489,7 @@ public class VideoMessageCameraScreen: ViewController {
     public func lockVideoRecording() {
         if case .none = self.cameraState.recording {
             self.scheduledLock = true
+            self.node.requestUpdateLayout(transition: .spring(duration: 0.4))
         } else {
             self.updateCameraState({ $0.updatedRecording(.handsFree) }, transition: .spring(duration: 0.4))
         }
@@ -1396,7 +1510,7 @@ public class VideoMessageCameraScreen: ViewController {
     }
     
     public func hideVideoSnapshot() {
-        self.node.previewContainerView.alpha = 0.02
+        self.node.previewContainerView.isHidden = true
    }
    
    public func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
@@ -1465,3 +1579,46 @@ private func composition(with results: [VideoMessageCameraScreen.CaptureResult])
     }
     return composition
 }
+
+private class BlurView: UIVisualEffectView {
+    private func setup() {
+        for subview in self.subviews {
+            if subview.description.contains("VisualEffectSubview") {
+                subview.isHidden = true
+            }
+        }
+        
+        if let sublayer = self.layer.sublayers?[0], let filters = sublayer.filters {
+            sublayer.backgroundColor = nil
+            sublayer.isOpaque = false
+            let allowedKeys: [String] = [
+                "gaussianBlur"
+            ]
+            sublayer.filters = filters.filter { filter in
+                guard let filter = filter as? NSObject else {
+                    return true
+                }
+                let filterName = String(describing: filter)
+                if !allowedKeys.contains(filterName) {
+                    return false
+                }
+                return true
+            }
+        }
+    }
+    
+    override var effect: UIVisualEffect? {
+        get {
+            return super.effect
+        }
+        set {
+            super.effect = newValue
+            self.setup()
+        }
+    }
+    
+    override func didAddSubview(_ subview: UIView) {
+        super.didAddSubview(subview)
+        self.setup()
+    }
+}
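BlurView above hides the visual-effect tint subview and strips every backdrop filter except gaussianBlur, so assigning effect gives a plain blur without the usual dimming, and assigning nil fades the blur back out. This relies on private UIVisualEffectView internals and should be treated as fragile. A short usage sketch consistent with how the diff drives it:

import UIKit

// Illustrative usage (not from the commit): drive the custom BlurView the way the
// screen does. Setting `effect` inside an animation block cross-fades the blur.
func setBlurred(_ blurView: BlurView, blurred: Bool) {
    UIView.animate(withDuration: blurred ? 0.25 : 0.4) {
        blurView.effect = blurred ? UIBlurEffect(style: .dark) : nil
    }
}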
@@ -6195,19 +6195,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 if let videoRecorder = videoRecorder {
                     strongSelf.recorderFeedback?.impact(.light)
                     
-                    videoRecorder.onDismiss = { [weak self] isCancelled in
-                        self?.chatDisplayNode.updateRecordedMediaDeleted(isCancelled)
-                        self?.beginMediaRecordingRequestId += 1
-                        self?.lockMediaRecordingRequestId = nil
-                        self?.videoRecorder.set(.single(nil))
-                    }
                     videoRecorder.onStop = {
                         if let strongSelf = self {
-                            strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
-                                $0.updatedInputTextPanelState { panelState in
-                                    return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
-                                }
-                            })
+                            strongSelf.dismissMediaRecorder(.pause)
                         }
                     }
                     strongSelf.present(videoRecorder, in: .window(.root))
@@ -15352,7 +15342,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
     func deactivateRaiseGesture() {
         self.raiseToListenActivateRecordingTimer?.invalidate()
         self.raiseToListenActivateRecordingTimer = nil
-        self.dismissMediaRecorder(.preview)
+        self.dismissMediaRecorder(.pause)
     }
     
     func requestAudioRecorder(beginWithTone: Bool) {
@@ -15617,12 +15607,6 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                     }
                 }
             }))
-            
-            // self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
-            //     $0.updatedInputTextPanelState { panelState in
-            //         return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
-            //     }
-            // })
         }
     default:
         self.recorderDataDisposable.set(nil)
@@ -15687,6 +15671,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
             self.videoRecorder.set(.single(nil))
         }
         
+        self.recorderDataDisposable.set(nil)
         self.chatDisplayNode.updateRecordedMediaDeleted(true)
         self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
             $0.updatedRecordedMediaPreview(nil)
@@ -635,7 +635,7 @@ public final class ChatMessageTransitionNodeImpl: ASDisplayNode, ChatMessageTran
         case let .videoMessage(videoMessage):
             let combinedTransition = CombinedTransition(horizontal: .animated(duration: horizontalDuration, curve: ChatMessageTransitionNodeImpl.horizontalAnimationCurve), vertical: .animated(duration: verticalDuration, curve: ChatMessageTransitionNodeImpl.verticalAnimationCurve))
             
-            if let itemNode = self.itemNode as? ChatMessageInstantVideoItemNode {
+            if let itemNode = self.itemNode as? ChatMessageBubbleItemNode {
                 itemNode.cancelInsertionAnimations()
                 
                 self.contextSourceNode.isExtractedToContextPreview = true
@@ -659,7 +659,7 @@ public final class ChatMessageTransitionNodeImpl: ASDisplayNode, ChatMessageTran
                     strongSelf.endAnimation()
                 })
                 
-                itemNode.animateFromSnapshot(snapshotView: videoMessage.view, transition: combinedTransition)
+                itemNode.animateInstantVideoFromSnapshot(snapshotView: videoMessage.view, transition: combinedTransition)
             }
         case let .mediaInput(mediaInput):
             if let snapshotView = mediaInput.extractSnapshot() {