[WIP] Stories

Ali
2023-05-19 18:10:12 +04:00
parent f72f2e3c60
commit 862cb0b366
42 changed files with 3617 additions and 925 deletions

View File

@@ -0,0 +1,357 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
import SwiftSignalKit
import LottieComponent
import HierarchyTrackingLayer
import ManagedAnimationNode
import AudioWaveformComponent
import UniversalMediaPlayer
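// Play/pause toggle icon driven by the bundled "anim_playpause" Lottie animation;
// enqueueState(_:animated:) tracks between the two states by playing the matching frame range.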
private final class PlayPauseIconNode: ManagedAnimationNode {
enum State: Equatable {
case play
case pause
}
private let duration: Double = 0.35
private var iconState: State = .pause
init() {
super.init(size: CGSize(width: 28.0, height: 28.0))
self.enqueueState(.play, animated: false)
}
func enqueueState(_ state: State, animated: Bool) {
guard self.iconState != state else {
return
}
let previousState = self.iconState
self.iconState = state
switch previousState {
case .pause:
switch state {
case .play:
if animated {
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 41, endFrame: 83), duration: self.duration))
} else {
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 0, endFrame: 0), duration: 0.01))
}
case .pause:
break
}
case .play:
switch state {
case .pause:
if animated {
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 0, endFrame: 41), duration: self.duration))
} else {
self.trackTo(item: ManagedAnimationItem(source: .local("anim_playpause"), frames: .range(startFrame: 41, endFrame: 41), duration: 0.01))
}
case .play:
break
}
}
}
}
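// Formats a duration in seconds as m:ss, or h:mm:ss once it reaches an hour (e.g. 75 -> "1:15").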
private func textForDuration(seconds: Int32) -> String {
if seconds >= 60 * 60 {
return String(format: "%d:%02d:%02d", seconds / 3600, seconds / 60 % 60)
} else {
return String(format: "%d:%02d", seconds / 60, seconds % 60)
}
}
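// Panel shown in place of the text field once a voice recording is stopped for preview:
// renders the recorded waveform with scrubbing, a play/pause button and the total duration.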
public final class MediaPreviewPanelComponent: Component {
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let mediaPreview: ChatRecordedMediaPreview
public let insets: UIEdgeInsets
public init(
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
mediaPreview: ChatRecordedMediaPreview,
insets: UIEdgeInsets
) {
self.context = context
self.theme = theme
self.strings = strings
self.mediaPreview = mediaPreview
self.insets = insets
}
public static func ==(lhs: MediaPreviewPanelComponent, rhs: MediaPreviewPanelComponent) -> Bool {
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.mediaPreview !== rhs.mediaPreview {
return false
}
if lhs.insets != rhs.insets {
return false
}
return true
}
public final class View: UIView {
private var component: MediaPreviewPanelComponent?
private weak var state: EmptyComponentState?
public let vibrancyContainer: UIView
private let trackingLayer: HierarchyTrackingLayer
private let indicator = ComponentView<Empty>()
private let timerFont: UIFont
private let timerText = ComponentView<Empty>()
private var timerTextValue: String = "0:00"
private let playPauseIconButton: HighlightableButton
private let playPauseIconNode: PlayPauseIconNode
private let waveform = ComponentView<Empty>()
private let vibrancyWaveform = ComponentView<Empty>()
private var mediaPlayer: MediaPlayer?
private let mediaPlayerStatus = Promise<MediaPlayerStatus?>(nil)
private var mediaPlayerStatusDisposable: Disposable?
override init(frame: CGRect) {
self.trackingLayer = HierarchyTrackingLayer()
self.timerFont = Font.with(size: 15.0, design: .camera, traits: .monospacedNumbers)
self.vibrancyContainer = UIView()
self.playPauseIconButton = HighlightableButton()
self.playPauseIconNode = PlayPauseIconNode()
self.playPauseIconNode.isUserInteractionEnabled = false
super.init(frame: frame)
self.layer.addSublayer(self.trackingLayer)
self.playPauseIconButton.addSubview(self.playPauseIconNode.view)
self.addSubview(self.playPauseIconButton)
self.playPauseIconButton.addTarget(self, action: #selector(self.playPauseButtonPressed), for: .touchUpInside)
self.mediaPlayerStatusDisposable = (self.mediaPlayerStatus.get()
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self else {
return
}
if let status {
switch status.status {
case .playing, .buffering(_, true, _, _):
self.playPauseIconNode.enqueueState(.play, animated: true)
default:
self.playPauseIconNode.enqueueState(.pause, animated: true)
}
//self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration)
} else {
self.playPauseIconNode.enqueueState(.play, animated: true)
}
})
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.mediaPlayerStatusDisposable?.dispose()
}
public func animateIn() {
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
}
public func animateOut(transition: Transition, completion: @escaping () -> Void) {
let vibrancyContainer = self.vibrancyContainer
transition.setAlpha(view: vibrancyContainer, alpha: 0.0, completion: { [weak vibrancyContainer] _ in
vibrancyContainer?.removeFromSuperview()
})
transition.setAlpha(view: self, alpha: 0.0, completion: { _ in
completion()
})
}
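// The first tap lazily creates a MediaPlayer for the recorded resource and starts playback;
// subsequent taps toggle play/pause. When playback ends, the player seeks back to the start.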
@objc private func playPauseButtonPressed() {
guard let component = self.component else {
return
}
if let mediaPlayer = self.mediaPlayer {
mediaPlayer.togglePlayPause()
} else {
let mediaManager = component.context.sharedContext.mediaManager
let mediaPlayer = MediaPlayer(
audioSessionManager: mediaManager.audioSession,
postbox: component.context.account.postbox,
userLocation: .other,
userContentType: .audio,
resourceReference: .standalone(resource: component.mediaPreview.resource),
streamable: .none,
video: false,
preferSoftwareDecoding: false,
enableSound: true,
fetchAutomatically: true
)
mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in
mediaPlayer?.seek(timestamp: 0.0)
}
self.mediaPlayer = mediaPlayer
self.mediaPlayerStatus.set(mediaPlayer.status |> map(Optional.init))
mediaPlayer.play()
}
}
func update(component: MediaPreviewPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
if self.component == nil {
self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration)
}
self.component = component
self.state = state
let timerTextSize = self.timerText.update(
transition: .immediate,
component: AnyComponent(Text(text: self.timerTextValue, font: self.timerFont, color: .white)),
environment: {},
containerSize: CGSize(width: 100.0, height: 100.0)
)
if let timerTextView = self.timerText.view {
if timerTextView.superview == nil {
self.addSubview(timerTextView)
timerTextView.layer.anchorPoint = CGPoint(x: 1.0, y: 0.5)
}
let timerTextFrame = CGRect(origin: CGPoint(x: availableSize.width - component.insets.right - 8.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
transition.setPosition(view: timerTextView, position: CGPoint(x: timerTextFrame.minX, y: timerTextFrame.midY))
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
}
let playPauseSize = CGSize(width: 28.0, height: 28.0)
var playPauseFrame = CGRect(origin: CGPoint(x: component.insets.left + 8.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - playPauseSize.height) * 0.5)), size: playPauseSize)
let playPauseButtonFrame = playPauseFrame.insetBy(dx: -8.0, dy: -8.0)
playPauseFrame = playPauseFrame.offsetBy(dx: -playPauseButtonFrame.minX, dy: -playPauseButtonFrame.minY)
transition.setFrame(view: self.playPauseIconButton, frame: playPauseButtonFrame)
transition.setFrame(view: self.playPauseIconNode.view, frame: playPauseFrame)
let waveformFrame = CGRect(origin: CGPoint(x: component.insets.left + 47.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - 24.0) * 0.5)), size: CGSize(width: availableSize.width - component.insets.right - 47.0 - (component.insets.left + 47.0), height: 24.0))
let _ = self.waveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: UIColor.white.withAlphaComponent(0.1),
foregroundColor: UIColor.white.withAlphaComponent(1.0),
shimmerColor: nil,
style: .middle,
samples: component.mediaPreview.waveform.samples,
peak: component.mediaPreview.waveform.peak,
status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in
if let value {
return value
} else {
return MediaPlayerStatus(
generationTimestamp: 0.0,
duration: 0.0,
dimensions: CGSize(),
timestamp: 0.0,
baseRate: 1.0,
seekId: 0,
status: .paused,
soundEnabled: true
)
}
},
seek: { [weak self] timestamp in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
mediaPlayer.seek(timestamp: timestamp)
},
updateIsSeeking: { [weak self] isSeeking in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
if isSeeking {
mediaPlayer.pause()
} else {
mediaPlayer.play()
}
}
)),
environment: {},
containerSize: waveformFrame.size
)
let _ = self.vibrancyWaveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: .white,
foregroundColor: .white,
shimmerColor: nil,
style: .middle,
samples: component.mediaPreview.waveform.samples,
peak: component.mediaPreview.waveform.peak,
status: .complete(),
seek: nil,
updateIsSeeking: nil
)),
environment: {},
containerSize: waveformFrame.size
)
if let waveformView = self.waveform.view as? AudioWaveformComponent.View {
if waveformView.superview == nil {
waveformView.enableScrubbing = true
self.addSubview(waveformView)
}
transition.setFrame(view: waveformView, frame: waveformFrame)
}
if let vibrancyWaveformView = self.vibrancyWaveform.view {
if vibrancyWaveformView.superview == nil {
self.vibrancyContainer.addSubview(vibrancyWaveformView)
}
transition.setFrame(view: vibrancyWaveformView, frame: waveformFrame)
}
transition.setFrame(view: self.vibrancyContainer, frame: CGRect(origin: CGPoint(), size: availableSize))
return availableSize
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}

View File

@@ -13,33 +13,60 @@ import LottieComponent
import HierarchyTrackingLayer
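// Panel shown while audio or video recording is in progress: blinking record indicator,
// elapsed-time label, "Slide to cancel" hint, and a Cancel button once recording is locked.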
public final class MediaRecordingPanelComponent: Component {
public let theme: PresentationTheme
public let strings: PresentationStrings
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let isRecordingLocked: Bool
public let cancelFraction: CGFloat
public let inputInsets: UIEdgeInsets
public let insets: UIEdgeInsets
public let cancelAction: () -> Void
public init(
theme: PresentationTheme,
strings: PresentationStrings,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
isRecordingLocked: Bool,
cancelFraction: CGFloat,
insets: UIEdgeInsets
inputInsets: UIEdgeInsets,
insets: UIEdgeInsets,
cancelAction: @escaping () -> Void
) {
self.theme = theme
self.strings = strings
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.isRecordingLocked = isRecordingLocked
self.cancelFraction = cancelFraction
self.inputInsets = inputInsets
self.insets = insets
self.cancelAction = cancelAction
}
public static func ==(lhs: MediaRecordingPanelComponent, rhs: MediaRecordingPanelComponent) -> Bool {
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
if lhs.isRecordingLocked != rhs.isRecordingLocked {
return false
}
if lhs.cancelFraction != rhs.cancelFraction {
return false
}
if lhs.inputInsets != rhs.inputInsets {
return false
}
if lhs.insets != rhs.insets {
return false
}
@@ -50,13 +77,21 @@ public final class MediaRecordingPanelComponent: Component {
private var component: MediaRecordingPanelComponent?
private weak var state: EmptyComponentState?
public let vibrancyContainer: UIView
private let trackingLayer: HierarchyTrackingLayer
private let indicator = ComponentView<Empty>()
private let cancelContainerView: UIView
private let vibrancyCancelContainerView: UIView
private let cancelIconView: UIImageView
private let vibrancyCancelIconView: UIImageView
private let vibrancyCancelText = ComponentView<Empty>()
private let cancelText = ComponentView<Empty>()
private let vibrancyCancelButtonText = ComponentView<Empty>()
private let cancelButtonText = ComponentView<Empty>()
private var cancelButton: HighlightableButton?
private let timerFont: UIFont
private let timerText = ComponentView<Empty>()
@@ -68,16 +103,23 @@ public final class MediaRecordingPanelComponent: Component {
override init(frame: CGRect) {
self.trackingLayer = HierarchyTrackingLayer()
self.cancelIconView = UIImageView()
self.vibrancyCancelIconView = UIImageView()
self.timerFont = Font.with(size: 15.0, design: .camera, traits: .monospacedNumbers)
self.vibrancyContainer = UIView()
self.cancelContainerView = UIView()
self.vibrancyCancelContainerView = UIView()
super.init(frame: frame)
self.layer.addSublayer(self.trackingLayer)
self.cancelContainerView.addSubview(self.cancelIconView)
self.vibrancyCancelContainerView.addSubview(self.vibrancyCancelIconView)
self.vibrancyContainer.addSubview(self.vibrancyCancelContainerView)
self.addSubview(self.cancelContainerView)
self.trackingLayer.didEnterHierarchy = { [weak self] in
@@ -97,6 +139,10 @@ public final class MediaRecordingPanelComponent: Component {
}
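// Re-adds the looping opacity animation on the record indicator and, unless recording is locked,
// the horizontal nudge animation on the cancel hint.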
private func updateAnimations() {
guard let component = self.component else {
return
}
if let indicatorView = self.indicator.view {
if indicatorView.layer.animation(forKey: "recording") == nil {
let animation = CAKeyframeAnimation(keyPath: "opacity")
@@ -109,7 +155,7 @@ public final class MediaRecordingPanelComponent: Component {
indicatorView.layer.add(animation, forKey: "recording")
}
}
if self.cancelContainerView.layer.animation(forKey: "recording") == nil {
if !component.isRecordingLocked, self.cancelContainerView.layer.animation(forKey: "recording") == nil {
let animation = CAKeyframeAnimation(keyPath: "position.x")
animation.values = [-5.0 as NSNumber, 5.0 as NSNumber, 0.0 as NSNumber]
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
@@ -119,26 +165,40 @@ public final class MediaRecordingPanelComponent: Component {
animation.repeatCount = Float.infinity
self.cancelContainerView.layer.add(animation, forKey: "recording")
self.vibrancyCancelContainerView.layer.add(animation, forKey: "recording")
}
}
public func animateIn() {
guard let component = self.component else {
return
}
if let indicatorView = self.indicator.view {
indicatorView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
indicatorView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
indicatorView.layer.animatePosition(from: CGPoint(x: component.inputInsets.left - component.insets.left, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
if let timerTextView = self.timerText.view {
timerTextView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
timerTextView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25)
timerTextView.layer.animatePosition(from: CGPoint(x: component.inputInsets.left - component.insets.left, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
self.cancelContainerView.layer.animatePosition(from: CGPoint(x: self.bounds.width, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
self.vibrancyCancelContainerView.layer.animatePosition(from: CGPoint(x: self.bounds.width, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
public func animateOut(dismissRecording: Bool, completion: @escaping () -> Void) {
if let indicatorView = self.indicator.view as? LottieComponent.View {
if let _ = indicatorView.layer.animation(forKey: "recording") {
let fromAlpha = indicatorView.layer.presentation()?.opacity ?? indicatorView.layer.opacity
indicatorView.layer.removeAnimation(forKey: "recording")
indicatorView.layer.animateAlpha(from: CGFloat(fromAlpha), to: 1.0, duration: 0.2)
public func animateOut(transition: Transition, dismissRecording: Bool, completion: @escaping () -> Void) {
guard let component = self.component else {
completion()
return
}
if let indicatorView = self.indicator.view as? LottieComponent.View, let _ = indicatorView.layer.animation(forKey: "recording") {
let fromAlpha = indicatorView.layer.presentation()?.opacity ?? indicatorView.layer.opacity
indicatorView.layer.removeAnimation(forKey: "recording")
indicatorView.layer.animateAlpha(from: CGFloat(fromAlpha), to: 1.0, duration: 0.2)
}
if dismissRecording {
if let indicatorView = self.indicator.view as? LottieComponent.View {
indicatorView.playOnce(completion: { [weak indicatorView] in
if let indicatorView {
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
@@ -147,19 +207,35 @@ public final class MediaRecordingPanelComponent: Component {
completion()
})
} else {
completion()
}
} else {
completion()
if let indicatorView = self.indicator.view as? LottieComponent.View {
transition.setPosition(view: indicatorView, position: indicatorView.center.offsetBy(dx: component.inputInsets.left - component.insets.left, dy: 0.0))
transition.setAlpha(view: indicatorView, alpha: 0.0)
}
}
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
if let timerTextView = self.timerText.view {
transition.setAlpha(view: timerTextView, alpha: 0.0)
transition.setAlpha(view: timerTextView, alpha: 0.0, completion: { _ in
if !dismissRecording {
completion()
}
})
transition.setScale(view: timerTextView, scale: 0.001)
transition.setPosition(view: timerTextView, position: timerTextView.center.offsetBy(dx: component.inputInsets.left - component.insets.left, dy: 0.0))
}
transition.setAlpha(view: self.cancelContainerView, alpha: 0.0)
transition.setAlpha(view: self.vibrancyCancelContainerView, alpha: 0.0)
}
@objc private func cancelButtonPressed() {
guard let component = self.component else {
return
}
component.cancelAction()
}
func update(component: MediaRecordingPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
@@ -240,7 +316,7 @@ public final class MediaRecordingPanelComponent: Component {
if indicatorView.superview == nil {
self.addSubview(indicatorView)
}
transition.setFrame(view: indicatorView, frame: CGRect(origin: CGPoint(x: 3.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - indicatorSize.height) * 0.5)), size: indicatorSize))
transition.setFrame(view: indicatorView, frame: CGRect(origin: CGPoint(x: 5.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - indicatorSize.height) * 0.5)), size: indicatorSize))
}
let timerTextSize = self.timerText.update(
@@ -254,25 +330,48 @@ public final class MediaRecordingPanelComponent: Component {
self.addSubview(timerTextView)
timerTextView.layer.anchorPoint = CGPoint(x: 0.0, y: 0.5)
}
let timerTextFrame = CGRect(origin: CGPoint(x: 38.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
let timerTextFrame = CGRect(origin: CGPoint(x: 40.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - timerTextSize.height) * 0.5)), size: timerTextSize)
transition.setPosition(view: timerTextView, position: CGPoint(x: timerTextFrame.minX, y: timerTextFrame.midY))
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
}
if self.cancelIconView.image == nil {
self.cancelIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
let image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
self.cancelIconView.image = image
self.vibrancyCancelIconView.image = image
}
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.4)
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)
self.vibrancyCancelIconView.tintColor = .white
let cancelTextSize = self.cancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(white: 1.0, alpha: 0.4))),
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(rgb: 0xffffff, alpha: 0.3))),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)
let _ = self.vibrancyCancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: .white)),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)
let cancelButtonTextSize = self.cancelButtonText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Cancel", font: Font.regular(17.0), color: .white)),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)
let _ = self.vibrancyCancelButtonText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Cancel", font: Font.regular(17.0), color: .clear)),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)
var textFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelTextSize.width) * 0.5), y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - cancelTextSize.height) * 0.5)), size: cancelTextSize)
let cancelButtonTextFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelButtonTextSize.width) * 0.5), y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - cancelButtonTextSize.height) * 0.5)), size: cancelButtonTextSize)
let bandingStart: CGFloat = 0.0
let bandedOffset = abs(component.cancelFraction) - bandingStart
@@ -282,18 +381,105 @@ public final class MediaRecordingPanelComponent: Component {
textFrame.origin.x -= mappedCancelFraction * 0.5
if component.isRecordingLocked {
if self.cancelContainerView.layer.animation(forKey: "recording") != nil {
if let presentation = self.cancelContainerView.layer.presentation() {
transition.animatePosition(view: self.cancelContainerView, from: presentation.position, to: CGPoint())
transition.animatePosition(view: self.vibrancyCancelContainerView, from: presentation.position, to: CGPoint())
}
self.cancelContainerView.layer.removeAnimation(forKey: "recording")
self.vibrancyCancelContainerView.layer.removeAnimation(forKey: "recording")
}
}
if let cancelTextView = self.cancelText.view {
if cancelTextView.superview == nil {
self.cancelContainerView.addSubview(cancelTextView)
}
transition.setFrame(view: cancelTextView, frame: textFrame)
transition.setPosition(view: cancelTextView, position: textFrame.center)
transition.setBounds(view: cancelTextView, bounds: CGRect(origin: CGPoint(), size: textFrame.size))
transition.setAlpha(view: cancelTextView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: cancelTextView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
}
if let vibrancyCancelTextView = self.vibrancyCancelText.view {
if vibrancyCancelTextView.superview == nil {
self.vibrancyCancelContainerView.addSubview(vibrancyCancelTextView)
}
transition.setPosition(view: vibrancyCancelTextView, position: textFrame.center)
transition.setBounds(view: vibrancyCancelTextView, bounds: CGRect(origin: CGPoint(), size: textFrame.size))
transition.setAlpha(view: vibrancyCancelTextView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: vibrancyCancelTextView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
}
if let cancelButtonTextView = self.cancelButtonText.view {
if cancelButtonTextView.superview == nil {
self.cancelContainerView.addSubview(cancelButtonTextView)
}
transition.setPosition(view: cancelButtonTextView, position: cancelButtonTextFrame.center)
transition.setBounds(view: cancelButtonTextView, bounds: CGRect(origin: CGPoint(), size: cancelButtonTextFrame.size))
transition.setAlpha(view: cancelButtonTextView, alpha: component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: cancelButtonTextView, scale: component.isRecordingLocked ? 1.0 : 0.001)
}
if let vibrancyCancelButtonTextView = self.vibrancyCancelButtonText.view {
if vibrancyCancelButtonTextView.superview == nil {
self.vibrancyCancelContainerView.addSubview(vibrancyCancelButtonTextView)
}
transition.setPosition(view: vibrancyCancelButtonTextView, position: cancelButtonTextFrame.center)
transition.setBounds(view: vibrancyCancelButtonTextView, bounds: CGRect(origin: CGPoint(), size: cancelButtonTextFrame.size))
transition.setAlpha(view: vibrancyCancelButtonTextView, alpha: component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: vibrancyCancelButtonTextView, scale: component.isRecordingLocked ? 1.0 : 0.001)
}
if component.isRecordingLocked {
let cancelButton: HighlightableButton
if let current = self.cancelButton {
cancelButton = current
} else {
cancelButton = HighlightableButton()
self.cancelButton = cancelButton
self.addSubview(cancelButton)
cancelButton.highligthedChanged = { [weak self] highlighted in
guard let self else {
return
}
if highlighted {
self.cancelContainerView.alpha = 0.6
self.vibrancyCancelContainerView.alpha = 0.6
} else {
self.cancelContainerView.alpha = 1.0
self.vibrancyCancelContainerView.alpha = 1.0
self.cancelContainerView.layer.animateAlpha(from: 0.6, to: 1.0, duration: 0.2)
self.vibrancyCancelContainerView.layer.animateAlpha(from: 0.6, to: 1.0, duration: 0.2)
}
}
cancelButton.addTarget(self, action: #selector(self.cancelButtonPressed), for: .touchUpInside)
}
cancelButton.frame = CGRect(origin: CGPoint(x: cancelButtonTextFrame.minX - 8.0, y: 0.0), size: CGSize(width: cancelButtonTextFrame.width + 8.0 * 2.0, height: availableSize.height))
} else if let cancelButton = self.cancelButton {
cancelButton.removeFromSuperview()
}
if let image = self.cancelIconView.image {
transition.setFrame(view: self.cancelIconView, frame: CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size))
let iconFrame = CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.cancelIconView, position: iconFrame.center)
transition.setBounds(view: self.cancelIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
transition.setAlpha(view: self.cancelIconView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: self.cancelIconView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
transition.setPosition(view: self.vibrancyCancelIconView, position: iconFrame.center)
transition.setBounds(view: self.vibrancyCancelIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
transition.setAlpha(view: self.vibrancyCancelIconView, alpha: !component.isRecordingLocked ? 1.0 : 0.0)
transition.setScale(view: self.vibrancyCancelIconView, scale: !component.isRecordingLocked ? 1.0 : 0.001)
}
self.updateAnimations()
transition.setFrame(view: self.vibrancyContainer, frame: CGRect(origin: CGPoint(), size: availableSize))
return availableSize
}
}

View File

@@ -8,6 +8,19 @@ import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
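// Modes that display a static bundle icon map to the image name here; the remaining modes
// (send, apply, voice/video input) return nil and have their icon drawn in code below.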
private extension MessageInputActionButtonComponent.Mode {
var iconName: String? {
switch self {
case .delete:
return "Chat/Context Menu/Delete"
case .attach:
return "Chat/Input/Text/IconAttachment"
default:
return nil
}
}
}
public final class MessageInputActionButtonComponent: Component {
public enum Mode {
case none
@@ -15,6 +28,8 @@ public final class MessageInputActionButtonComponent: Component {
case apply
case voiceInput
case videoInput
case delete
case attach
}
public enum Action {
@@ -26,6 +41,8 @@ public final class MessageInputActionButtonComponent: Component {
public let action: (Mode, Action, Bool) -> Void
public let switchMediaInputMode: () -> Void
public let updateMediaCancelFraction: (CGFloat) -> Void
public let lockMediaRecording: () -> Void
public let stopAndPreviewMediaRecording: () -> Void
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
@@ -38,6 +55,8 @@ public final class MessageInputActionButtonComponent: Component {
action: @escaping (Mode, Action, Bool) -> Void,
switchMediaInputMode: @escaping () -> Void,
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
lockMediaRecording: @escaping () -> Void,
stopAndPreviewMediaRecording: @escaping () -> Void,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
@@ -49,6 +68,8 @@ public final class MessageInputActionButtonComponent: Component {
self.action = action
self.switchMediaInputMode = switchMediaInputMode
self.updateMediaCancelFraction = updateMediaCancelFraction
self.lockMediaRecording = lockMediaRecording
self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording
self.context = context
self.theme = theme
self.strings = strings
@@ -162,6 +183,12 @@ public final class MessageInputActionButtonComponent: Component {
break
}
}
micButton.stopRecording = { [weak self] in
guard let self, let component = self.component else {
return
}
component.stopAndPreviewMediaRecording()
}
micButton.endRecording = { [weak self] sendMedia in
guard let self, let component = self.component else {
return
@@ -173,6 +200,12 @@ public final class MessageInputActionButtonComponent: Component {
break
}
}
micButton.updateLocked = { [weak self] _ in
guard let self, let component = self.component else {
return
}
component.lockMediaRecording()
}
micButton.switchMode = { [weak self] in
guard let self, let component = self.component else {
return
@@ -187,29 +220,33 @@ public final class MessageInputActionButtonComponent: Component {
}
}
if self.sendIconView.image == nil {
self.sendIconView.image = generateImage(CGSize(width: 33.0, height: 33.0), rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(UIColor.white.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
context.setStrokeColor(UIColor.clear.cgColor)
context.setLineWidth(2.0)
context.setLineCap(.round)
context.setLineJoin(.round)
context.translateBy(x: 5.45, y: 4.0)
context.saveGState()
context.translateBy(x: 4.0, y: 4.0)
let _ = try? drawSvgPath(context, path: "M1,7 L7,1 L13,7 S ")
context.restoreGState()
context.saveGState()
context.translateBy(x: 10.0, y: 4.0)
let _ = try? drawSvgPath(context, path: "M1,16 V1 S ")
context.restoreGState()
})
if self.sendIconView.image == nil || previousComponent?.mode.iconName != component.mode.iconName {
if let iconName = component.mode.iconName {
self.sendIconView.image = generateTintedImage(image: UIImage(bundleImageName: iconName), color: .white)
} else {
self.sendIconView.image = generateImage(CGSize(width: 33.0, height: 33.0), rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(UIColor.white.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
context.setStrokeColor(UIColor.clear.cgColor)
context.setLineWidth(2.0)
context.setLineCap(.round)
context.setLineJoin(.round)
context.translateBy(x: 5.45, y: 4.0)
context.saveGState()
context.translateBy(x: 4.0, y: 4.0)
let _ = try? drawSvgPath(context, path: "M1,7 L7,1 L13,7 S ")
context.restoreGState()
context.saveGState()
context.translateBy(x: 10.0, y: 4.0)
let _ = try? drawSvgPath(context, path: "M1,16 V1 S ")
context.restoreGState()
})
}
}
var sendAlpha: CGFloat = 0.0
@@ -218,7 +255,7 @@ public final class MessageInputActionButtonComponent: Component {
switch component.mode {
case .none:
break
case .send, .apply:
case .send, .apply, .attach, .delete:
sendAlpha = 1.0
case .videoInput, .voiceInput:
microphoneAlpha = 1.0
@@ -248,7 +285,7 @@ public final class MessageInputActionButtonComponent: Component {
if previousComponent?.mode != component.mode {
switch component.mode {
case .none, .send, .apply, .voiceInput:
case .none, .send, .apply, .voiceInput, .attach, .delete:
micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
case .videoInput:
micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)

View File

@@ -8,6 +8,7 @@ import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
import LottieComponent
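// Message input panel used here for stories: hosts the text field, attachment and action buttons,
// and swaps in MediaRecordingPanelComponent / MediaPreviewPanelComponent while recording or previewing audio.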
public final class MessageInputPanelComponent: Component {
public enum Style {
@@ -31,10 +32,16 @@ public final class MessageInputPanelComponent: Component {
public let presentController: (ViewController) -> Void
public let sendMessageAction: () -> Void
public let setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?
public let lockMediaRecording: (() -> Void)?
public let stopAndPreviewMediaRecording: (() -> Void)?
public let discardMediaRecordingPreview: (() -> Void)?
public let attachmentAction: (() -> Void)?
public let reactionAction: ((UIView) -> Void)?
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let isRecordingLocked: Bool
public let recordedAudioPreview: ChatRecordedMediaPreview?
public let wasRecordingDismissed: Bool
public let displayGradient: Bool
public let bottomInset: CGFloat
@@ -48,10 +55,16 @@ public final class MessageInputPanelComponent: Component {
presentController: @escaping (ViewController) -> Void,
sendMessageAction: @escaping () -> Void,
setMediaRecordingActive: ((Bool, Bool, Bool) -> Void)?,
lockMediaRecording: (() -> Void)?,
stopAndPreviewMediaRecording: (() -> Void)?,
discardMediaRecordingPreview: (() -> Void)?,
attachmentAction: (() -> Void)?,
reactionAction: ((UIView) -> Void)?,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
isRecordingLocked: Bool,
recordedAudioPreview: ChatRecordedMediaPreview?,
wasRecordingDismissed: Bool,
displayGradient: Bool,
bottomInset: CGFloat
) {
@@ -64,10 +77,16 @@ public final class MessageInputPanelComponent: Component {
self.presentController = presentController
self.sendMessageAction = sendMessageAction
self.setMediaRecordingActive = setMediaRecordingActive
self.lockMediaRecording = lockMediaRecording
self.stopAndPreviewMediaRecording = stopAndPreviewMediaRecording
self.discardMediaRecordingPreview = discardMediaRecordingPreview
self.attachmentAction = attachmentAction
self.reactionAction = reactionAction
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.isRecordingLocked = isRecordingLocked
self.wasRecordingDismissed = wasRecordingDismissed
self.recordedAudioPreview = recordedAudioPreview
self.displayGradient = displayGradient
self.bottomInset = bottomInset
}
@@ -97,6 +116,15 @@ public final class MessageInputPanelComponent: Component {
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
if lhs.isRecordingLocked != rhs.isRecordingLocked {
return false
}
if lhs.wasRecordingDismissed != rhs.wasRecordingDismissed {
return false
}
if lhs.recordedAudioPreview !== rhs.recordedAudioPreview {
return false
}
if lhs.displayGradient != rhs.displayGradient {
return false
}
@@ -123,13 +151,17 @@ public final class MessageInputPanelComponent: Component {
private let textFieldExternalState = TextFieldComponent.ExternalState()
private let attachmentButton = ComponentView<Empty>()
private var deleteMediaPreviewButton: ComponentView<Empty>?
private let inputActionButton = ComponentView<Empty>()
private let stickerButton = ComponentView<Empty>()
private let reactionButton = ComponentView<Empty>()
private var mediaRecordingVibrancyContainer: UIView
private var mediaRecordingPanel: ComponentView<Empty>?
private weak var dismissingMediaRecordingPanel: UIView?
private var mediaPreviewPanel: ComponentView<Empty>?
private var currentMediaInputIsVoice: Bool = true
private var mediaCancelFraction: CGFloat = 0.0
@@ -145,6 +177,9 @@ public final class MessageInputPanelComponent: Component {
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
self.vibrancyEffectView = vibrancyEffectView
self.mediaRecordingVibrancyContainer = UIView()
self.vibrancyEffectView.contentView.addSubview(self.mediaRecordingVibrancyContainer)
self.gradientView = UIImageView()
self.bottomGradientView = UIView()
@@ -181,20 +216,31 @@ public final class MessageInputPanelComponent: Component {
}
}
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = super.hitTest(point, with: event)
return result
}
func update(component: MessageInputPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
var insets = UIEdgeInsets(top: 14.0, left: 7.0, bottom: 6.0, right: 7.0)
if let _ = component.attachmentAction {
insets.left = 41.0
}
if let _ = component.setMediaRecordingActive {
insets.right = 41.0
}
let mediaInsets = UIEdgeInsets(top: insets.top, left: 7.0, bottom: insets.bottom, right: insets.right)
let baseFieldHeight: CGFloat = 40.0
self.component = component
self.state = state
let hasMediaRecording = component.audioRecorder != nil || component.videoRecordingStatus != nil
let hasMediaEditing = component.recordedAudioPreview != nil
let topGradientHeight: CGFloat = 32.0
if self.gradientView.image == nil {
@@ -264,13 +310,20 @@ public final class MessageInputPanelComponent: Component {
}
let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height))
transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldFrame.size))
transition.setAlpha(view: self.vibrancyEffectView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
transition.setFrame(view: self.fieldBackgroundView, frame: fieldFrame)
self.fieldBackgroundView.update(size: fieldFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
let fieldBackgroundFrame: CGRect
if hasMediaRecording {
fieldBackgroundFrame = CGRect(origin: CGPoint(x: mediaInsets.left, y: insets.top), size: CGSize(width: availableSize.width - mediaInsets.left - mediaInsets.right, height: textFieldSize.height))
} else {
fieldBackgroundFrame = fieldFrame
}
let gradientFrame = CGRect(origin: CGPoint(x: 0.0, y: -topGradientHeight), size: CGSize(width: availableSize.width, height: topGradientHeight + fieldFrame.maxY + insets.bottom))
transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
let gradientFrame = CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX - fieldFrame.minX, y: -topGradientHeight), size: CGSize(width: availableSize.width - (fieldBackgroundFrame.minX - fieldFrame.minX), height: topGradientHeight + fieldBackgroundFrame.maxY + insets.bottom))
transition.setFrame(view: self.gradientView, frame: gradientFrame)
transition.setFrame(view: self.bottomGradientView, frame: CGRect(origin: CGPoint(x: 0.0, y: gradientFrame.maxY), size: CGSize(width: availableSize.width, height: component.bottomInset)))
transition.setAlpha(view: self.gradientView, alpha: component.displayGradient ? 1.0 : 0.0)
@@ -282,7 +335,7 @@ public final class MessageInputPanelComponent: Component {
} else {
placeholderOriginX = floorToScreenPixels((availableSize.width - placeholderSize.width) / 2.0)
}
let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: floor((fieldFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize)
let placeholderFrame = CGRect(origin: CGPoint(x: placeholderOriginX, y: floor((fieldBackgroundFrame.height - placeholderSize.height) * 0.5)), size: placeholderSize)
if let placeholderView = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
if vibrancyPlaceholderView.superview == nil {
vibrancyPlaceholderView.layer.anchorPoint = CGPoint()
@@ -298,6 +351,9 @@ public final class MessageInputPanelComponent: Component {
}
transition.setPosition(view: placeholderView, position: placeholderFrame.origin)
placeholderView.bounds = CGRect(origin: CGPoint(), size: placeholderFrame.size)
transition.setAlpha(view: placeholderView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
transition.setAlpha(view: vibrancyPlaceholderView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
}
let size = CGSize(width: availableSize.width, height: textFieldSize.height + insets.top + insets.bottom)
@@ -306,46 +362,151 @@ public final class MessageInputPanelComponent: Component {
if textFieldView.superview == nil {
self.addSubview(textFieldView)
}
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldFrame.minX, y: fieldFrame.maxY - textFieldSize.height), size: textFieldSize))
transition.setAlpha(view: textFieldView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX, y: fieldBackgroundFrame.maxY - textFieldSize.height), size: textFieldSize))
transition.setAlpha(view: textFieldView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
}
if let attachmentAction = component.attachmentAction {
if component.attachmentAction != nil {
let attachmentButtonMode: MessageInputActionButtonComponent.Mode
attachmentButtonMode = .attach
let attachmentButtonSize = self.attachmentButton.update(
transition: transition,
component: AnyComponent(Button(
content: AnyComponent(BundleIconComponent(
name: "Chat/Input/Text/IconAttachment",
tintColor: .white
)),
action: {
attachmentAction()
}
).minSize(CGSize(width: 41.0, height: baseFieldHeight))),
component: AnyComponent(MessageInputActionButtonComponent(
mode: attachmentButtonMode,
action: { [weak self] mode, action, sendAction in
guard let self, let component = self.component, case .up = action else {
return
}
switch mode {
case .delete:
break
case .attach:
component.attachmentAction?()
default:
break
}
},
switchMediaInputMode: {
},
updateMediaCancelFraction: { _ in
},
lockMediaRecording: {
},
stopAndPreviewMediaRecording: {
},
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController,
audioRecorder: nil,
videoRecordingStatus: nil
)),
environment: {},
containerSize: CGSize(width: 41.0, height: baseFieldHeight)
containerSize: CGSize(width: 33.0, height: baseFieldHeight)
)
if let attachmentButtonView = self.attachmentButton.view {
if attachmentButtonView.superview == nil {
self.addSubview(attachmentButtonView)
}
transition.setFrame(view: attachmentButtonView, frame: CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize))
let attachmentButtonFrame = CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5) + (fieldBackgroundFrame.minX - fieldFrame.minX), y: size.height - insets.bottom - baseFieldHeight + floor((baseFieldHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize)
transition.setPosition(view: attachmentButtonView, position: attachmentButtonFrame.center)
transition.setBounds(view: attachmentButtonView, bounds: CGRect(origin: CGPoint(), size: attachmentButtonFrame.size))
transition.setAlpha(view: attachmentButtonView, alpha: (hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
transition.setScale(view: attachmentButtonView, scale: hasMediaEditing ? 0.001 : 1.0)
}
}
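// While an audio preview is active, the attachment button is replaced by a delete button
// (the "BinBlue" animation) that discards the recorded preview.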
if hasMediaEditing {
let deleteMediaPreviewButton: ComponentView<Empty>
var deleteMediaPreviewButtonTransition = transition
if let current = self.deleteMediaPreviewButton {
deleteMediaPreviewButton = current
} else {
if !transition.animation.isImmediate {
deleteMediaPreviewButtonTransition = .immediate
}
deleteMediaPreviewButton = ComponentView()
self.deleteMediaPreviewButton = deleteMediaPreviewButton
}
let buttonSize = CGSize(width: 40.0, height: 40.0)
let deleteMediaPreviewButtonFrame = CGRect(origin: CGPoint(x: 1.0 + (fieldBackgroundFrame.minX - fieldFrame.minX), y: 3.0 + floor((size.height - buttonSize.height) * 0.5)), size: CGSize(width: buttonSize.width, height: buttonSize.height))
let _ = deleteMediaPreviewButton.update(
transition: deleteMediaPreviewButtonTransition,
component: AnyComponent(Button(
content: AnyComponent(LottieComponent(
content: LottieComponent.AppBundleContent(name: "BinBlue"),
color: .white,
startingPosition: .begin
)),
action: { [weak self] in
guard let self, let component = self.component else {
return
}
component.discardMediaRecordingPreview?()
}
).minSize(buttonSize)),
environment: {},
containerSize: buttonSize
)
if let deleteMediaPreviewButtonView = deleteMediaPreviewButton.view {
if deleteMediaPreviewButtonView.superview == nil {
self.addSubview(deleteMediaPreviewButtonView)
transition.animateAlpha(view: deleteMediaPreviewButtonView, from: 0.0, to: 1.0)
transition.animatePosition(view: deleteMediaPreviewButtonView, from: CGPoint(x: mediaInsets.left - insets.left, y: 0.0), to: CGPoint(), additive: true)
}
deleteMediaPreviewButtonTransition.setFrame(view: deleteMediaPreviewButtonView, frame: deleteMediaPreviewButtonFrame)
}
} else if let deleteMediaPreviewButton = self.deleteMediaPreviewButton {
self.deleteMediaPreviewButton = nil
if let deleteMediaPreviewButtonView = deleteMediaPreviewButton.view {
if component.wasRecordingDismissed, let deleteMediaPreviewButtonView = deleteMediaPreviewButtonView as? Button.View, let animationView = deleteMediaPreviewButtonView.content as? LottieComponent.View {
if let attachmentButtonView = self.attachmentButton.view {
attachmentButtonView.isHidden = true
}
animationView.playOnce(completion: { [weak self, weak deleteMediaPreviewButtonView] in
guard let self, let deleteMediaPreviewButtonView else {
return
}
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
transition.setAlpha(view: deleteMediaPreviewButtonView, alpha: 0.0, completion: { [weak deleteMediaPreviewButtonView] _ in
deleteMediaPreviewButtonView?.removeFromSuperview()
})
transition.setScale(view: deleteMediaPreviewButtonView, scale: 0.001)
if let attachmentButtonView = self.attachmentButton.view {
attachmentButtonView.isHidden = false
transition.animateAlpha(view: attachmentButtonView, from: 0.0, to: attachmentButtonView.alpha)
transition.animateScale(view: attachmentButtonView, from: 0.001, to: 1.0)
}
})
} else {
transition.setAlpha(view: deleteMediaPreviewButtonView, alpha: 0.0, completion: { [weak deleteMediaPreviewButtonView] _ in
deleteMediaPreviewButtonView?.removeFromSuperview()
})
transition.setScale(view: deleteMediaPreviewButtonView, scale: 0.001)
}
}
}
let inputActionButtonMode: MessageInputActionButtonComponent.Mode
if case .editor = component.style {
inputActionButtonMode = self.textFieldExternalState.isEditing ? .apply : .none
} else {
inputActionButtonMode = self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput)
if hasMediaEditing {
inputActionButtonMode = .send
} else {
inputActionButtonMode = self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput)
}
}
let inputActionButtonSize = self.inputActionButton.update(
transition: transition,
component: AnyComponent(MessageInputActionButtonComponent(
mode: inputActionButtonMode,
action: { [weak self] mode, action, sendAction in
guard let self else {
guard let self, let component = self.component else {
return
}
@@ -354,9 +515,11 @@ public final class MessageInputPanelComponent: Component {
break
case .send:
if case .up = action {
if case .text("") = self.getSendMessageInput() {
if component.recordedAudioPreview != nil {
component.sendMessageAction()
} else if case .text("") = self.getSendMessageInput() {
} else {
self.component?.sendMessageAction()
component.sendMessageAction()
}
}
case .apply:
@@ -364,7 +527,9 @@ public final class MessageInputPanelComponent: Component {
self.component?.sendMessageAction()
}
case .voiceInput, .videoInput:
self.component?.setMediaRecordingActive?(action == .down, mode == .videoInput, sendAction)
component.setMediaRecordingActive?(action == .down, mode == .videoInput, sendAction)
default:
break
}
},
switchMediaInputMode: { [weak self] in
@@ -383,6 +548,18 @@ public final class MessageInputPanelComponent: Component {
self.state?.updated(transition: .immediate)
}
},
lockMediaRecording: { [weak self] in
guard let self, let component = self.component else {
return
}
component.lockMediaRecording?()
},
stopAndPreviewMediaRecording: { [weak self] in
guard let self, let component = self.component else {
return
}
component.stopAndPreviewMediaRecording?()
},
context: component.context,
theme: component.theme,
strings: component.strings,
@@ -406,7 +583,7 @@ public final class MessageInputPanelComponent: Component {
transition.setFrame(view: inputActionButtonView, frame: CGRect(origin: CGPoint(x: inputActionButtonOriginX, y: size.height - insets.bottom - baseFieldHeight + floorToScreenPixels((baseFieldHeight - inputActionButtonSize.height) * 0.5)), size: inputActionButtonSize))
}
var fieldIconNextX = fieldFrame.maxX - 2.0
var fieldIconNextX = fieldBackgroundFrame.maxX - 2.0
if case .story = component.style {
let stickerButtonSize = self.stickerButton.update(
transition: transition,
@@ -429,12 +606,12 @@ public final class MessageInputPanelComponent: Component {
if stickerButtonView.superview == nil {
self.addSubview(stickerButtonView)
}
let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldFrame.minY + floor((fieldFrame.height - stickerButtonSize.height) * 0.5)), size: stickerButtonSize)
let stickerIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - stickerButtonSize.width, y: fieldBackgroundFrame.minY + floor((fieldBackgroundFrame.height - stickerButtonSize.height) * 0.5)), size: stickerButtonSize)
transition.setPosition(view: stickerButtonView, position: stickerIconFrame.center)
transition.setBounds(view: stickerButtonView, bounds: CGRect(origin: CGPoint(), size: stickerIconFrame.size))
transition.setAlpha(view: stickerButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording) ? 0.0 : 1.0)
transition.setScale(view: stickerButtonView, scale: self.textFieldExternalState.hasText ? 0.1 : 1.0)
transition.setAlpha(view: stickerButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
transition.setScale(view: stickerButtonView, scale: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.1 : 1.0)
fieldIconNextX -= stickerButtonSize.width + 2.0
}
@@ -462,19 +639,18 @@ public final class MessageInputPanelComponent: Component {
if reactionButtonView.superview == nil {
self.addSubview(reactionButtonView)
}
let reactionIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - reactionButtonSize.width, y: fieldFrame.minY + 1.0 + floor((fieldFrame.height - reactionButtonSize.height) * 0.5)), size: reactionButtonSize)
let reactionIconFrame = CGRect(origin: CGPoint(x: fieldIconNextX - reactionButtonSize.width, y: fieldBackgroundFrame.minY + 1.0 + floor((fieldBackgroundFrame.height - reactionButtonSize.height) * 0.5)), size: reactionButtonSize)
transition.setPosition(view: reactionButtonView, position: reactionIconFrame.center)
transition.setBounds(view: reactionButtonView, bounds: CGRect(origin: CGPoint(), size: reactionIconFrame.size))
transition.setAlpha(view: reactionButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording) ? 0.0 : 1.0)
transition.setScale(view: reactionButtonView, scale: self.textFieldExternalState.hasText ? 0.1 : 1.0)
transition.setAlpha(view: reactionButtonView, alpha: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.0 : 1.0)
transition.setScale(view: reactionButtonView, scale: (self.textFieldExternalState.hasText || hasMediaRecording || hasMediaEditing) ? 0.1 : 1.0)
fieldIconNextX -= reactionButtonSize.width + 2.0
}
}
self.fieldBackgroundView.updateColor(color: self.textFieldExternalState.isEditing || component.style == .editor ? UIColor(white: 0.0, alpha: 0.5) : UIColor(white: 1.0, alpha: 0.09), transition: transition.containedViewLayoutTransition)
transition.setAlpha(view: self.fieldBackgroundView, alpha: hasMediaRecording ? 0.0 : 1.0)
if let placeholder = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
placeholder.isHidden = self.textFieldExternalState.hasText
vibrancyPlaceholderView.isHidden = placeholder.isHidden
@@ -483,7 +659,7 @@ public final class MessageInputPanelComponent: Component {
component.externalState.isEditing = self.textFieldExternalState.isEditing
component.externalState.hasText = self.textFieldExternalState.hasText
if component.audioRecorder != nil || component.videoRecordingStatus != nil {
if hasMediaRecording {
if let dismissingMediaRecordingPanel = self.dismissingMediaRecordingPanel {
self.dismissingMediaRecordingPanel = nil
transition.setAlpha(view: dismissingMediaRecordingPanel, alpha: 0.0, completion: { [weak dismissingMediaRecordingPanel] _ in
@@ -504,10 +680,20 @@ public final class MessageInputPanelComponent: Component {
let _ = mediaRecordingPanel.update(
transition: mediaRecordingPanelTransition,
component: AnyComponent(MediaRecordingPanelComponent(
theme: component.theme,
strings: component.strings,
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus,
isRecordingLocked: component.isRecordingLocked,
cancelFraction: self.mediaCancelFraction,
insets: insets
inputInsets: insets,
insets: mediaInsets,
cancelAction: { [weak self] in
guard let self, let component = self.component else {
return
}
component.setMediaRecordingActive?(false, false, false)
}
)),
environment: {},
containerSize: size
@@ -516,17 +702,18 @@ public final class MessageInputPanelComponent: Component {
var animateIn = false
if mediaRecordingPanelView.superview == nil {
animateIn = true
self.insertSubview(mediaRecordingPanelView, at: 0)
self.insertSubview(mediaRecordingPanelView, aboveSubview: self.fieldBackgroundView)
self.mediaRecordingVibrancyContainer.addSubview(mediaRecordingPanelView.vibrancyContainer)
}
mediaRecordingPanelTransition.setFrame(view: mediaRecordingPanelView, frame: CGRect(origin: CGPoint(), size: size))
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(x: -fieldBackgroundFrame.minX, y: -fieldBackgroundFrame.minY), size: size))
if animateIn && !transition.animation.isImmediate {
mediaRecordingPanelView.animateIn()
}
}
if let attachmentButtonView = self.attachmentButton.view {
transition.setAlpha(view: attachmentButtonView, alpha: 0.0)
}
} else {
if let mediaRecordingPanel = self.mediaRecordingPanel {
self.mediaRecordingPanel = nil
@@ -541,7 +728,11 @@ public final class MessageInputPanelComponent: Component {
self.dismissingMediaRecordingPanel = mediaRecordingPanel.view
if let mediaRecordingPanelView = mediaRecordingPanel.view as? MediaRecordingPanelComponent.View {
mediaRecordingPanelView.animateOut(dismissRecording: true, completion: { [weak self, weak mediaRecordingPanelView] in
let wasRecordingDismissed = component.wasRecordingDismissed
if wasRecordingDismissed, let attachmentButtonView = self.attachmentButton.view {
attachmentButtonView.isHidden = true
}
mediaRecordingPanelView.animateOut(transition: transition, dismissRecording: wasRecordingDismissed, completion: { [weak self, weak mediaRecordingPanelView] in
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
if let mediaRecordingPanelView = mediaRecordingPanelView {
@@ -553,8 +744,10 @@ public final class MessageInputPanelComponent: Component {
guard let self else {
return
}
if self.mediaRecordingPanel == nil, let attachmentButtonView = self.attachmentButton.view {
transition.setAlpha(view: attachmentButtonView, alpha: 1.0)
if wasRecordingDismissed, self.mediaRecordingPanel == nil, let attachmentButtonView = self.attachmentButton.view {
attachmentButtonView.isHidden = false
transition.animateAlpha(view: attachmentButtonView, from: 0.0, to: attachmentButtonView.alpha)
transition.animateScale(view: attachmentButtonView, from: 0.001, to: 1.0)
}
})
@@ -562,6 +755,57 @@ public final class MessageInputPanelComponent: Component {
}
}
if let recordedAudioPreview = component.recordedAudioPreview {
let mediaPreviewPanel: ComponentView<Empty>
var mediaPreviewPanelTransition = transition
if let current = self.mediaPreviewPanel {
mediaPreviewPanel = current
} else {
mediaPreviewPanelTransition = .immediate
mediaPreviewPanel = ComponentView()
self.mediaPreviewPanel = mediaPreviewPanel
}
let _ = mediaPreviewPanel.update(
transition: mediaPreviewPanelTransition,
component: AnyComponent(MediaPreviewPanelComponent(
context: component.context,
theme: component.theme,
strings: component.strings,
mediaPreview: recordedAudioPreview,
insets: insets
)),
environment: {},
containerSize: size
)
if let mediaPreviewPanelView = mediaPreviewPanel.view as? MediaPreviewPanelComponent.View {
var animateIn = false
if mediaPreviewPanelView.superview == nil {
animateIn = true
self.insertSubview(mediaPreviewPanelView, aboveSubview: self.fieldBackgroundView)
self.mediaRecordingVibrancyContainer.addSubview(mediaPreviewPanelView.vibrancyContainer)
}
mediaPreviewPanelTransition.setFrame(view: mediaPreviewPanelView, frame: CGRect(origin: CGPoint(), size: size))
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(x: -fieldBackgroundFrame.minX, y: -fieldBackgroundFrame.minY), size: size))
if animateIn && !transition.animation.isImmediate {
mediaPreviewPanelView.animateIn()
}
}
} else {
if let mediaPreviewPanel = self.mediaPreviewPanel {
self.mediaPreviewPanel = nil
if let mediaPreviewPanelView = mediaPreviewPanel.view as? MediaPreviewPanelComponent.View {
mediaPreviewPanelView.animateOut(transition: transition, completion: { [weak mediaPreviewPanelView] in
mediaPreviewPanelView?.removeFromSuperview()
})
}
}
}
return size
}
}