[WIP] Stories

Author: Ali
Date: 2023-04-28 20:20:46 +04:00
parent cb1f40de1a
commit 7dd76ef329
27 changed files with 1062 additions and 190 deletions


@@ -0,0 +1,204 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
import SwiftSignalKit
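// Panel shown in place of the text field while a voice or video message is being recorded: a red indicator dot, a running timer, and a "Slide to cancel" hint that tracks the cancel gesture.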
public final class MediaRecordingPanelComponent: Component {
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let cancelFraction: CGFloat
public init(
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
cancelFraction: CGFloat
) {
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.cancelFraction = cancelFraction
}
public static func ==(lhs: MediaRecordingPanelComponent, rhs: MediaRecordingPanelComponent) -> Bool {
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
if lhs.cancelFraction != rhs.cancelFraction {
return false
}
return true
}
public final class View: UIView {
private var component: MediaRecordingPanelComponent?
private weak var state: EmptyComponentState?
private let indicatorView: UIImageView
private let cancelIconView: UIImageView
private let cancelText = ComponentView<Empty>()
private let timerText = ComponentView<Empty>()
private var timerTextDisposable: Disposable?
private var timerTextValue: String = "0:00,00"
override init(frame: CGRect) {
self.indicatorView = UIImageView()
self.cancelIconView = UIImageView()
super.init(frame: frame)
self.addSubview(self.indicatorView)
self.addSubview(self.cancelIconView)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.timerTextDisposable?.dispose()
}
func update(component: MediaRecordingPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
self.component = component
self.state = state
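// Re-subscribe to the active recorder's duration whenever the recorder instance changes, and mirror it into the timer text.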
if previousComponent?.audioRecorder !== component.audioRecorder || previousComponent?.videoRecordingStatus !== component.videoRecordingStatus {
self.timerTextDisposable?.dispose()
if let audioRecorder = component.audioRecorder {
var updateNow = false
self.timerTextDisposable = audioRecorder.recordingState.start(next: { [weak self] state in
Queue.mainQueue().async {
guard let self else {
return
}
switch state {
case .paused(let duration), .recording(let duration, _):
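// Split the duration into whole seconds and centiseconds; format as m:ss,cc, or h:mm:ss,cc once past an hour.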
let currentAudioDurationSeconds = Int(duration)
let currentAudioDurationMilliseconds = Int(duration * 100.0) % 100
let text: String
if currentAudioDurationSeconds >= 60 * 60 {
text = String(format: "%d:%02d:%02d,%02d", currentAudioDurationSeconds / 3600, currentAudioDurationSeconds / 60 % 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
} else {
text = String(format: "%d:%02d,%02d", currentAudioDurationSeconds / 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
}
if self.timerTextValue != text {
self.timerTextValue = text
}
if updateNow {
self.state?.updated(transition: .immediate)
}
case .stopped:
break
}
}
})
updateNow = true
} else if let videoRecordingStatus = component.videoRecordingStatus {
var updateNow = false
self.timerTextDisposable = videoRecordingStatus.duration.start(next: { [weak self] duration in
Queue.mainQueue().async {
guard let self else {
return
}
let currentAudioDurationSeconds = Int(duration)
let currentAudioDurationMilliseconds = Int(duration * 100.0) % 100
let text: String
if currentAudioDurationSeconds >= 60 * 60 {
text = String(format: "%d:%02d:%02d,%02d", currentAudioDurationSeconds / 3600, currentAudioDurationSeconds / 60 % 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
} else {
text = String(format: "%d:%02d,%02d", currentAudioDurationSeconds / 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
}
if self.timerTextValue != text {
self.timerTextValue = text
}
if updateNow {
self.state?.updated(transition: .immediate)
}
}
})
updateNow = true
}
}
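// Red "recording" indicator dot, generated once and reused.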
if self.indicatorView.image == nil {
self.indicatorView.image = generateFilledCircleImage(diameter: 10.0, color: UIColor(rgb: 0xFF3B30))
}
if let image = self.indicatorView.image {
transition.setFrame(view: self.indicatorView, frame: CGRect(origin: CGPoint(x: 10.0, y: floor((availableSize.height - image.size.height) * 0.5)), size: image.size))
}
let timerTextSize = self.timerText.update(
transition: .immediate,
component: AnyComponent(Text(text: self.timerTextValue, font: Font.regular(15.0), color: .white)),
environment: {},
containerSize: CGSize(width: 100.0, height: 100.0)
)
if let timerTextView = self.timerText.view {
if timerTextView.superview == nil {
self.addSubview(timerTextView)
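// Top-left anchor so the timer's left edge stays fixed while the text widens, instead of re-centering.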
timerTextView.layer.anchorPoint = CGPoint()
}
let timerTextFrame = CGRect(origin: CGPoint(x: 28.0, y: floor((availableSize.height - timerTextSize.height) * 0.5)), size: timerTextSize)
transition.setPosition(view: timerTextView, position: timerTextFrame.origin)
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
}
if self.cancelIconView.image == nil {
self.cancelIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
}
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)
let cancelTextSize = self.cancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(white: 1.0, alpha: 0.3))),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)
var textFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelTextSize.width) * 0.5), y: floor((availableSize.height - cancelTextSize.height) * 0.5)), size: cancelTextSize)
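// Rubber-banding for the slide-to-cancel gesture: the displacement grows with diminishing returns and approaches `range` asymptotically (e.g. a cancelFraction of 150 maps to 50 points); half of the result is applied to the label below.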
let bandingStart: CGFloat = 0.0
let bandedOffset = abs(component.cancelFraction) - bandingStart
let range: CGFloat = 300.0
let coefficient: CGFloat = 0.4
let mappedCancelFraction = bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
textFrame.origin.x -= mappedCancelFraction * 0.5
if let cancelTextView = self.cancelText.view {
if cancelTextView.superview == nil {
self.addSubview(cancelTextView)
}
transition.setFrame(view: cancelTextView, frame: textFrame)
}
if let image = self.cancelIconView.image {
transition.setFrame(view: self.cancelIconView, frame: CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size))
}
return availableSize
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}


@@ -3,6 +3,10 @@ import UIKit
import Display
import ComponentFlow
import AppBundle
import ChatTextInputMediaRecordingButton
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
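// Right-hand action button of the message input: shows the send icon when there is text, otherwise acts as the voice/video record button, backed by the existing ChatTextInputMediaRecordingButton for the press-and-hold recording gestures.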
public final class MessageInputActionButtonComponent: Component {
public enum Mode {
@@ -10,45 +14,83 @@ public final class MessageInputActionButtonComponent: Component {
case voiceInput
case videoInput
}
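// Touch phase reported to the action handler: .down when the button is pressed, .up when it is released.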
public enum Action {
case down
case up
}
public let mode: Mode
- public let action: () -> Void
public let action: (Mode, Action, Bool) -> Void
public let switchMediaInputMode: () -> Void
public let updateMediaCancelFraction: (CGFloat) -> Void
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public init(
mode: Mode,
- action: @escaping () -> Void
action: @escaping (Mode, Action, Bool) -> Void,
switchMediaInputMode: @escaping () -> Void,
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.mode = mode
self.action = action
self.switchMediaInputMode = switchMediaInputMode
self.updateMediaCancelFraction = updateMediaCancelFraction
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}
public static func ==(lhs: MessageInputActionButtonComponent, rhs: MessageInputActionButtonComponent) -> Bool {
if lhs.mode != rhs.mode {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}
public final class View: HighlightTrackingButton {
private let microphoneIconView: UIImageView
private let cameraIconView: UIImageView
private var micButton: ChatTextInputMediaRecordingButton?
private let sendIconView: UIImageView
private var component: MessageInputActionButtonComponent?
private weak var componentState: EmptyComponentState?
override init(frame: CGRect) {
self.microphoneIconView = UIImageView()
self.cameraIconView = UIImageView()
self.sendIconView = UIImageView()
super.init(frame: frame)
self.isMultipleTouchEnabled = false
self.addSubview(self.microphoneIconView)
self.addSubview(self.cameraIconView)
self.addSubview(self.sendIconView)
self.highligthedChanged = { [weak self] highlighted in
@@ -62,6 +104,7 @@ public final class MessageInputActionButtonComponent: Component {
transition.setSublayerTransform(view: self, transform: CATransform3DMakeScale(scale, scale, 1.0))
}
self.addTarget(self, action: #selector(self.touchDown), for: .touchDown)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
}
@@ -69,8 +112,18 @@ public final class MessageInputActionButtonComponent: Component {
fatalError("init(coder:) has not been implemented")
}
@objc private func touchDown() {
guard let component = self.component else {
return
}
component.action(component.mode, .down, false)
}
@objc private func pressed() {
- self.component?.action()
guard let component = self.component else {
return
}
component.action(component.mode, .up, false)
}
override public func continueTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
@@ -78,16 +131,57 @@ public final class MessageInputActionButtonComponent: Component {
}
func update(component: MessageInputActionButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
self.component = component
self.componentState = state
if self.microphoneIconView.image == nil {
self.microphoneIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconMicrophone")?.withRenderingMode(.alwaysTemplate)
self.microphoneIconView.tintColor = .white
}
if self.cameraIconView.image == nil {
self.cameraIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconVideo")?.withRenderingMode(.alwaysTemplate)
self.cameraIconView.tintColor = .white
}
let themeUpdated = previousComponent?.theme !== component.theme
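// Lazily create the legacy recording button and bridge its begin/end/switch/cancel callbacks to the component's closures.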
if self.micButton == nil {
let micButton = ChatTextInputMediaRecordingButton(
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController
)
self.micButton = micButton
micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
self.addSubview(micButton)
micButton.beginRecording = { [weak self] in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .down, false)
default:
break
}
}
micButton.endRecording = { [weak self] sendMedia in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .up, sendMedia)
default:
break
}
}
micButton.switchMode = { [weak self] in
guard let self, let component = self.component else {
return
}
component.switchMediaInputMode()
}
micButton.updateCancelTranslation = { [weak self] in
guard let self, let micButton = self.micButton, let component = self.component else {
return
}
component.updateMediaCancelFraction(micButton.cancelTranslation)
}
}
if self.sendIconView.image == nil {
@@ -117,40 +211,55 @@ public final class MessageInputActionButtonComponent: Component {
var sendAlpha: CGFloat = 0.0
var microphoneAlpha: CGFloat = 0.0
var cameraAlpha: CGFloat = 0.0
switch component.mode {
case .send:
sendAlpha = 1.0
- case .videoInput:
- cameraAlpha = 1.0
- case .voiceInput:
case .videoInput, .voiceInput:
microphoneAlpha = 1.0
}
transition.setAlpha(view: self.sendIconView, alpha: sendAlpha)
transition.setScale(view: self.sendIconView, scale: sendAlpha == 0.0 ? 0.01 : 1.0)
transition.setAlpha(view: self.cameraIconView, alpha: cameraAlpha)
transition.setScale(view: self.cameraIconView, scale: cameraAlpha == 0.0 ? 0.01 : 1.0)
transition.setAlpha(view: self.microphoneIconView, alpha: microphoneAlpha)
transition.setScale(view: self.microphoneIconView, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)
if let image = self.sendIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.sendIconView, position: iconFrame.center)
transition.setBounds(view: self.sendIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.cameraIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.cameraIconView, position: iconFrame.center)
transition.setBounds(view: self.cameraIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.microphoneIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.microphoneIconView, position: iconFrame.center)
transition.setBounds(view: self.microphoneIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
if let micButton = self.micButton {
if themeUpdated {
micButton.updateTheme(theme: component.theme)
}
let micButtonFrame = CGRect(origin: CGPoint(), size: availableSize)
let shouldLayoutMicButton = micButton.bounds.size != micButtonFrame.size
transition.setPosition(layer: micButton.layer, position: micButtonFrame.center)
transition.setBounds(layer: micButton.layer, bounds: CGRect(origin: CGPoint(), size: micButtonFrame.size))
if shouldLayoutMicButton {
micButton.layoutItems()
}
if previousComponent?.mode != component.mode {
switch component.mode {
case .send, .voiceInput:
micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
case .videoInput:
micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)
}
}
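// Recorder assignment is deferred to the next main-queue tick, keeping it outside the current component update pass.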
DispatchQueue.main.async { [weak self, weak micButton] in
guard let self, let component = self.component, let micButton else {
return
}
micButton.audioRecorder = component.audioRecorder
micButton.videoRecordingStatus = component.videoRecordingStatus
}
transition.setAlpha(view: micButton, alpha: microphoneAlpha)
transition.setScale(view: micButton, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)
}
return availableSize


@@ -5,6 +5,9 @@ import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
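// Message input bar: text field, attachment button, send/record action button, and, while recording, the media recording overlay panel.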
public final class MessageInputPanelComponent: Component {
public final class ExternalState {
@@ -16,23 +19,59 @@ public final class MessageInputPanelComponent: Component {
}
public let externalState: ExternalState
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let sendMessageAction: () -> Void
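// Parameters, as wired from the action button below: isActive (true on press-down, false on release), isVideo, and whether the finished recording should be sent.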
public let setMediaRecordingActive: (Bool, Bool, Bool) -> Void
public let attachmentAction: () -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public init(
externalState: ExternalState,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
sendMessageAction: @escaping () -> Void,
- attachmentAction: @escaping () -> Void
setMediaRecordingActive: @escaping (Bool, Bool, Bool) -> Void,
attachmentAction: @escaping () -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.externalState = externalState
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.sendMessageAction = sendMessageAction
self.setMediaRecordingActive = setMediaRecordingActive
self.attachmentAction = attachmentAction
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}
public static func ==(lhs: MessageInputPanelComponent, rhs: MessageInputPanelComponent) -> Bool {
if lhs.externalState !== rhs.externalState {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}
@@ -50,7 +89,10 @@ public final class MessageInputPanelComponent: Component {
private let inputActionButton = ComponentView<Empty>()
private let stickerIconView: UIImageView
private var mediaRecordingPanel: ComponentView<Empty>?
private var currentMediaInputIsVoice: Bool = true
private var mediaCancelFraction: CGFloat = 0.0
private var component: MessageInputPanelComponent?
private weak var state: EmptyComponentState?
@@ -107,6 +149,7 @@ public final class MessageInputPanelComponent: Component {
self.stickerIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AccessoryIconStickers")?.withRenderingMode(.alwaysTemplate)
self.stickerIconView.tintColor = .white
}
transition.setAlpha(view: self.stickerIconView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
let availableTextFieldSize = CGSize(width: availableSize.width - insets.left - insets.right, height: availableSize.height - insets.top - insets.bottom)
@@ -123,6 +166,7 @@ public final class MessageInputPanelComponent: Component {
let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height))
transition.setFrame(view: self.fieldBackgroundView, frame: fieldFrame)
transition.setAlpha(view: self.fieldBackgroundView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
let rightFieldInset: CGFloat = 34.0
@@ -133,6 +177,7 @@ public final class MessageInputPanelComponent: Component {
self.addSubview(textFieldView)
}
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldFrame.minX, y: fieldFrame.maxY - textFieldSize.height), size: textFieldSize))
transition.setAlpha(view: textFieldView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
}
let attachmentButtonSize = self.attachmentButton.update(
@@ -157,26 +202,53 @@ public final class MessageInputPanelComponent: Component {
self.addSubview(attachmentButtonView)
}
transition.setFrame(view: attachmentButtonView, frame: CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5), y: size.height - baseHeight + floor((baseHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize))
transition.setAlpha(view: attachmentButtonView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
}
let inputActionButtonSize = self.inputActionButton.update(
transition: transition,
component: AnyComponent(MessageInputActionButtonComponent(
mode: self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput),
- action: { [weak self] in
action: { [weak self] mode, action, sendAction in
guard let self else {
return
}
if case .text("") = self.getSendMessageInput() {
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
HapticFeedback().impact()
} else {
self.component?.sendMessageAction()
switch mode {
case .send:
if case .up = action {
if case .text("") = self.getSendMessageInput() {
} else {
self.component?.sendMessageAction()
}
}
case .voiceInput, .videoInput:
self.component?.setMediaRecordingActive(action == .down, mode == .videoInput, sendAction)
}
- }
},
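// Flips between voice and video capture for the next recording; triggered from the mic button's mode switch.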
switchMediaInputMode: { [weak self] in
guard let self else {
return
}
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
},
updateMediaCancelFraction: { [weak self] mediaCancelFraction in
guard let self else {
return
}
if self.mediaCancelFraction != mediaCancelFraction {
self.mediaCancelFraction = mediaCancelFraction
self.state?.updated(transition: .immediate)
}
},
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController,
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus
)),
environment: {},
containerSize: CGSize(width: 33.0, height: 33.0)
@@ -199,6 +271,50 @@ public final class MessageInputPanelComponent: Component {
component.externalState.isEditing = self.textFieldExternalState.isEditing
component.externalState.hasText = self.textFieldExternalState.hasText
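// While a recorder is active, overlay the recording panel behind the other controls; fade it out and remove it once recording stops.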
if component.audioRecorder != nil || component.videoRecordingStatus != nil {
let mediaRecordingPanel: ComponentView<Empty>
var mediaRecordingPanelTransition = transition
if let current = self.mediaRecordingPanel {
mediaRecordingPanel = current
} else {
mediaRecordingPanelTransition = .immediate
mediaRecordingPanel = ComponentView()
self.mediaRecordingPanel = mediaRecordingPanel
}
let _ = mediaRecordingPanel.update(
transition: mediaRecordingPanelTransition,
component: AnyComponent(MediaRecordingPanelComponent(
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus,
cancelFraction: self.mediaCancelFraction
)),
environment: {},
containerSize: size
)
if let mediaRecordingPanelView = mediaRecordingPanel.view {
var animateIn = false
if mediaRecordingPanelView.superview == nil {
animateIn = true
self.insertSubview(mediaRecordingPanelView, at: 0)
}
mediaRecordingPanelTransition.setFrame(view: mediaRecordingPanelView, frame: CGRect(origin: CGPoint(), size: size))
if animateIn && !transition.animation.isImmediate {
transition.animateAlpha(view: mediaRecordingPanelView, from: 0.0, to: 1.0)
}
}
} else {
if let mediaRecordingPanel = self.mediaRecordingPanel {
self.mediaRecordingPanel = nil
if let mediaRecordingPanelView = mediaRecordingPanel.view {
transition.setAlpha(view: mediaRecordingPanelView, alpha: 0.0, completion: { [weak mediaRecordingPanelView] _ in
mediaRecordingPanelView?.removeFromSuperview()
})
}
}
}
return size
}
}