Mirror of https://github.com/Swiftgram/Telegram-iOS.git

Commit 5045b0a0ca (parent 43c2d875a6): Apply patch
@@ -224,6 +224,10 @@ public extension CALayer {
         self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.scale", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
     }
     
+    func animateScaleY(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
+        self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.scale.y", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
+    }
+    
     func animateRotation(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
         self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.rotation.z", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
     }
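Note: the new animateScaleY helper mirrors the existing scale and rotation helpers, only targeting the "transform.scale.y" key path. A minimal standalone sketch of the equivalent animation using plain Core Animation (no project helpers assumed):

import QuartzCore

// Standalone illustration only: a plain CABasicAnimation on the same key path
// that the project's animateScaleY helper wraps.
func addScaleYAnimation(to layer: CALayer, from: CGFloat, to: CGFloat, duration: CFTimeInterval) {
    let animation = CABasicAnimation(keyPath: "transform.scale.y")
    animation.fromValue = from
    animation.toValue = to
    animation.duration = duration
    animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
    layer.add(animation, forKey: "transform.scale.y")
}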
@@ -77,7 +77,7 @@
 @property (nonatomic) bool fadeDisabled;
 
 - (void)animateIn;
-- (void)animateOut;
+- (void)animateOut:(BOOL)toSmallSize;
 - (void)addMicLevel:(CGFloat)level;
 - (void)dismiss;
 
@@ -500,7 +500,7 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
     }
 }
 
-- (void)animateOut {
+- (void)animateOut:(BOOL)toSmallSize {
     _locked = false;
     _animatedIn = false;
     _displayLink.paused = true;
@@ -511,15 +511,20 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
    _cancelTargetTranslation = 0;
    _currentScale = 1.0f;
    [UIView animateWithDuration:0.18 animations:^{
-        _innerIconWrapperView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
         _innerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
         _outerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
-        _decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
+        if (toSmallSize) {
+            _decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
+            _decoration.alpha = 0.0;
+            _innerIconWrapperView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
+            _innerIconWrapperView.alpha = 0.0f;
+        } else {
+            _decoration.transform = CGAffineTransformMakeScale(0.33f, 0.33f);
+            _innerIconWrapperView.transform = CGAffineTransformMakeScale(0.4f, 0.4f);
+        }
         _innerCircleView.alpha = 0.0f;
         _outerCircleView.alpha = 0.0f;
-        _decoration.alpha = 0.0f;
         self.iconView.alpha = 1.0f;
-        _innerIconWrapperView.alpha = 0.0f;
         
         CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 100.0f);
         transform = CGAffineTransformScale(transform, 0.2f, 0.2f);
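Note: animateOut: now distinguishes the collapse into the small recording-preview panel from a plain dismissal. A rough Swift sketch of the branching (view names are illustrative stand-ins, not the class's real ivars):

import UIKit

// Hypothetical views standing in for _decoration and _innerIconWrapperView.
func animateOut(toSmallSize: Bool, decoration: UIView, iconWrapper: UIView) {
    UIView.animate(withDuration: 0.18) {
        if toSmallSize {
            // Collapsing into the small preview panel: shrink and fade everything.
            decoration.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
            decoration.alpha = 0.0
            iconWrapper.transform = CGAffineTransform(scaleX: 0.2, y: 0.2)
            iconWrapper.alpha = 0.0
        } else {
            // Plain dismissal: decoration and icon settle at larger scales and stay visible.
            decoration.transform = CGAffineTransform(scaleX: 0.33, y: 0.33)
            iconWrapper.transform = CGAffineTransform(scaleX: 0.4, y: 0.4)
        }
    }
}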
submodules/TelegramUI/Resources/Animations/BinBlue.json: new file (diff suppressed because one or more lines are too long)
submodules/TelegramUI/Resources/Animations/BinRed.json: new file (diff suppressed because one or more lines are too long)
@@ -4,13 +4,16 @@ import Display
 import AsyncDisplayKit
 
 private final class AudioWaveformNodeParameters: NSObject {
     let waveform: AudioWaveform?
     let color: UIColor?
+    let gravity: AudioWaveformNode.Gravity?
     let progress: CGFloat?
     
-    init(waveform: AudioWaveform?, color: UIColor?, progress: CGFloat?) {
+    init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?) {
         self.waveform = waveform
         self.color = color
+        self.gravity = gravity
         self.progress = progress
         
         super.init()
@@ -18,8 +21,16 @@ private final class AudioWaveformNodeParameters: NSObject {
     }
 }
 
 final class AudioWaveformNode: ASDisplayNode {
+    enum Gravity {
+        case bottom
+        case center
+    }
+    
     private var waveform: AudioWaveform?
     private var color: UIColor?
+    private var gravity: Gravity?
     
     var progress: CGFloat? {
         didSet {
@@ -48,16 +59,17 @@ final class AudioWaveformNode: ASDisplayNode {
         }
     }
     
-    func setup(color: UIColor, waveform: AudioWaveform?) {
-        if self.color == nil || !self.color!.isEqual(color) || self.waveform != waveform {
+    func setup(color: UIColor, gravity: Gravity, waveform: AudioWaveform?) {
+        if self.color == nil || !self.color!.isEqual(color) || self.waveform != waveform || self.gravity != gravity {
             self.color = color
+            self.gravity = gravity
             self.waveform = waveform
             self.setNeedsDisplay()
         }
     }
     
     override func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
-        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, progress: self.progress)
+        return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress)
     }
     
     @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -128,12 +140,26 @@ final class AudioWaveformNode: ASDisplayNode {
                     diff = sampleWidth * 1.5
                 }
                 
+                let gravityMultiplierY: CGFloat = {
+                    switch parameters.gravity ?? .bottom {
+                    case .bottom:
+                        return 1
+                    case .center:
+                        return 0.5
+                    }
+                }()
+                
                 let adjustedSampleHeight = sampleHeight - diff
                 if adjustedSampleHeight.isLessThanOrEqualTo(sampleWidth) {
-                    context.fillEllipse(in: CGRect(x: offset, y: size.height - sampleWidth, width: sampleWidth, height: sampleWidth))
-                    context.fill(CGRect(x: offset, y: size.height - halfSampleWidth, width: sampleWidth, height: halfSampleWidth))
+                    context.fillEllipse(in: CGRect(x: offset, y: (size.height - sampleWidth) * gravityMultiplierY, width: sampleWidth, height: sampleWidth))
+                    context.fill(CGRect(x: offset, y: (size.height - halfSampleWidth) * gravityMultiplierY, width: sampleWidth, height: halfSampleWidth))
                 } else {
-                    let adjustedRect = CGRect(x: offset, y: size.height - adjustedSampleHeight, width: sampleWidth, height: adjustedSampleHeight)
+                    let adjustedRect = CGRect(
+                        x: offset,
+                        y: (size.height - adjustedSampleHeight) * gravityMultiplierY,
+                        width: sampleWidth,
+                        height: adjustedSampleHeight
+                    )
                     context.fill(adjustedRect)
                     context.fillEllipse(in: CGRect(x: adjustedRect.minX, y: adjustedRect.minY - halfSampleWidth, width: sampleWidth, height: sampleWidth))
                     context.fillEllipse(in: CGRect(x: adjustedRect.minX, y: adjustedRect.maxY - halfSampleWidth, width: sampleWidth, height: sampleWidth))
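Note: gravityMultiplierY only shifts where each bar is placed vertically; .bottom keeps the old bottom-anchored layout and .center (used by the new recording preview panel) centers the waveform in its container. A small sketch of the placement rule, with illustrative names:

import CoreGraphics

enum WaveformGravity { case bottom, center }

// y origin of a bar of height sampleHeight inside a container of height containerHeight.
func sampleOriginY(containerHeight: CGFloat, sampleHeight: CGFloat, gravity: WaveformGravity) -> CGFloat {
    let multiplier: CGFloat = (gravity == .bottom) ? 1.0 : 0.5
    return (containerHeight - sampleHeight) * multiplier
}

// Example: containerHeight = 33, sampleHeight = 13
// .bottom -> y = 20 (bar ends flush with the bottom edge)
// .center -> y = 10 (bar is vertically centered)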
@@ -2493,11 +2493,15 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                     
                     strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
                         $0.updatedInputTextPanelState { panelState in
+                            let isLocked = strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId
                             if let audioRecorder = audioRecorder {
                                 if panelState.mediaRecordingState == nil {
-                                    return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId))
+                                    return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: isLocked))
                                 }
                             } else {
+                                if case .waitingForPreview = panelState.mediaRecordingState {
+                                    return panelState
+                                }
                                 return panelState.withUpdatedMediaRecordingState(nil)
                             }
                             return panelState
@@ -7316,18 +7320,30 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                         self.chatDisplayNode.updateRecordedMediaDeleted(true)
                         break
                     case .preview:
+                        self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
+                            $0.updatedInputTextPanelState { panelState in
+                                return panelState.withUpdatedMediaRecordingState(.waitingForPreview)
+                            }
+                        })
                         let _ = (audioRecorderValue.takenRecordedData() |> deliverOnMainQueue).start(next: { [weak self] data in
                             if let strongSelf = self, let data = data {
                                 if data.duration < 0.5 {
                                     strongSelf.recorderFeedback?.error()
                                     strongSelf.recorderFeedback = nil
+                                    strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
+                                        $0.updatedInputTextPanelState { panelState in
+                                            return panelState.withUpdatedMediaRecordingState(nil)
+                                        }
+                                    })
                                 } else if let waveform = data.waveform {
                                     let resource = LocalFileMediaResource(fileId: arc4random64(), size: data.compressedData.count)
                                     
                                     strongSelf.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
                                     
                                     strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
-                                        $0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)))
+                                        $0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))).updatedInputTextPanelState { panelState in
+                                            return panelState.withUpdatedMediaRecordingState(nil)
+                                        }
                                     })
                                     strongSelf.recorderFeedback = nil
                                 }
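Note: stopping a recording for preview now goes through an explicit intermediate state: the panel switches to .waitingForPreview immediately, and the recording state is cleared only once the recorded data has arrived or a too-short recording has been rejected. A simplified, hypothetical model of that flow (not the project's real types):

enum RecordingPanelState {
    case recording
    case waitingForPreview
    case preview(duration: Double)
    case idle
}

func advance(_ state: RecordingPanelState, recordedDuration: Double?) -> RecordingPanelState {
    switch state {
    case .recording:
        // "Stop and preview" tapped: park the panel until the data arrives.
        return .waitingForPreview
    case .waitingForPreview:
        guard let duration = recordedDuration, duration >= 0.5 else {
            // Recordings shorter than half a second are dropped and the panel resets.
            return .idle
        }
        return .preview(duration: duration)
    case .preview, .idle:
        return state
    }
}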
@@ -7425,12 +7441,14 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
     }
     
     private func deleteMediaRecording() {
+        self.chatDisplayNode.updateRecordedMediaDeleted(true)
         self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
             $0.updatedRecordedMediaPreview(nil)
         })
     }
     
     private func sendMediaRecording() {
+        self.chatDisplayNode.updateRecordedMediaDeleted(false)
         if let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview {
             if let _ = self.presentationInterfaceState.slowmodeState, !self.presentationInterfaceState.isScheduledMessages {
                 if let rect = self.chatDisplayNode.frameForInputActionButton() {
@@ -1045,6 +1045,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
         var immediatelyLayoutInputPanelAndAnimateAppearance = false
         var secondaryInputPanelSize: CGSize?
         var immediatelyLayoutSecondaryInputPanelAndAnimateAppearance = false
+        var inputPanelNodeHandlesTransition = false
         
         let inputPanelNodes = inputPanelForChatPresentationIntefaceState(self.chatPresentationInterfaceState, context: self.context, currentPanel: self.inputPanelNode, currentSecondaryPanel: self.secondaryInputPanelNode, textInputPanelNode: self.textInputPanelNode, interfaceInteraction: self.interfaceInteraction)
         
@@ -1056,11 +1057,18 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
                 }
                 let _ = inputTextPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
             }
-            dismissedInputPanelNode = self.inputPanelNode
-            let inputPanelHeight = inputPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: inputPanelNode.supernode == nil ? .immediate : transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
+            if let prevInputPanelNode = self.inputPanelNode, inputPanelNode.canHandleTransition(from: prevInputPanelNode) {
+                inputPanelNodeHandlesTransition = true
+                inputPanelNode.removeFromSupernode()
+                inputPanelNode.prevInputPanelNode = prevInputPanelNode
+                inputPanelNode.addSubnode(prevInputPanelNode)
+            } else {
+                dismissedInputPanelNode = self.inputPanelNode
+            }
+            let inputPanelHeight = inputPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: inputPanelNode.supernode !== self ? .immediate : transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
             inputPanelSize = CGSize(width: layout.size.width, height: inputPanelHeight)
             self.inputPanelNode = inputPanelNode
-            if inputPanelNode.supernode == nil {
+            if inputPanelNode.supernode !== self {
                 immediatelyLayoutInputPanelAndAnimateAppearance = true
                 self.insertSubnode(inputPanelNode, aboveSubnode: self.inputPanelBackgroundNode)
             }
@@ -1471,21 +1479,6 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
             transition.animatePositionAdditive(node: titleAccessoryPanelNode, offset: CGPoint(x: 0.0, y: -titleAccessoryPanelFrame.height))
         }
         
-        if let inputPanelNode = self.inputPanelNode, let apparentInputPanelFrame = apparentInputPanelFrame, !inputPanelNode.frame.equalTo(apparentInputPanelFrame) {
-            if immediatelyLayoutInputPanelAndAnimateAppearance {
-                inputPanelNode.frame = apparentInputPanelFrame.offsetBy(dx: 0.0, dy: apparentInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentInputBackgroundFrame.maxY)
-                inputPanelNode.alpha = 0.0
-            }
-            if !transition.isAnimated {
-                inputPanelNode.layer.removeAllAnimations()
-                if let currentDismissedInputPanelNode = self.currentDismissedInputPanelNode, inputPanelNode is ChatSearchInputPanelNode {
-                    currentDismissedInputPanelNode.layer.removeAllAnimations()
-                }
-            }
-            transition.updateFrame(node: inputPanelNode, frame: apparentInputPanelFrame)
-            transition.updateAlpha(node: inputPanelNode, alpha: 1.0)
-        }
-        
         if let secondaryInputPanelNode = self.secondaryInputPanelNode, let apparentSecondaryInputPanelFrame = apparentSecondaryInputPanelFrame, !secondaryInputPanelNode.frame.equalTo(apparentSecondaryInputPanelFrame) {
             if immediatelyLayoutSecondaryInputPanelAndAnimateAppearance {
                 secondaryInputPanelNode.frame = apparentSecondaryInputPanelFrame.offsetBy(dx: 0.0, dy: apparentSecondaryInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentSecondaryInputPanelFrame.maxY)
@@ -1570,6 +1563,28 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
             })
         }
         
+        if let inputPanelNode = self.inputPanelNode,
+            let apparentInputPanelFrame = apparentInputPanelFrame,
+            !inputPanelNode.frame.equalTo(apparentInputPanelFrame) {
+            if immediatelyLayoutInputPanelAndAnimateAppearance {
+                inputPanelNode.frame = apparentInputPanelFrame.offsetBy(dx: 0.0, dy: apparentInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentInputBackgroundFrame.maxY)
+                inputPanelNode.alpha = 0.0
+            }
+            if !transition.isAnimated {
+                inputPanelNode.layer.removeAllAnimations()
+                if let currentDismissedInputPanelNode = self.currentDismissedInputPanelNode, inputPanelNode is ChatSearchInputPanelNode {
+                    currentDismissedInputPanelNode.layer.removeAllAnimations()
+                }
+            }
+            if inputPanelNodeHandlesTransition {
+                inputPanelNode.frame = apparentInputPanelFrame
+                inputPanelNode.alpha = 1.0
+            } else {
+                transition.updateFrame(node: inputPanelNode, frame: apparentInputPanelFrame)
+                transition.updateAlpha(node: inputPanelNode, alpha: 1.0)
+            }
+        }
+        
         if let dismissedInputPanelNode = dismissedInputPanelNode, dismissedInputPanelNode !== self.secondaryInputPanelNode {
             var frameCompleted = false
             var alphaCompleted = false
@@ -10,6 +10,7 @@ import AccountContext
 class ChatInputPanelNode: ASDisplayNode {
     var context: AccountContext?
     var interfaceInteraction: ChatPanelInterfaceInteraction?
+    var prevInputPanelNode: ChatInputPanelNode?
     
     func updateLayout(width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, maxHeight: CGFloat, isSecondary: Bool, transition: ContainedViewLayoutTransition, interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {
         return 0.0
@@ -26,4 +27,8 @@ class ChatInputPanelNode: ASDisplayNode {
             return 45.0
         }
     }
+    
+    func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
+        return false
+    }
 }
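Note: canHandleTransition(from:) is the hook that lets a newly selected input panel take ownership of the outgoing one (kept as prevInputPanelNode and re-added as a subnode) and drive the cross-animation itself instead of the generic dismissal path. A minimal sketch of the pattern with simplified stand-in types:

class PanelNode {
    var prevPanel: PanelNode?
    
    func canHandleTransition(from prevPanel: PanelNode?) -> Bool {
        // Default: let the container dismiss the old panel generically.
        return false
    }
}

final class RecordingPreviewPanel: PanelNode {
    override func canHandleTransition(from prevPanel: PanelNode?) -> Bool {
        // Mirrors ChatRecordingPreviewInputPanelNode, which animates in from the text panel.
        return prevPanel is TextInputPanel
    }
}

final class TextInputPanel: PanelNode {
    override func canHandleTransition(from prevPanel: PanelNode?) -> Bool {
        // Mirrors ChatTextInputPanelNode, which animates back from the preview panel.
        return prevPanel is RecordingPreviewPanel
    }
}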
@@ -609,8 +609,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                     } else {
                         waveformColor = messageTheme.mediaInactiveControlColor
                     }
-                    strongSelf.waveformNode.setup(color: waveformColor, waveform: audioWaveform)
-                    strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, waveform: audioWaveform)
+                    strongSelf.waveformNode.setup(color: waveformColor, gravity: .bottom, waveform: audioWaveform)
+                    strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, gravity: .bottom, waveform: audioWaveform)
                 } else if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
                     strongSelf.waveformScrubbingNode = nil
                     waveformScrubbingNode.removeFromSupernode()
@@ -10,6 +10,7 @@ import TelegramPresentationData
 import UniversalMediaPlayer
 import AppBundle
 import ContextUI
+import AnimationUI
 
 private func generatePauseIcon(_ theme: PresentationTheme) -> UIImage? {
     return generateTintedImage(image: UIImage(bundleImageName: "GlobalMusicPlayer/MinimizedPause"), color: theme.chat.inputPanel.actionControlForegroundColor)
@@ -24,22 +25,23 @@ extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {
 }
 
 final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
-    private let deleteButton: HighlightableButtonNode
+    let deleteButton: HighlightableButtonNode
+    let binNode: AnimationNode
     let sendButton: HighlightTrackingButtonNode
     private var sendButtonRadialStatusNode: ChatSendButtonRadialStatusNode?
-    private let playButton: HighlightableButtonNode
-    private let pauseButton: HighlightableButtonNode
+    let playButton: HighlightableButtonNode
+    let pauseButton: HighlightableButtonNode
     private let waveformButton: ASButtonNode
-    private let waveformBackgroundNode: ASImageNode
+    let waveformBackgroundNode: ASImageNode
     
     private let waveformNode: AudioWaveformNode
     private let waveformForegroundNode: AudioWaveformNode
-    private let waveformScubberNode: MediaPlayerScrubbingNode
+    let waveformScubberNode: MediaPlayerScrubbingNode
     
     private var presentationInterfaceState: ChatPresentationInterfaceState?
     
     private var mediaPlayer: MediaPlayer?
-    private let durationLabel: MediaPlayerTimeTextNode
+    let durationLabel: MediaPlayerTimeTextNode
     
     private let statusDisposable = MetaDisposable()
     
|
|||||||
init(theme: PresentationTheme) {
|
init(theme: PresentationTheme) {
|
||||||
self.deleteButton = HighlightableButtonNode()
|
self.deleteButton = HighlightableButtonNode()
|
||||||
self.deleteButton.displaysAsynchronously = false
|
self.deleteButton.displaysAsynchronously = false
|
||||||
self.deleteButton.setImage(generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.chat.inputPanel.panelControlAccentColor), for: [])
|
|
||||||
|
self.binNode = AnimationNode(
|
||||||
|
animation: "BinBlue",
|
||||||
|
colors: [
|
||||||
|
"Cap11.Cap2.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Bin 5.Bin.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Cap12.Cap1.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Line15.Line1.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Line13.Line3.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Line14.Line2.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
"Line13.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
self.sendButton = HighlightTrackingButtonNode()
|
self.sendButton = HighlightTrackingButtonNode()
|
||||||
self.sendButton.displaysAsynchronously = false
|
self.sendButton.displaysAsynchronously = false
|
||||||
@ -87,8 +101,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
|
|||||||
super.init()
|
super.init()
|
||||||
|
|
||||||
self.addSubnode(self.deleteButton)
|
self.addSubnode(self.deleteButton)
|
||||||
self.addSubnode(self.sendButton)
|
self.deleteButton.addSubnode(binNode)
|
||||||
self.addSubnode(self.waveformBackgroundNode)
|
self.addSubnode(self.waveformBackgroundNode)
|
||||||
|
self.addSubnode(self.sendButton)
|
||||||
self.addSubnode(self.waveformScubberNode)
|
self.addSubnode(self.waveformScubberNode)
|
||||||
self.addSubnode(self.playButton)
|
self.addSubnode(self.playButton)
|
||||||
self.addSubnode(self.pauseButton)
|
self.addSubnode(self.pauseButton)
|
||||||
@ -144,8 +159,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
|
|||||||
self.presentationInterfaceState = interfaceState
|
self.presentationInterfaceState = interfaceState
|
||||||
|
|
||||||
if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform {
|
if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform {
|
||||||
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), waveform: recordedMediaPreview.waveform)
|
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: recordedMediaPreview.waveform)
|
||||||
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, waveform: recordedMediaPreview.waveform)
|
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: recordedMediaPreview.waveform)
|
||||||
|
|
||||||
if self.mediaPlayer != nil {
|
if self.mediaPlayer != nil {
|
||||||
self.mediaPlayer?.pause()
|
self.mediaPlayer?.pause()
|
||||||
@ -175,8 +190,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
|
|||||||
|
|
||||||
let panelHeight = defaultHeight(metrics: metrics)
|
let panelHeight = defaultHeight(metrics: metrics)
|
||||||
|
|
||||||
transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset, y: -1.0), size: CGSize(width: 48.0, height: panelHeight)))
|
transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40)))
|
||||||
transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: -UIScreenPixel), size: CGSize(width: 44.0, height: panelHeight)))
|
transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: -UIScreenPixel), size: CGSize(width: 44.0, height: panelHeight)))
|
||||||
|
self.binNode.frame = self.deleteButton.bounds
|
||||||
|
|
||||||
if let slowmodeState = interfaceState.slowmodeState, !interfaceState.isScheduledMessages {
|
if let slowmodeState = interfaceState.slowmodeState, !interfaceState.isScheduledMessages {
|
||||||
let sendButtonRadialStatusNode: ChatSendButtonRadialStatusNode
|
let sendButtonRadialStatusNode: ChatSendButtonRadialStatusNode
|
||||||
@@ -203,14 +219,71 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
         
         transition.updateFrame(node: self.playButton, frame: CGRect(origin: CGPoint(x: leftInset + 52.0, y: 10.0), size: CGSize(width: 26.0, height: 26.0)))
         transition.updateFrame(node: self.pauseButton, frame: CGRect(origin: CGPoint(x: leftInset + 50.0, y: 10.0), size: CGSize(width: 26.0, height: 26.0)))
-        transition.updateFrame(node: self.waveformBackgroundNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0)))
+        let waveformBackgroundFrame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0))
+        transition.updateFrame(node: self.waveformBackgroundNode, frame: waveformBackgroundFrame)
         transition.updateFrame(node: self.waveformButton, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 0.0), size: CGSize(width: width - leftInset - rightInset - 90.0, height: panelHeight)))
         transition.updateFrame(node: self.waveformScubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0)))
         transition.updateFrame(node: self.durationLabel, frame: CGRect(origin: CGPoint(x: width - rightInset - 90.0 - 4.0, y: 15.0), size: CGSize(width: 35.0, height: 20.0)))
         
+        prevInputPanelNode?.frame = CGRect(origin: .zero, size: CGSize(width: width, height: panelHeight))
+        if let prevTextInputPanelNode = prevInputPanelNode as? ChatTextInputPanelNode {
+            self.prevInputPanelNode = nil
+            
+            if let audioRecordingDotNode = prevTextInputPanelNode.audioRecordingDotNode {
+                audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+                audioRecordingDotNode.layer.removeAllAnimations()
+                audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0), to: 0.0, duration: 0.15, removeOnCompletion: false)
+            }
+            
+            if let audioRecordingTimeNode = prevTextInputPanelNode.audioRecordingTimeNode {
+                audioRecordingTimeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
+                audioRecordingTimeNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+                let timePosition = audioRecordingTimeNode.position
+                audioRecordingTimeNode.layer.animatePosition(from: timePosition, to: CGPoint(x: timePosition.x - 20, y: timePosition.y), duration: 0.15, removeOnCompletion: false)
+            }
+            
+            if let audioRecordingCancelIndicator = prevTextInputPanelNode.audioRecordingCancelIndicator {
+                audioRecordingCancelIndicator.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
+            }
+            
+            prevTextInputPanelNode.actionButtons.micButton.animateOut(true)
+            
+            self.deleteButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15)
+            self.deleteButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+            
+            self.playButton.layer.animateScale(from: 0.01, to: 1.0, duration: 0.5, delay: 0.15)
+            self.playButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, delay: 0.15)
+            
+            self.pauseButton.layer.animateScale(from: 0.01, to: 1.0, duration: 0.5, delay: 0.15)
+            self.pauseButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, delay: 0.15)
+            
+            self.durationLabel.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4)
+            
+            self.waveformScubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.5, delay: 0.15)
+            self.waveformScubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, delay: 0.15)
+            
+            self.waveformBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+            self.waveformBackgroundNode.layer.animateFrame(
+                from: self.sendButton.frame.insetBy(dx: 5.5, dy: 5.5),
+                to: waveformBackgroundFrame,
+                duration: 0.3,
+                delay: 0.15,
+                timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue,
+                removeOnCompletion: false
+            ) { [weak self, weak prevTextInputPanelNode] finished in
+                if finished, prevTextInputPanelNode?.supernode === self {
+                    prevTextInputPanelNode?.removeFromSupernode()
+                }
+            }
+        }
+        
         return panelHeight
     }
     
+    override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
+        return prevInputPanelNode is ChatTextInputPanelNode
+    }
+    
     @objc func deletePressed() {
         self.interfaceInteraction?.deleteRecordedMedia()
     }
@@ -231,7 +231,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
             if self.hasRecorder {
                 self.animateIn()
             } else {
-                self.animateOut()
+                self.animateOut(false)
             }
         }
     }
@@ -429,11 +429,14 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
         innerIconView.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
     }
     
-    override func animateOut() {
-        super.animateOut()
+    override func animateOut(_ toSmallSize: Bool) {
+        super.animateOut(toSmallSize)
         
-        innerIconView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, removeOnCompletion: false)
-        innerIconView.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, removeOnCompletion: false)
+        if !toSmallSize {
+            micDecoration.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.18, removeOnCompletion: false)
+            innerIconView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, removeOnCompletion: false)
+            innerIconView.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, removeOnCompletion: false)
+        }
     }
     
     private var previousSize = CGSize()
@@ -906,7 +906,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
         
         var hideMicButton = false
         var audioRecordingItemsAlpha: CGFloat = 1
-        if let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState {
+        let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState
+        if mediaRecordingState != nil || interfaceState.recordedMediaPreview != nil {
            audioRecordingItemsAlpha = 0
        
            let audioRecordingInfoContainerNode: ASDisplayNode
@@ -927,7 +928,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.audioRecordingTimeNode = audioRecordingTimeNode
                 audioRecordingInfoContainerNode.addSubnode(audioRecordingTimeNode)
                 
-                if transition.isAnimated {
+                if transition.isAnimated && mediaRecordingState != nil {
                     animateTimeSlideIn = true
                 }
             }
@@ -938,7 +939,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
             if let currentAudioRecordingCancelIndicator = self.audioRecordingCancelIndicator {
                 audioRecordingCancelIndicator = currentAudioRecordingCancelIndicator
             } else {
-                animateCancelSlideIn = transition.isAnimated
+                animateCancelSlideIn = transition.isAnimated && mediaRecordingState != nil
                 
                 audioRecordingCancelIndicator = ChatTextInputAudioRecordingCancelIndicator(theme: interfaceState.theme, strings: interfaceState.strings, cancel: { [weak self] in
                     self?.interfaceInteraction?.finishMediaRecording(.dismiss)
@@ -947,15 +948,16 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 self.insertSubnode(audioRecordingCancelIndicator, at: 0)
             }
             
-            let isLocked = mediaRecordingState.isLocked
+            let isLocked = mediaRecordingState?.isLocked ?? (interfaceState.recordedMediaPreview != nil)
             var hideInfo = false
             
-            switch mediaRecordingState {
-            case let .audio(recorder, _):
-                self.actionButtons.micButton.audioRecorder = recorder
-                audioRecordingTimeNode.audioRecorder = recorder
-            case let .video(status, _):
-                switch status {
+            if let mediaRecordingState = mediaRecordingState {
+                switch mediaRecordingState {
+                case let .audio(recorder, _):
+                    self.actionButtons.micButton.audioRecorder = recorder
+                    audioRecordingTimeNode.audioRecorder = recorder
+                case let .video(status, _):
+                    switch status {
                 case let .recording(recordingStatus):
                     audioRecordingTimeNode.videoRecordingStatus = recordingStatus
                     self.actionButtons.micButton.videoRecordingStatus = recordingStatus
@@ -967,6 +969,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                     self.actionButtons.micButton.videoRecordingStatus = nil
                     hideMicButton = true
                     hideInfo = true
+                    }
+                case .waitingForPreview:
+                    break
                 }
             }
             
@@ -1001,7 +1006,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 audioRecordingCancelIndicator.layer.animatePosition(from: CGPoint(x: width + audioRecordingCancelIndicator.bounds.size.width, y: position.y), to: position, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
             }
             
-            audioRecordingCancelIndicator.updateIsDisplayingCancel(isLocked, animated: !animateCancelSlideIn)
+            audioRecordingCancelIndicator.updateIsDisplayingCancel(isLocked, animated: !animateCancelSlideIn && mediaRecordingState != nil)
             
             if isLocked || self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold {
                 var deltaOffset: CGFloat = 0.0
@@ -1046,7 +1051,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 audioRecordingDotNode = currentAudioRecordingDotNode
             } else {
                 self.audioRecordingDotNode?.removeFromSupernode()
-                audioRecordingDotNode = AnimationNode(animation: "Bin")
+                audioRecordingDotNode = AnimationNode(animation: "BinRed")
                 self.audioRecordingDotNode = audioRecordingDotNode
                 self.addSubnode(audioRecordingDotNode)
             }
@@ -1117,8 +1122,13 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 audioRecordingDotNode.layer.removeAllAnimations()
                 
                 if self.isMediaDeleted {
-                    audioRecordingDotNode.completion = dismissDotNode
-                    audioRecordingDotNode.play()
+                    if self.prevInputPanelNode is ChatRecordingPreviewInputPanelNode {
+                        self.audioRecordingDotNode?.removeFromSupernode()
+                        self.audioRecordingDotNode = nil
+                    } else {
+                        audioRecordingDotNode.completion = dismissDotNode
+                        audioRecordingDotNode.play()
+                    }
                 } else {
                     dismissDotNode()
                 }
@@ -1333,9 +1343,80 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
         
         self.updateActionButtons(hasText: hasText, hideMicButton: hideMicButton, animated: transition.isAnimated)
         
+        if let prevInputPanelNode = prevInputPanelNode {
+            prevInputPanelNode.frame = CGRect(origin: .zero, size: prevInputPanelNode.frame.size)
+        }
+        if let prevPreviewInputPanelNode = self.prevInputPanelNode as? ChatRecordingPreviewInputPanelNode {
+            self.prevInputPanelNode = nil
+            
+            prevPreviewInputPanelNode.isUserInteractionEnabled = false
+            
+            if self.isMediaDeleted {
+                func animatePosition(for previewSubnode: ASDisplayNode) {
+                    previewSubnode.layer.animatePosition(
+                        from: previewSubnode.position,
+                        to: CGPoint(x: previewSubnode.position.x - 20, y: previewSubnode.position.y),
+                        duration: 0.15
+                    )
+                }
+                
+                animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode)
+                animatePosition(for: prevPreviewInputPanelNode.waveformScubberNode)
+                animatePosition(for: prevPreviewInputPanelNode.durationLabel)
+                animatePosition(for: prevPreviewInputPanelNode.playButton)
+                animatePosition(for: prevPreviewInputPanelNode.pauseButton)
+            }
+            
+            func animateAlpha(for previewSubnode: ASDisplayNode) {
+                previewSubnode.layer.animateAlpha(
+                    from: 1.0,
+                    to: 0.0,
+                    duration: 0.15,
+                    removeOnCompletion: false
+                )
+            }
+            animateAlpha(for: prevPreviewInputPanelNode.waveformBackgroundNode)
+            animateAlpha(for: prevPreviewInputPanelNode.waveformScubberNode)
+            animateAlpha(for: prevPreviewInputPanelNode.durationLabel)
+            animateAlpha(for: prevPreviewInputPanelNode.playButton)
+            animateAlpha(for: prevPreviewInputPanelNode.pauseButton)
+            
+            let dismissBin = { [weak self, weak prevPreviewInputPanelNode] in
+                prevPreviewInputPanelNode?.deleteButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                prevPreviewInputPanelNode?.deleteButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                
+                self?.attachmentButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                self?.attachmentButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+                
+                if prevPreviewInputPanelNode?.supernode === self {
+                    prevPreviewInputPanelNode?.removeFromSupernode()
+                }
+            }
+            
+            if self.isMediaDeleted {
+                prevPreviewInputPanelNode.binNode.completion = dismissBin
+                prevPreviewInputPanelNode.binNode.play()
+            } else {
+                dismissBin()
+            }
+            
+            prevPreviewInputPanelNode.sendButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+            prevPreviewInputPanelNode.sendButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
+            
+            actionButtons.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+            actionButtons.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+            
+            prevPreviewInputPanelNode.sendButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+            prevPreviewInputPanelNode.sendButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
+        }
+        
         return panelHeight
     }
     
+    override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
+        return prevInputPanelNode is ChatRecordingPreviewInputPanelNode
+    }
+    
     @objc func editableTextNodeDidUpdateText(_ editableTextNode: ASEditableTextNode) {
         if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState {
             let baseFontSize = max(17.0, presentationInterfaceState.fontSize.baseDisplaySize)
@@ -63,39 +63,49 @@ enum ChatVideoRecordingStatus: Equatable {
 enum ChatTextInputPanelMediaRecordingState: Equatable {
     case audio(recorder: ManagedAudioRecorder, isLocked: Bool)
     case video(status: ChatVideoRecordingStatus, isLocked: Bool)
+    case waitingForPreview
     
     var isLocked: Bool {
         switch self {
         case let .audio(_, isLocked):
            return isLocked
         case let .video(_, isLocked):
            return isLocked
+        case .waitingForPreview:
+           return true
         }
     }
     
     func withLocked(_ isLocked: Bool) -> ChatTextInputPanelMediaRecordingState {
         switch self {
         case let .audio(recorder, _):
            return .audio(recorder: recorder, isLocked: isLocked)
         case let .video(status, _):
            return .video(status: status, isLocked: isLocked)
+        case .waitingForPreview:
+           return .waitingForPreview
         }
     }
     
     static func ==(lhs: ChatTextInputPanelMediaRecordingState, rhs: ChatTextInputPanelMediaRecordingState) -> Bool {
         switch lhs {
         case let .audio(lhsRecorder, lhsIsLocked):
             if case let .audio(rhsRecorder, rhsIsLocked) = rhs, lhsRecorder === rhsRecorder, lhsIsLocked == rhsIsLocked {
                 return true
             } else {
                 return false
             }
         case let .video(status, isLocked):
             if case .video(status, isLocked) = rhs {
                 return true
             } else {
                 return false
             }
+        case .waitingForPreview:
+            if case .waitingForPreview = rhs {
+                return true
+            }
+            return false
         }
     }
 }
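Note: the new .waitingForPreview case reports itself as locked, so the panel keeps showing the explicit cancel affordance while the preview is being prepared. A simplified mirror of the extended state (illustrative only, recorder payloads omitted):

enum RecordingState {
    case audio(isLocked: Bool)
    case video(isLocked: Bool)
    case waitingForPreview
    
    var isLocked: Bool {
        switch self {
        case let .audio(isLocked), let .video(isLocked):
            return isLocked
        case .waitingForPreview:
            // Waiting for the recorded data counts as locked.
            return true
        }
    }
}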