mirror of https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-12-22 22:25:57 +00:00
Apply patch
@@ -514,8 +514,8 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
     _innerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
     _outerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
     if (toSmallSize) {
-        _decoration.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.33f, 0.33f), CGAffineTransformMakeTranslation(-4, 0));
-        _innerIconWrapperView.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.492f, 0.492f), CGAffineTransformMakeTranslation(-TGScreenPixel, 0));
+        _decoration.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.33f, 0.33f), CGAffineTransformMakeTranslation(0, 2 - TGScreenPixel));
+        _innerIconWrapperView.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.492f, 0.492f), CGAffineTransformMakeTranslation(-TGScreenPixel, 1));
     } else {
         _decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
         _decoration.alpha = 0.0;
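Both changed lines compose a shrink with a small positional nudge via CGAffineTransformConcat, which applies its first argument first, so the translation acts in the superview's un-scaled units. A minimal Swift sketch of the same composition, where the view name is a placeholder and TGScreenPixel is assumed to be one physical pixel (1 / screen scale):

import UIKit

// Hypothetical stand-in for _decoration; not the actual Telegram hierarchy.
let decorationView = UIView(frame: CGRect(x: 0, y: 0, width: 40, height: 40))

// Assumption: TGScreenPixel is one physical pixel (1/3 pt on a 3x display).
let screenPixel = 1.0 / UIScreen.main.scale

// concatenating(_:) applies the receiver first, then the argument, so the
// 2 - screenPixel offset stays in un-scaled point units instead of being
// shrunk by the 0.33 scale.
let scale = CGAffineTransform(scaleX: 0.33, y: 0.33)
let nudge = CGAffineTransform(translationX: 0, y: 2 - screenPixel)
decorationView.transform = scale.concatenating(nudge)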
@@ -190,8 +190,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
 
         let panelHeight = defaultHeight(metrics: metrics)
 
-        transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40)))
-        transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: -UIScreenPixel), size: CGSize(width: 44.0, height: panelHeight)))
+        transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: 1), size: CGSize(width: 40.0, height: 40)))
+        transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: 2 - UIScreenPixel), size: CGSize(width: 44.0, height: 44)))
         self.binNode.frame = self.deleteButton.bounds
 
         if let slowmodeState = interfaceState.slowmodeState, !interfaceState.isScheduledMessages {
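transition.updateFrame comes from Telegram's Display framework (ContainedViewLayoutTransition), which applies a frame immediately or animates it depending on the transition; the new frames pin fixed-size buttons near the panel's top edge instead of deriving y and height from panelHeight. A minimal stand-in for that animated-or-immediate pattern, as an illustration only, not the actual Display API:

import UIKit

// Minimal stand-in for the animated-or-immediate frame update performed by
// ContainedViewLayoutTransition.updateFrame; illustration only.
enum LayoutTransition {
    case immediate
    case animated(duration: TimeInterval)

    func updateFrame(view: UIView, frame: CGRect) {
        switch self {
        case .immediate:
            view.frame = frame
        case let .animated(duration):
            UIView.animate(withDuration: duration) { view.frame = frame }
        }
    }
}

// Usage mirroring the new delete-button frame: pinned 1 pt from the top,
// independent of the panel's height.
let deleteButton = UIView()
let leftInset: CGFloat = 0 // placeholder inset
let screenPixel = 1.0 / UIScreen.main.scale
LayoutTransition.animated(duration: 0.2).updateFrame(
    view: deleteButton,
    frame: CGRect(x: leftInset + 2.0 - screenPixel, y: 1, width: 40, height: 40)
)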
@@ -230,9 +230,10 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
             self.prevInputPanelNode = nil
 
             if let audioRecordingDotNode = prevTextInputPanelNode.audioRecordingDotNode {
-                audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+                let startAlpha = CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0)
                 audioRecordingDotNode.layer.removeAllAnimations()
-                audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0), to: 0.0, duration: 0.15, removeOnCompletion: false)
+                audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
+                audioRecordingDotNode.layer.animateAlpha(from: startAlpha, to: 0.0, duration: 0.15, removeOnCompletion: false)
             }
 
             if let audioRecordingTimeNode = prevTextInputPanelNode.audioRecordingTimeNode {
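The reordering matters because removeAllAnimations() discards the layer's presentation state: read afterwards, presentation()?.opacity reports the model value rather than the on-screen one, so the fade-out would start from the wrong alpha. Captured first, startAlpha preserves the visible value. A self-contained sketch of the same ordering using Core Animation directly (animateAlpha and animateScale are Display-framework helpers; CABasicAnimation stands in for them here):

import UIKit

// Fade a layer out starting from its *on-screen* opacity. The presentation
// value must be read before removeAllAnimations(), which resets it.
func fadeOutFromPresentation(_ layer: CALayer) {
    let startAlpha = CGFloat(layer.presentation()?.opacity ?? 1.0)
    layer.removeAllAnimations()

    let fade = CABasicAnimation(keyPath: "opacity")
    fade.fromValue = startAlpha
    fade.toValue = 0.0
    fade.duration = 0.15
    fade.isRemovedOnCompletion = false // matches removeOnCompletion: false
    fade.fillMode = .forwards
    layer.add(fade, forKey: "fadeOut")
}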
@@ -1060,13 +1060,15 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
                 }
 
                 animateDotAppearing = transition.isAnimated && !hideInfo
+                if let mediaRecordingState = mediaRecordingState, case .waitingForPreview = mediaRecordingState {
+                    animateDotAppearing = false
+                }
 
-                audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40))
+                audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: audioRecordingTimeNode.frame.midY - 20), size: CGSize(width: 40.0, height: 40))
                 if animateDotAppearing {
-                    let dotStartScale: CGFloat = (audioRecordingDotNode.layer.presentation()?.value(forKeyPath: "transform.scale.x") as? CGFloat) ?? 1
-                    audioRecordingDotNode.layer.animateScale(from: dotStartScale, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
+                    audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false)
                     if audioRecordingDotNode.layer.animation(forKey: "recording") == nil {
-                        audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
+                        audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 0), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in
                             if finished {
                                 let animation = CAKeyframeAnimation(keyPath: "opacity")
                                 animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
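Two fixes land in this hunk: the dot no longer animates in while the recording is merely waiting for its preview (the new waitingForPreview guard), and its frame is centered on the timer text rather than derived from panelHeight. The check on the "recording" key prevents the blink animation from being re-added on every layout pass. A sketch of that guard-then-blink pattern, with timing values assumed where the diff does not show them:

import UIKit

// Start the blinking "recording" dot unless it is already blinking. The
// keyframe values (1 -> 1 -> 0) match the hunk above; the duration and
// repeat behavior are assumptions, not taken from this diff.
func startRecordingBlink(on layer: CALayer) {
    guard layer.animation(forKey: "recording") == nil else { return }
    let animation = CAKeyframeAnimation(keyPath: "opacity")
    animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
    animation.duration = 0.5
    animation.autoreverses = true
    animation.repeatCount = .infinity
    layer.add(animation, forKey: "recording")
}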