Initial implementation

Ali
2020-06-10 20:25:45 +04:00
parent 853febcb28
commit 57ac20c121
14 changed files with 387 additions and 118 deletions


@@ -13,6 +13,7 @@ import AccountContext
import TouchDownGesture
import ImageTransparency
import ActivityIndicator
+import AnimationUI
private let accessoryButtonFont = Font.medium(14.0)
@@ -210,7 +211,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
private let searchLayoutClearImageNode: ASImageNode
private var searchActivityIndicator: ActivityIndicator?
var audioRecordingInfoContainerNode: ASDisplayNode?
-var audioRecordingDotNode: ASImageNode?
+var audioRecordingDotNode: AnimationNode?
var audioRecordingTimeNode: ChatTextInputAudioRecordingTimeNode?
var audioRecordingCancelIndicator: ChatTextInputAudioRecordingCancelIndicator?
@@ -235,6 +236,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
private var keepSendButtonEnabled = false
private var extendedSearchLayout = false
+var isMediaDeleted: Bool = false
private let inputMenu = ChatTextInputMenu()
private var theme: PresentationTheme?
@@ -445,6 +448,13 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
}
}
}
+self.actionButtons.micButton.updateCancelTranslation = { [weak self] in
+if let strongSelf = self, let presentationInterfaceState = strongSelf.presentationInterfaceState {
+if let (width, leftInset, rightInset, maxHeight, metrics, isSecondary) = strongSelf.validLayout {
+let _ = strongSelf.updateLayout(width: width, leftInset: leftInset, rightInset: rightInset, maxHeight: maxHeight, isSecondary: isSecondary, transition: .immediate, interfaceState: presentationInterfaceState, metrics: metrics)
+}
+}
+}
self.actionButtons.micButton.stopRecording = { [weak self] in
if let strongSelf = self, let interfaceInteraction = strongSelf.interfaceInteraction {
interfaceInteraction.stopMediaRecording()
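The hunk above wires an updateCancelTranslation callback on the mic button that simply re-runs updateLayout with the cached validLayout values, so the panel can track the slide-to-cancel gesture on every pan update. A minimal sketch of that "re-apply the last layout when a gesture value changes" pattern, using hypothetical stand-in types (MicButtonStub, PanelNodeStub) rather than the repository's real API:

import CoreGraphics

// Hypothetical stand-in for the mic button: reports every change of the
// slide-to-cancel translation through a closure.
final class MicButtonStub {
    var updateCancelTranslation: (() -> Void)?
    var cancelTranslation: CGFloat = 0.0 {
        didSet { updateCancelTranslation?() }
    }
}

// Hypothetical stand-in for the panel: caches its last layout input and
// re-applies it whenever the mic button reports a new translation.
final class PanelNodeStub {
    let micButton = MicButtonStub()
    private var validLayout: (width: CGFloat, leftInset: CGFloat, rightInset: CGFloat)?

    init() {
        // Weak capture mirrors the diff: the closure must not retain the panel.
        micButton.updateCancelTranslation = { [weak self] in
            guard let self = self, let layout = self.validLayout else { return }
            self.updateLayout(width: layout.width, leftInset: layout.leftInset, rightInset: layout.rightInset)
        }
    }

    func updateLayout(width: CGFloat, leftInset: CGFloat, rightInset: CGFloat) {
        validLayout = (width, leftInset, rightInset)
        // The real code positions the cancel indicator from micButton.cancelTranslation here.
    }
}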
@@ -744,10 +754,6 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.searchLayoutClearImageNode.image = PresentationResourcesChat.chatInputTextFieldClearImage(interfaceState.theme)
-if let audioRecordingDotNode = self.audioRecordingDotNode {
-audioRecordingDotNode.image = PresentationResourcesChat.chatInputPanelMediaRecordingDotImage(interfaceState.theme)
-}
self.audioRecordingTimeNode?.updateTheme(theme: interfaceState.theme)
self.audioRecordingCancelIndicator?.updateTheme(theme: interfaceState.theme)
@@ -899,9 +905,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.actionButtons.micButton.updateMode(mode: interfaceState.interfaceState.mediaRecordingMode, animated: transition.isAnimated)
var hideMicButton = false
-var audioRecordingItemsVerticalOffset: CGFloat = 0.0
+var audioRecordingItemsAlpha: CGFloat = 1
if let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState {
-audioRecordingItemsVerticalOffset = panelHeight * 2.0
+audioRecordingItemsAlpha = 0
transition.updateAlpha(layer: self.textInputBackgroundNode.layer, alpha: 0.0)
if let textInputNode = self.textInputNode {
transition.updateAlpha(node: textInputNode, alpha: 0.0)
@@ -936,7 +942,19 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.insertSubnode(audioRecordingCancelIndicator, at: 0)
}
-audioRecordingCancelIndicator.frame = CGRect(origin: CGPoint(x: leftInset + floor((baseWidth - audioRecordingCancelIndicator.bounds.size.width) / 2.0) - self.actionButtons.micButton.controlsOffset, y: panelHeight - minimalHeight + floor((minimalHeight - audioRecordingCancelIndicator.bounds.size.height) / 2.0)), size: audioRecordingCancelIndicator.bounds.size)
+let cancelTransformThreshold: CGFloat = 8.0
+let indicatorTranslation = max(0.0, self.actionButtons.micButton.cancelTranslation - cancelTransformThreshold)
+audioRecordingCancelIndicator.frame = CGRect(
+origin: CGPoint(
+x: leftInset + floor((baseWidth - audioRecordingCancelIndicator.bounds.size.width - indicatorTranslation) / 2.0),
+y: panelHeight - minimalHeight + floor((minimalHeight - audioRecordingCancelIndicator.bounds.size.height) / 2.0)),
+size: audioRecordingCancelIndicator.bounds.size)
+if self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold {
+let progress = 1 - (self.actionButtons.micButton.cancelTranslation - cancelTransformThreshold) / 80
+audioRecordingCancelIndicator.alpha = progress
+}
if animateCancelSlideIn {
let position = audioRecordingCancelIndicator.layer.position
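The new layout code above only reacts once the slide-to-cancel translation passes an 8 pt threshold; beyond that, the excess translation is fed into the centering expression and the hint's alpha falls off linearly, reaching zero 80 pt past the threshold (the / 80 term). A worked standalone version of that mapping; the helper name is hypothetical and the clamp on alpha is an addition the diff itself does not perform:

import CoreGraphics

// Values taken from the hunk above: 8 pt dead zone, 80 pt fade distance.
func cancelHintState(for cancelTranslation: CGFloat,
                     threshold: CGFloat = 8.0,
                     fadeDistance: CGFloat = 80.0) -> (excess: CGFloat, alpha: CGFloat) {
    // excess is what the layout code subtracts inside its centering expression.
    let excess = max(0.0, cancelTranslation - threshold)
    // Clamped here for safety; the original computes 1 - excess / 80 unclamped.
    let alpha = max(0.0, min(1.0, 1.0 - excess / fadeDistance))
    return (excess: excess, alpha: alpha)
}

// cancelHintState(for: 4.0)  -> (0.0, 1.0)   still fully visible
// cancelHintState(for: 48.0) -> (40.0, 0.5)  halfway through the fade
// cancelHintState(for: 88.0) -> (80.0, 0.0)  fully faded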
@@ -945,6 +963,18 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
audioRecordingCancelIndicator.updateIsDisplayingCancel(isLocked, animated: !animateCancelSlideIn)
+if isLocked || self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold {
+audioRecordingCancelIndicator.layer.removeAnimation(forKey: "slide_juggle")
+} else if audioRecordingCancelIndicator.layer.animation(forKey: "slide_juggle") == nil {
+let slideJuggleAnimation = CABasicAnimation(keyPath: "transform")
+slideJuggleAnimation.toValue = CATransform3DMakeTranslation(-6, 0, 0)
+slideJuggleAnimation.duration = 1
+slideJuggleAnimation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
+slideJuggleAnimation.autoreverses = true
+slideJuggleAnimation.repeatCount = Float.infinity
+audioRecordingCancelIndicator.layer.add(slideJuggleAnimation, forKey: "slide_juggle")
+}
var animateTimeSlideIn = false
let audioRecordingTimeNode: ChatTextInputAudioRecordingTimeNode
if let currentAudioRecordingTimeNode = self.audioRecordingTimeNode {
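The block added in the previous hunk installs an idle "juggle" hint on the cancel indicator: a 6 pt autoreversing nudge to the left that repeats until the gesture passes the threshold or the recording is locked, at which point the animation is removed by key. The same loop in plain Core Animation on a bare CALayer, as a sketch rather than the repository's ASDisplayNode-based code:

import UIKit

// Installs or removes the same kind of autoreversing nudge the diff adds
// under the "slide_juggle" key.
func setSlideJuggle(enabled: Bool, on layer: CALayer) {
    let key = "slide_juggle"
    if !enabled {
        layer.removeAnimation(forKey: key)
        return
    }
    guard layer.animation(forKey: key) == nil else { return }
    let juggle = CABasicAnimation(keyPath: "transform")
    juggle.toValue = CATransform3DMakeTranslation(-6, 0, 0) // 6 pt to the left
    juggle.duration = 1
    juggle.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
    juggle.autoreverses = true        // slide out, then back
    juggle.repeatCount = .infinity    // loop until explicitly removed
    layer.add(juggle, forKey: key)
}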
@@ -961,32 +991,41 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
let audioRecordingTimeSize = audioRecordingTimeNode.measure(CGSize(width: 200.0, height: 100.0))
-audioRecordingInfoContainerNode.frame = CGRect(origin: CGPoint(x: min(leftInset, audioRecordingCancelIndicator.frame.minX - audioRecordingTimeSize.width - 8.0 - 28.0), y: 0.0), size: CGSize(width: baseWidth, height: panelHeight))
+let cancelMinX = audioRecordingCancelIndicator.alpha > 0.5 ? audioRecordingCancelIndicator.frame.minX : width
-audioRecordingTimeNode.frame = CGRect(origin: CGPoint(x: 28.0, y: panelHeight - minimalHeight + floor((minimalHeight - audioRecordingTimeSize.height) / 2.0)), size: audioRecordingTimeSize)
+audioRecordingInfoContainerNode.frame = CGRect(
+origin: CGPoint(
+x: min(leftInset, cancelMinX - audioRecordingTimeSize.width - 8.0 - 28.0),
+y: 0.0
+),
+size: CGSize(width: baseWidth, height: panelHeight)
+)
+audioRecordingTimeNode.frame = CGRect(origin: CGPoint(x: 40.0, y: panelHeight - minimalHeight + floor((minimalHeight - audioRecordingTimeSize.height) / 2.0)), size: audioRecordingTimeSize)
if animateTimeSlideIn {
let position = audioRecordingTimeNode.layer.position
-audioRecordingTimeNode.layer.animatePosition(from: CGPoint(x: position.x - 28.0 - audioRecordingTimeSize.width, y: position.y), to: position, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
+audioRecordingTimeNode.layer.animatePosition(from: CGPoint(x: position.x - 10.0, y: position.y), to: position, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
+audioRecordingTimeNode.layer.animateAlpha(from: 0, to: 1, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
}
audioRecordingTimeNode.audioRecorder = recorder
-var animateDotSlideIn = false
-let audioRecordingDotNode: ASImageNode
+var animateDotAppearing = false
+let audioRecordingDotNode: AnimationNode
if let currentAudioRecordingDotNode = self.audioRecordingDotNode {
audioRecordingDotNode = currentAudioRecordingDotNode
} else {
-animateDotSlideIn = transition.isAnimated
-audioRecordingDotNode = ASImageNode()
-audioRecordingDotNode.image = PresentationResourcesChat.chatInputPanelMediaRecordingDotImage(interfaceState.theme)
+audioRecordingDotNode = AnimationNode(animation: "voicebin")
+audioRecordingDotNode.speed = 2.0
self.audioRecordingDotNode = audioRecordingDotNode
-audioRecordingInfoContainerNode.addSubnode(audioRecordingDotNode)
+self.addSubnode(audioRecordingDotNode)
}
-audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: audioRecordingTimeNode.frame.minX - 17.0, y: panelHeight - minimalHeight + floor((minimalHeight - 9.0) / 2.0)), size: CGSize(width: 9.0, height: 9.0))
-if animateDotSlideIn {
-let position = audioRecordingDotNode.layer.position
-audioRecordingDotNode.layer.animatePosition(from: CGPoint(x: position.x - 9.0 - 51.0, y: position.y), to: position, duration: 0.7, timingFunction: kCAMediaTimingFunctionSpring, completion: { [weak audioRecordingDotNode] finished in
+animateDotAppearing = transition.isAnimated
+audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44), size: CGSize(width: 40.0, height: 40))
+if animateDotAppearing {
+audioRecordingDotNode.layer.animateAlpha(from: 0, to: 1, duration: 0.25, delay: 0, timingFunction: kCAMediaTimingFunctionSpring, completion: { [weak audioRecordingDotNode] finished in
if finished {
let animation = CAKeyframeAnimation(keyPath: "opacity")
animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
@@ -998,6 +1037,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
audioRecordingDotNode?.layer.add(animation, forKey: "recording")
}
})
+self.attachmentButton.layer.animateAlpha(from: 1, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
+self.attachmentButton.layer.animateScale(from: 1, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false)
}
case let .video(status, _):
switch status {
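The surrounding context shows what happens once the dot is visible: it fades in over 0.25 s, then loops an opacity keyframe animation under the "recording" key while the attachment button is scaled and faded out. A self-contained approximation on a plain CALayer; the blink duration and repeat settings are assumptions, since the exact values sit in lines elided from this hunk:

import UIKit

// Blinking "recording" indicator following the pattern above:
// fade in, then loop an opacity keyframe animation under the "recording" key.
func startRecordingBlink(on dotLayer: CALayer) {
    let fadeIn = CABasicAnimation(keyPath: "opacity")
    fadeIn.fromValue = 0
    fadeIn.toValue = 1
    fadeIn.duration = 0.25
    dotLayer.add(fadeIn, forKey: "dot_appear")

    let blink = CAKeyframeAnimation(keyPath: "opacity")
    blink.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber] // hold, then fade
    blink.duration = 0.5              // assumed value
    blink.autoreverses = true         // assumed value
    blink.repeatCount = .infinity
    dotLayer.add(blink, forKey: "recording")
}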
@@ -1021,13 +1063,38 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
if let audioRecordingInfoContainerNode = self.audioRecordingInfoContainerNode {
self.audioRecordingInfoContainerNode = nil
-transition.updateFrame(node: audioRecordingInfoContainerNode, frame: CGRect(origin: CGPoint(x: -width, y: 0.0), size: audioRecordingInfoContainerNode.bounds.size), completion: { [weak audioRecordingInfoContainerNode] _ in
+transition.updateTransformScale(node: audioRecordingInfoContainerNode, scale: 0)
+transition.updatePosition(node: audioRecordingInfoContainerNode, position: CGPoint(x: audioRecordingInfoContainerNode.position.x - 10, y: audioRecordingInfoContainerNode.position.y))
+transition.updateAlpha(node: audioRecordingInfoContainerNode, alpha: 0) { [weak audioRecordingInfoContainerNode] _ in
audioRecordingInfoContainerNode?.removeFromSupernode()
-})
+}
}
-if let _ = self.audioRecordingDotNode {
-self.audioRecordingDotNode = nil
+if let audioRecordingDotNode = self.audioRecordingDotNode {
+let dismissDotNode = { [weak audioRecordingDotNode, weak attachmentButton, weak self] in
+guard let audioRecordingDotNode = audioRecordingDotNode else { return }
+if audioRecordingDotNode === self?.audioRecordingDotNode {
+self?.audioRecordingDotNode = nil
+}
+audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false)
+audioRecordingDotNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, delay: 0, removeOnCompletion: false) { [weak audioRecordingDotNode] _ in
+audioRecordingDotNode?.removeFromSupernode()
+}
+attachmentButton?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+attachmentButton?.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
+}
+audioRecordingDotNode.layer.removeAllAnimations()
+if self.isMediaDeleted {
+audioRecordingDotNode.completion = dismissDotNode
+audioRecordingDotNode.play()
+} else {
+dismissDotNode()
+}
}
if let _ = self.audioRecordingTimeNode {
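The replacement block in the previous hunk funnels all teardown through a single dismissDotNode closure: when isMediaDeleted is set, the "voicebin" animation plays first and the closure runs from its completion; otherwise it runs immediately, scaling and fading the dot out and restoring the attachment button. A compact stand-in for that completion-gated dismissal, using a hypothetical OneShotAnimatable protocol in place of the repository's AnimationNode:

import UIKit

// Hypothetical stand-in for AnimationNode: something that can play a
// one-shot animation and call back when it finishes.
protocol OneShotAnimatable: AnyObject {
    func play(completion: @escaping () -> Void)
    var view: UIView { get }
}

// Same shape as the diff: build one dismissal closure, then either run it
// right away or only after the "delete" animation has played.
func dismissRecordingDot(_ dot: OneShotAnimatable,
                         attachmentButton: UIView,
                         wasDeleted: Bool) {
    let dismiss = { [weak dot, weak attachmentButton] in
        guard let dot = dot else { return }
        UIView.animate(withDuration: 0.15, animations: {
            dot.view.alpha = 0.0
            dot.view.transform = CGAffineTransform(scaleX: 0.3, y: 0.3)
            attachmentButton?.alpha = 1.0
            attachmentButton?.transform = .identity
        }, completion: { _ in
            dot.view.removeFromSuperview()
        })
    }
    if wasDeleted {
        dot.play(completion: dismiss)   // let the bin animation finish first
    } else {
        dismiss()
    }
}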
@@ -1037,24 +1104,16 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
if let audioRecordingCancelIndicator = self.audioRecordingCancelIndicator {
self.audioRecordingCancelIndicator = nil
if transition.isAnimated {
-if audioRecordingCancelIndicator.isDisplayingCancel {
-audioRecordingCancelIndicator.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
-audioRecordingCancelIndicator.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -22.0), duration: 0.25, removeOnCompletion: false, additive: true, completion: { [weak audioRecordingCancelIndicator] _ in
-audioRecordingCancelIndicator?.removeFromSupernode()
-})
-} else {
-let position = audioRecordingCancelIndicator.layer.position
-audioRecordingCancelIndicator.layer.animatePosition(from: position, to: CGPoint(x: 0.0 - audioRecordingCancelIndicator.bounds.size.width, y: position.y), duration: 0.3, removeOnCompletion: false, completion: { [weak audioRecordingCancelIndicator] _ in
-audioRecordingCancelIndicator?.removeFromSupernode()
-})
-}
+audioRecordingCancelIndicator.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, completion: { [weak audioRecordingCancelIndicator] _ in
+audioRecordingCancelIndicator?.removeFromSupernode()
+})
} else {
audioRecordingCancelIndicator.removeFromSupernode()
}
}
}
-transition.updateFrame(layer: self.attachmentButton.layer, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - minimalHeight + audioRecordingItemsVerticalOffset), size: CGSize(width: 40.0, height: minimalHeight)))
+transition.updateFrame(layer: self.attachmentButton.layer, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - minimalHeight), size: CGSize(width: 40.0, height: minimalHeight)))
transition.updateFrame(node: self.attachmentButtonDisabledNode, frame: self.attachmentButton.frame)
var composeButtonsOffset: CGFloat = 0.0
@@ -1113,8 +1172,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.searchLayoutClearImageNode.frame = CGRect(origin: CGPoint(x: floor((searchLayoutClearButtonSize.width - image.size.width) / 2.0), y: floor((searchLayoutClearButtonSize.height - image.size.height) / 2.0)), size: image.size)
}
-let textInputFrame = CGRect(x: leftInset + textFieldInsets.left, y: textFieldInsets.top + audioRecordingItemsVerticalOffset, width: baseWidth - textFieldInsets.left - textFieldInsets.right + textInputBackgroundWidthOffset, height: panelHeight - textFieldInsets.top - textFieldInsets.bottom)
+let textInputFrame = CGRect(x: leftInset + textFieldInsets.left, y: textFieldInsets.top, width: baseWidth - textFieldInsets.left - textFieldInsets.right + textInputBackgroundWidthOffset, height: panelHeight - textFieldInsets.top - textFieldInsets.bottom)
transition.updateFrame(node: self.textInputContainer, frame: textInputFrame)
+transition.updateAlpha(node: self.textInputContainer, alpha: audioRecordingItemsAlpha)
if let textInputNode = self.textInputNode {
let textFieldFrame = CGRect(origin: CGPoint(x: self.textInputViewInternalInsets.left, y: self.textInputViewInternalInsets.top), size: CGSize(width: textInputFrame.size.width - (self.textInputViewInternalInsets.left + self.textInputViewInternalInsets.right + accessoryButtonsWidth), height: textInputFrame.size.height - self.textInputViewInternalInsets.top - textInputViewInternalInsets.bottom))
@@ -1143,7 +1203,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
let _ = placeholderApply()
-contextPlaceholderNode.frame = CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + audioRecordingItemsVerticalOffset + UIScreenPixel), size: placeholderSize.size)
+contextPlaceholderNode.frame = CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + UIScreenPixel), size: placeholderSize.size)
+contextPlaceholderNode.alpha = audioRecordingItemsAlpha
} else if let contextPlaceholderNode = self.contextPlaceholderNode {
self.contextPlaceholderNode = nil
contextPlaceholderNode.removeFromSupernode()
@@ -1159,9 +1220,10 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.slowmodePlaceholderNode = slowmodePlaceholderNode
self.insertSubnode(slowmodePlaceholderNode, aboveSubnode: self.textPlaceholderNode)
}
-let placeholderFrame = CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + audioRecordingItemsVerticalOffset + UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - textFieldInsets.left - textFieldInsets.right - self.textInputViewInternalInsets.left - self.textInputViewInternalInsets.right - accessoryButtonsWidth, height: 30.0))
+let placeholderFrame = CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - textFieldInsets.left - textFieldInsets.right - self.textInputViewInternalInsets.left - self.textInputViewInternalInsets.right - accessoryButtonsWidth, height: 30.0))
slowmodePlaceholderNode.updateState(slowmodeState)
slowmodePlaceholderNode.frame = placeholderFrame
+slowmodePlaceholderNode.alpha = audioRecordingItemsAlpha
slowmodePlaceholderNode.updateLayout(size: placeholderFrame.size)
} else if let slowmodePlaceholderNode = self.slowmodePlaceholderNode {
self.slowmodePlaceholderNode = nil
@@ -1181,11 +1243,13 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.slowmodePlaceholderNode?.isHidden = true
}
-transition.updateFrame(node: self.textPlaceholderNode, frame: CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + audioRecordingItemsVerticalOffset + UIScreenPixel), size: self.textPlaceholderNode.frame.size))
+transition.updateFrame(node: self.textPlaceholderNode, frame: CGRect(origin: CGPoint(x: leftInset + textFieldInsets.left + self.textInputViewInternalInsets.left, y: textFieldInsets.top + self.textInputViewInternalInsets.top + self.textInputViewRealInsets.top + UIScreenPixel), size: self.textPlaceholderNode.frame.size))
+transition.updateAlpha(node: self.textPlaceholderNode, alpha: audioRecordingItemsAlpha)
-transition.updateFrame(layer: self.textInputBackgroundNode.layer, frame: CGRect(x: leftInset + textFieldInsets.left, y: textFieldInsets.top + audioRecordingItemsVerticalOffset, width: baseWidth - textFieldInsets.left - textFieldInsets.right + textInputBackgroundWidthOffset, height: panelHeight - textFieldInsets.top - textFieldInsets.bottom))
+transition.updateFrame(layer: self.textInputBackgroundNode.layer, frame: CGRect(x: leftInset + textFieldInsets.left, y: textFieldInsets.top, width: baseWidth - textFieldInsets.left - textFieldInsets.right + textInputBackgroundWidthOffset, height: panelHeight - textFieldInsets.top - textFieldInsets.bottom))
+transition.updateAlpha(node: self.textInputBackgroundNode, alpha: audioRecordingItemsAlpha)
-var nextButtonTopRight = CGPoint(x: width - rightInset - textFieldInsets.right - accessoryButtonInset, y: panelHeight - textFieldInsets.bottom - minimalInputHeight + audioRecordingItemsVerticalOffset)
+var nextButtonTopRight = CGPoint(x: width - rightInset - textFieldInsets.right - accessoryButtonInset, y: panelHeight - textFieldInsets.bottom - minimalInputHeight)
for (_, button) in self.accessoryItemButtons.reversed() {
let buttonSize = CGSize(width: button.buttonWidth, height: minimalInputHeight)
button.updateLayout(size: buttonSize)