Merge branch 'experimental-2'

This commit is contained in:
Ali 2020-07-10 19:58:00 +04:00
commit b88b4e677d
21 changed files with 416 additions and 106 deletions

View File

@ -27,6 +27,42 @@ internal:
- build/artifacts/Telegram.DSYMs.zip
expire_in: 1 week
experimental:
tags:
- ios_internal
stage: build
only:
- experimental
except:
- tags
script:
- bash buildbox/build-telegram.sh appcenter-experimental
- bash buildbox/deploy-telegram.sh appcenter-experimental
environment:
name: experimental
artifacts:
paths:
- build/artifacts/Telegram.DSYMs.zip
expire_in: 1 week
# Experimental build from the `experimental-2` branch, deployed to App Center.
# NOTE: this job must be named distinctly from the `experimental` job above —
# duplicate YAML mapping keys are invalid and would make one job silently
# override the other in the pipeline configuration.
experimental-2:
  tags:
    - ios_internal
  stage: build
  only:
    - experimental-2
  except:
    - tags
  script:
    - bash buildbox/build-telegram.sh appcenter-experimental-2
    - bash buildbox/deploy-telegram.sh appcenter-experimental-2
  environment:
    name: experimental-2
  artifacts:
    paths:
      - build/artifacts/Telegram.DSYMs.zip
    expire_in: 1 week
beta_testflight:
tags:
- ios_beta

View File

@ -57,7 +57,7 @@ cp "$BUCK" "tools/buck"
BUILD_CONFIGURATION="$1"
if [ "$BUILD_CONFIGURATION" == "hockeyapp" ]; then
if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental-2" ]; then
CODESIGNING_SUBPATH="transient-data/codesigning"
CODESIGNING_TEAMS_SUBPATH="transient-data/teams"
elif [ "$BUILD_CONFIGURATION" == "appstore" ]; then
@ -89,7 +89,7 @@ fi
BASE_DIR=$(pwd)
if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "appstore" ]; then
if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental" ] || [ "$BUILD_CONFIGURATION" == "appcenter-experimental-2" ] || [ "$BUILD_CONFIGURATION" == "appstore" ]; then
if [ ! `which setup-telegram-build.sh` ]; then
echo "setup-telegram-build.sh not found in PATH $PATH"
exit 1
@ -101,7 +101,13 @@ if [ "$BUILD_CONFIGURATION" == "hockeyapp" ] || [ "$BUILD_CONFIGURATION" == "app
source `which setup-telegram-build.sh`
setup_telegram_build "$BUILD_CONFIGURATION" "$BASE_DIR/$BUILDBOX_DIR/transient-data"
source `which setup-codesigning.sh`
setup_codesigning "$BUILD_CONFIGURATION" "$BASE_DIR/$BUILDBOX_DIR/transient-data"
# Pick the codesigning profile set: experimental App Center builds reuse the
# hockeyapp signing identity instead of having one of their own.
CODESIGNING_CONFIGURATION="$BUILD_CONFIGURATION"
case "$BUILD_CONFIGURATION" in
  "appcenter-experimental" | "appcenter-experimental-2")
    CODESIGNING_CONFIGURATION="hockeyapp"
    ;;
esac
setup_codesigning "$CODESIGNING_CONFIGURATION" "$BASE_DIR/$BUILDBOX_DIR/transient-data"
if [ "$SETUP_TELEGRAM_BUILD_VERSION" != "$BUILD_TELEGRAM_VERSION" ]; then
echo "setup-telegram-build.sh script version doesn't match"
exit 1

View File

@ -31,7 +31,7 @@ else
BUILD_NUMBER="$2"
fi
if [ "$CONFIGURATION" == "hockeyapp" ]; then
if [ "$CONFIGURATION" == "hockeyapp" ] || [ "$CONFIGURATION" == "appcenter-experimental" ] || [ "$CONFIGURATION" == "appcenter-experimental-2" ]; then
FASTLANE_PASSWORD=""
FASTLANE_ITC_TEAM_NAME=""
elif [ "$CONFIGURATION" == "appstore" ]; then
@ -62,4 +62,6 @@ if [ "$1" == "appstore" ]; then
FASTLANE_PASSWORD="$FASTLANE_PASSWORD" xcrun altool --upload-app --type ios --file "$IPA_PATH" --username "$FASTLANE_ITC_USERNAME" --password "@env:FASTLANE_PASSWORD"
elif [ "$1" == "hockeyapp" ]; then
API_USER_NAME="$API_USER_NAME" API_APP_NAME="$API_APP_NAME" API_TOKEN="$API_TOKEN" sh buildbox/deploy-appcenter.sh
elif [ "$1" == "appcenter-experimental" ] || [ "$1" == "appcenter-experimental-2" ]; then
API_USER_NAME="$API_USER_NAME" API_APP_NAME="$API_APP_NAME" API_TOKEN="$API_TOKEN" sh buildbox/deploy-appcenter.sh
fi

View File

@ -12,7 +12,7 @@ if [ -z "COMMIT_ID" ]; then
exit 1
fi
if [ "$1" == "hockeyapp" ] || [ "$1" == "testinghockeyapp" ]; then
if [ "$1" == "hockeyapp" ] || [ "$1" == "appcenter-experimental" ] || [ "$1" == "appcenter-experimental-2" ] || [ "$1" == "testinghockeyapp" ]; then
CERTS_PATH="$HOME/codesigning_data/certs"
PROFILES_PATH="$HOME/codesigning_data/profiles"
elif [ "$1" == "testinghockeyapp-local" ]; then
@ -99,7 +99,7 @@ for f in $(ls "$PROFILES_PATH"); do
cp -f "$PROFILE_PATH" "$HOME/Library/MobileDevice/Provisioning Profiles/$uuid.mobileprovision"
done
if [ "$1" == "hockeyapp" ]; then
if [ "$1" == "hockeyapp" ] || [ "$1" == "appcenter-experimental" ] || [ "$1" == "appcenter-experimental-2" ]; then
BUILD_ENV_SCRIPT="../telegram-ios-shared/buildbox/bin/internal.sh"
APP_TARGET="app_arm64"
elif [ "$1" == "appstore" ]; then
@ -120,6 +120,12 @@ if [ -d "$BUCK_DIR_CACHE" ]; then
sudo chown telegram "$BUCK_DIR_CACHE"
fi
# Export the App Center app ID matching the requested experimental
# configuration; other configurations leave APP_CENTER_ID untouched.
case "$1" in
  "appcenter-experimental")
    export APP_CENTER_ID="$APP_CENTER_EXPERIMENTAL_ID"
    ;;
  "appcenter-experimental-2")
    export APP_CENTER_ID="$APP_CENTER_EXPERIMENTAL_2_ID"
    ;;
esac
BUCK="$(pwd)/tools/buck" BUCK_HTTP_CACHE="$BUCK_HTTP_CACHE" BUCK_CACHE_MODE="$BUCK_CACHE_MODE" BUCK_DIR_CACHE="$BUCK_DIR_CACHE" LOCAL_CODESIGNING=1 sh "$BUILD_ENV_SCRIPT" make "$APP_TARGET"
OUTPUT_PATH="build/artifacts"

View File

@ -224,6 +224,10 @@ public extension CALayer {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.scale", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
}
/// Animates the layer's vertical scale (`transform.scale.y`) between two values
/// by delegating to the generic `animate` helper on this extension.
func animateScaleY(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
    // Core Animation key-path animations take NSNumber endpoints.
    let startValue = NSNumber(value: Float(from))
    let endValue = NSNumber(value: Float(to))
    self.animate(from: startValue, to: endValue, keyPath: "transform.scale.y", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
}
/// Animates the layer's z-axis rotation (`transform.rotation.z`) between two
/// values by delegating to the generic `animate` helper on this extension.
func animateRotation(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
    // Core Animation key-path animations take NSNumber endpoints.
    let startValue = NSNumber(value: Float(from))
    let endValue = NSNumber(value: Float(to))
    self.animate(from: startValue, to: endValue, keyPath: "transform.rotation.z", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
}

View File

@ -13,6 +13,8 @@
- (void)updateLevel:(CGFloat)level;
- (void)tick:(CGFloat)level;
- (void)setColor:(UIColor *)color;
- (void)stopAnimating;
- (void)startAnimating;
@end
@ -77,7 +79,7 @@
@property (nonatomic) bool fadeDisabled;
- (void)animateIn;
- (void)animateOut;
- (void)animateOut:(BOOL)toSmallSize;
- (void)addMicLevel:(CGFloat)level;
- (void)dismiss;

View File

@ -500,7 +500,7 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
}
}
- (void)animateOut {
- (void)animateOut:(BOOL)toSmallSize {
_locked = false;
_animatedIn = false;
_displayLink.paused = true;
@ -511,15 +511,20 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
_cancelTargetTranslation = 0;
_currentScale = 1.0f;
[UIView animateWithDuration:0.18 animations:^{
_innerIconWrapperView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
_innerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
_outerCircleView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
_decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
if (toSmallSize) {
_decoration.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.33f, 0.33f), CGAffineTransformMakeTranslation(-4, 0));
_innerIconWrapperView.transform = CGAffineTransformConcat(CGAffineTransformMakeScale(0.492f, 0.492f), CGAffineTransformMakeTranslation(-TGScreenPixel, 0));
} else {
_decoration.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
_decoration.alpha = 0.0;
_innerIconWrapperView.transform = CGAffineTransformMakeScale(0.2f, 0.2f);
_innerIconWrapperView.alpha = 0.0f;
}
_innerCircleView.alpha = 0.0f;
_outerCircleView.alpha = 0.0f;
_decoration.alpha = 0.0f;
self.iconView.alpha = 1.0f;
_innerIconWrapperView.alpha = 0.0f;
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, 100.0f);
transform = CGAffineTransformScale(transform, 0.2f, 0.2f);

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -4,13 +4,16 @@ import Display
import AsyncDisplayKit
private final class AudioWaveformNodeParameters: NSObject {
let waveform: AudioWaveform?
let color: UIColor?
let gravity: AudioWaveformNode.Gravity?
let progress: CGFloat?
init(waveform: AudioWaveform?, color: UIColor?, progress: CGFloat?) {
init(waveform: AudioWaveform?, color: UIColor?, gravity: AudioWaveformNode.Gravity?, progress: CGFloat?) {
self.waveform = waveform
self.color = color
self.gravity = gravity
self.progress = progress
super.init()
@ -18,8 +21,16 @@ private final class AudioWaveformNodeParameters: NSObject {
}
final class AudioWaveformNode: ASDisplayNode {
enum Gravity {
case bottom
case center
}
private var waveform: AudioWaveform?
private var color: UIColor?
private var gravity: Gravity?
var progress: CGFloat? {
didSet {
@ -48,16 +59,17 @@ final class AudioWaveformNode: ASDisplayNode {
}
}
func setup(color: UIColor, waveform: AudioWaveform?) {
if self.color == nil || !self.color!.isEqual(color) || self.waveform != waveform {
func setup(color: UIColor, gravity: Gravity, waveform: AudioWaveform?) {
if self.color == nil || !self.color!.isEqual(color) || self.waveform != waveform || self.gravity != gravity {
self.color = color
self.gravity = gravity
self.waveform = waveform
self.setNeedsDisplay()
}
}
override func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, progress: self.progress)
return AudioWaveformNodeParameters(waveform: self.waveform, color: self.color, gravity: self.gravity, progress: self.progress)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@ -128,12 +140,26 @@ final class AudioWaveformNode: ASDisplayNode {
diff = sampleWidth * 1.5
}
let gravityMultiplierY: CGFloat = {
switch parameters.gravity ?? .bottom {
case .bottom:
return 1
case .center:
return 0.5
}
}()
let adjustedSampleHeight = sampleHeight - diff
if adjustedSampleHeight.isLessThanOrEqualTo(sampleWidth) {
context.fillEllipse(in: CGRect(x: offset, y: size.height - sampleWidth, width: sampleWidth, height: sampleWidth))
context.fill(CGRect(x: offset, y: size.height - halfSampleWidth, width: sampleWidth, height: halfSampleWidth))
context.fillEllipse(in: CGRect(x: offset, y: (size.height - sampleWidth) * gravityMultiplierY, width: sampleWidth, height: sampleWidth))
context.fill(CGRect(x: offset, y: (size.height - halfSampleWidth) * gravityMultiplierY, width: sampleWidth, height: halfSampleWidth))
} else {
let adjustedRect = CGRect(x: offset, y: size.height - adjustedSampleHeight, width: sampleWidth, height: adjustedSampleHeight)
let adjustedRect = CGRect(
x: offset,
y: (size.height - adjustedSampleHeight) * gravityMultiplierY,
width: sampleWidth,
height: adjustedSampleHeight
)
context.fill(adjustedRect)
context.fillEllipse(in: CGRect(x: adjustedRect.minX, y: adjustedRect.minY - halfSampleWidth, width: sampleWidth, height: sampleWidth))
context.fillEllipse(in: CGRect(x: adjustedRect.minX, y: adjustedRect.maxY - halfSampleWidth, width: sampleWidth, height: sampleWidth))

View File

@ -78,6 +78,16 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
bigBlob.level = normalizedLevel
}
/// Scales the medium and big blob layers up from half size when activated.
/// Animations are left in place (`removeOnCompletion: false`) so the final
/// transform persists.
func startAnimating() {
    for blobLayer in [mediumBlob.layer, bigBlob.layer] {
        blobLayer.animateScale(from: 0.5, to: 1, duration: 0.1, removeOnCompletion: false)
    }
}
/// Scales the medium and big blob layers back down to half size when
/// deactivated, mirroring `startAnimating`.
func stopAnimating() {
    for blobLayer in [mediumBlob.layer, bigBlob.layer] {
        blobLayer.animateScale(from: 1.0, to: 0.5, duration: 0.1, removeOnCompletion: false)
    }
}
override func layoutSubviews() {
super.layoutSubviews()

View File

@ -2493,11 +2493,15 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
let isLocked = strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId
if let audioRecorder = audioRecorder {
if panelState.mediaRecordingState == nil {
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId))
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: isLocked))
}
} else {
if case .waitingForPreview = panelState.mediaRecordingState {
return panelState
}
return panelState.withUpdatedMediaRecordingState(nil)
}
return panelState
@ -7316,18 +7320,30 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.chatDisplayNode.updateRecordedMediaDeleted(true)
break
case .preview:
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.waitingForPreview)
}
})
let _ = (audioRecorderValue.takenRecordedData() |> deliverOnMainQueue).start(next: { [weak self] data in
if let strongSelf = self, let data = data {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
strongSelf.recorderFeedback = nil
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
} else if let waveform = data.waveform {
let resource = LocalFileMediaResource(fileId: arc4random64(), size: data.compressedData.count)
strongSelf.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)))
$0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))).updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
strongSelf.recorderFeedback = nil
}
@ -7425,12 +7441,14 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
/// Flags the pending recording as deleted on the display node, then clears
/// the recorded-media preview from the presentation interface state.
private func deleteMediaRecording() {
    self.chatDisplayNode.updateRecordedMediaDeleted(true)
    self.updateChatPresentationInterfaceState(animated: true, interactive: true, { state in
        return state.updatedRecordedMediaPreview(nil)
    })
}
private func sendMediaRecording() {
self.chatDisplayNode.updateRecordedMediaDeleted(false)
if let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview {
if let _ = self.presentationInterfaceState.slowmodeState, !self.presentationInterfaceState.isScheduledMessages {
if let rect = self.chatDisplayNode.frameForInputActionButton() {

View File

@ -1121,6 +1121,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
var immediatelyLayoutInputPanelAndAnimateAppearance = false
var secondaryInputPanelSize: CGSize?
var immediatelyLayoutSecondaryInputPanelAndAnimateAppearance = false
var inputPanelNodeHandlesTransition = false
let inputPanelNodes = inputPanelForChatPresentationIntefaceState(self.chatPresentationInterfaceState, context: self.context, currentPanel: self.inputPanelNode, currentSecondaryPanel: self.secondaryInputPanelNode, textInputPanelNode: self.textInputPanelNode, interfaceInteraction: self.interfaceInteraction)
@ -1132,11 +1133,18 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
}
let _ = inputTextPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
}
dismissedInputPanelNode = self.inputPanelNode
let inputPanelHeight = inputPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: inputPanelNode.supernode == nil ? .immediate : transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
if let prevInputPanelNode = self.inputPanelNode, inputPanelNode.canHandleTransition(from: prevInputPanelNode) {
inputPanelNodeHandlesTransition = true
inputPanelNode.removeFromSupernode()
inputPanelNode.prevInputPanelNode = prevInputPanelNode
inputPanelNode.addSubnode(prevInputPanelNode)
} else {
dismissedInputPanelNode = self.inputPanelNode
}
let inputPanelHeight = inputPanelNode.updateLayout(width: layout.size.width, leftInset: layout.safeInsets.left, rightInset: layout.safeInsets.right, maxHeight: layout.size.height - insets.top - insets.bottom, isSecondary: false, transition: inputPanelNode.supernode !== self ? .immediate : transition, interfaceState: self.chatPresentationInterfaceState, metrics: layout.metrics)
inputPanelSize = CGSize(width: layout.size.width, height: inputPanelHeight)
self.inputPanelNode = inputPanelNode
if inputPanelNode.supernode == nil {
if inputPanelNode.supernode !== self {
immediatelyLayoutInputPanelAndAnimateAppearance = true
self.insertSubnode(inputPanelNode, aboveSubnode: self.inputPanelBackgroundNode)
}
@ -1547,21 +1555,6 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
transition.animatePositionAdditive(node: titleAccessoryPanelNode, offset: CGPoint(x: 0.0, y: -titleAccessoryPanelFrame.height))
}
if let inputPanelNode = self.inputPanelNode, let apparentInputPanelFrame = apparentInputPanelFrame, !inputPanelNode.frame.equalTo(apparentInputPanelFrame) {
if immediatelyLayoutInputPanelAndAnimateAppearance {
inputPanelNode.frame = apparentInputPanelFrame.offsetBy(dx: 0.0, dy: apparentInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentInputBackgroundFrame.maxY)
inputPanelNode.alpha = 0.0
}
if !transition.isAnimated {
inputPanelNode.layer.removeAllAnimations()
if let currentDismissedInputPanelNode = self.currentDismissedInputPanelNode, inputPanelNode is ChatSearchInputPanelNode {
currentDismissedInputPanelNode.layer.removeAllAnimations()
}
}
transition.updateFrame(node: inputPanelNode, frame: apparentInputPanelFrame)
transition.updateAlpha(node: inputPanelNode, alpha: 1.0)
}
if let secondaryInputPanelNode = self.secondaryInputPanelNode, let apparentSecondaryInputPanelFrame = apparentSecondaryInputPanelFrame, !secondaryInputPanelNode.frame.equalTo(apparentSecondaryInputPanelFrame) {
if immediatelyLayoutSecondaryInputPanelAndAnimateAppearance {
secondaryInputPanelNode.frame = apparentSecondaryInputPanelFrame.offsetBy(dx: 0.0, dy: apparentSecondaryInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentSecondaryInputPanelFrame.maxY)
@ -1646,6 +1639,28 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
})
}
if let inputPanelNode = self.inputPanelNode,
let apparentInputPanelFrame = apparentInputPanelFrame,
!inputPanelNode.frame.equalTo(apparentInputPanelFrame) {
if immediatelyLayoutInputPanelAndAnimateAppearance {
inputPanelNode.frame = apparentInputPanelFrame.offsetBy(dx: 0.0, dy: apparentInputPanelFrame.height + previousInputPanelBackgroundFrame.maxY - apparentInputBackgroundFrame.maxY)
inputPanelNode.alpha = 0.0
}
if !transition.isAnimated {
inputPanelNode.layer.removeAllAnimations()
if let currentDismissedInputPanelNode = self.currentDismissedInputPanelNode, inputPanelNode is ChatSearchInputPanelNode {
currentDismissedInputPanelNode.layer.removeAllAnimations()
}
}
if inputPanelNodeHandlesTransition {
inputPanelNode.frame = apparentInputPanelFrame
inputPanelNode.alpha = 1.0
} else {
transition.updateFrame(node: inputPanelNode, frame: apparentInputPanelFrame)
transition.updateAlpha(node: inputPanelNode, alpha: 1.0)
}
}
if let dismissedInputPanelNode = dismissedInputPanelNode, dismissedInputPanelNode !== self.secondaryInputPanelNode {
var frameCompleted = false
var alphaCompleted = false

View File

@ -10,6 +10,7 @@ import AccountContext
class ChatInputPanelNode: ASDisplayNode {
var context: AccountContext?
var interfaceInteraction: ChatPanelInterfaceInteraction?
var prevInputPanelNode: ChatInputPanelNode?
func updateLayout(width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, maxHeight: CGFloat, isSecondary: Bool, transition: ContainedViewLayoutTransition, interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {
return 0.0
@ -26,4 +27,8 @@ class ChatInputPanelNode: ASDisplayNode {
return 45.0
}
}
/// Whether this panel can take over the appearance transition from the
/// previously shown input panel. The base implementation opts out; subclasses
/// (e.g. the recording-preview panel) override this to animate the swap
/// themselves instead of having the old panel dismissed independently.
func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
    return false
}
}

View File

@ -609,8 +609,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} else {
waveformColor = messageTheme.mediaInactiveControlColor
}
strongSelf.waveformNode.setup(color: waveformColor, waveform: audioWaveform)
strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, waveform: audioWaveform)
strongSelf.waveformNode.setup(color: waveformColor, gravity: .bottom, waveform: audioWaveform)
strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, gravity: .bottom, waveform: audioWaveform)
} else if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
strongSelf.waveformScrubbingNode = nil
waveformScrubbingNode.removeFromSupernode()

View File

@ -10,6 +10,7 @@ import TelegramPresentationData
import UniversalMediaPlayer
import AppBundle
import ContextUI
import AnimationUI
private func generatePauseIcon(_ theme: PresentationTheme) -> UIImage? {
return generateTintedImage(image: UIImage(bundleImageName: "GlobalMusicPlayer/MinimizedPause"), color: theme.chat.inputPanel.actionControlForegroundColor)
@ -24,31 +25,44 @@ extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {
}
final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
private let deleteButton: HighlightableButtonNode
let deleteButton: HighlightableButtonNode
let binNode: AnimationNode
let sendButton: HighlightTrackingButtonNode
private var sendButtonRadialStatusNode: ChatSendButtonRadialStatusNode?
private let playButton: HighlightableButtonNode
private let pauseButton: HighlightableButtonNode
let playButton: HighlightableButtonNode
let pauseButton: HighlightableButtonNode
private let waveformButton: ASButtonNode
private let waveformBackgroundNode: ASImageNode
let waveformBackgroundNode: ASImageNode
private let waveformNode: AudioWaveformNode
private let waveformForegroundNode: AudioWaveformNode
private let waveformScubberNode: MediaPlayerScrubbingNode
let waveformScubberNode: MediaPlayerScrubbingNode
private var presentationInterfaceState: ChatPresentationInterfaceState?
private var mediaPlayer: MediaPlayer?
private let durationLabel: MediaPlayerTimeTextNode
let durationLabel: MediaPlayerTimeTextNode
private let statusDisposable = MetaDisposable()
private var gestureRecognizer: ContextGesture?
private(set) var gestureRecognizer: ContextGesture?
init(theme: PresentationTheme) {
self.deleteButton = HighlightableButtonNode()
self.deleteButton.displaysAsynchronously = false
self.deleteButton.setImage(generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Accessory Panels/MessageSelectionTrash"), color: theme.chat.inputPanel.panelControlAccentColor), for: [])
self.binNode = AnimationNode(
animation: "BinBlue",
colors: [
"Cap11.Cap2.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Bin 5.Bin.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Cap12.Cap1.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Line15.Line1.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Line13.Line3.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Line14.Line2.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Line13.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
]
)
self.sendButton = HighlightTrackingButtonNode()
self.sendButton.displaysAsynchronously = false
@ -87,8 +101,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
super.init()
self.addSubnode(self.deleteButton)
self.addSubnode(self.sendButton)
self.deleteButton.addSubnode(binNode)
self.addSubnode(self.waveformBackgroundNode)
self.addSubnode(self.sendButton)
self.addSubnode(self.waveformScubberNode)
self.addSubnode(self.playButton)
self.addSubnode(self.pauseButton)
@ -144,8 +159,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.presentationInterfaceState = interfaceState
if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform {
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), waveform: recordedMediaPreview.waveform)
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, waveform: recordedMediaPreview.waveform)
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: recordedMediaPreview.waveform)
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: recordedMediaPreview.waveform)
if self.mediaPlayer != nil {
self.mediaPlayer?.pause()
@ -175,8 +190,9 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
let panelHeight = defaultHeight(metrics: metrics)
transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset, y: -1.0), size: CGSize(width: 48.0, height: panelHeight)))
transition.updateFrame(node: self.deleteButton, frame: CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40)))
transition.updateFrame(node: self.sendButton, frame: CGRect(origin: CGPoint(x: width - rightInset - 43.0 - UIScreenPixel, y: -UIScreenPixel), size: CGSize(width: 44.0, height: panelHeight)))
self.binNode.frame = self.deleteButton.bounds
if let slowmodeState = interfaceState.slowmodeState, !interfaceState.isScheduledMessages {
let sendButtonRadialStatusNode: ChatSendButtonRadialStatusNode
@ -203,15 +219,73 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
transition.updateFrame(node: self.playButton, frame: CGRect(origin: CGPoint(x: leftInset + 52.0, y: 10.0), size: CGSize(width: 26.0, height: 26.0)))
transition.updateFrame(node: self.pauseButton, frame: CGRect(origin: CGPoint(x: leftInset + 50.0, y: 10.0), size: CGSize(width: 26.0, height: 26.0)))
transition.updateFrame(node: self.waveformBackgroundNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0)))
let waveformBackgroundFrame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0))
transition.updateFrame(node: self.waveformBackgroundNode, frame: waveformBackgroundFrame)
transition.updateFrame(node: self.waveformButton, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 0.0), size: CGSize(width: width - leftInset - rightInset - 90.0, height: panelHeight)))
transition.updateFrame(node: self.waveformScubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0)))
transition.updateFrame(node: self.durationLabel, frame: CGRect(origin: CGPoint(x: width - rightInset - 90.0 - 4.0, y: 15.0), size: CGSize(width: 35.0, height: 20.0)))
prevInputPanelNode?.frame = CGRect(origin: .zero, size: CGSize(width: width, height: panelHeight))
if let prevTextInputPanelNode = prevInputPanelNode as? ChatTextInputPanelNode {
self.prevInputPanelNode = nil
if let audioRecordingDotNode = prevTextInputPanelNode.audioRecordingDotNode {
audioRecordingDotNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
audioRecordingDotNode.layer.removeAllAnimations()
audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1.0), to: 0.0, duration: 0.15, removeOnCompletion: false)
}
if let audioRecordingTimeNode = prevTextInputPanelNode.audioRecordingTimeNode {
audioRecordingTimeNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
audioRecordingTimeNode.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
let timePosition = audioRecordingTimeNode.position
audioRecordingTimeNode.layer.animatePosition(from: timePosition, to: CGPoint(x: timePosition.x - 20, y: timePosition.y), duration: 0.15, removeOnCompletion: false)
}
if let audioRecordingCancelIndicator = prevTextInputPanelNode.audioRecordingCancelIndicator {
audioRecordingCancelIndicator.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
}
prevTextInputPanelNode.actionButtons.micButton.animateOut(true)
self.deleteButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15)
self.deleteButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
self.playButton.layer.animateScale(from: 0.01, to: 1.0, duration: 0.3, delay: 0.1)
self.playButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
self.pauseButton.layer.animateScale(from: 0.01, to: 1.0, duration: 0.3, delay: 0.1)
self.pauseButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
self.durationLabel.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.waveformScubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.3, delay: 0.1)
self.waveformScubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
self.waveformBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
self.waveformBackgroundNode.layer.animateFrame(
from: self.sendButton.frame.insetBy(dx: 5.5, dy: 5.5),
to: waveformBackgroundFrame,
duration: 0.2,
delay: 0.12,
timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue,
removeOnCompletion: false
) { [weak self, weak prevTextInputPanelNode] finished in
if finished, prevTextInputPanelNode?.supernode === self {
prevTextInputPanelNode?.removeFromSupernode()
}
}
}
return panelHeight
}
/// This panel animates its own appearance only when replacing the text input
/// panel (the recording → preview transition); any other predecessor falls
/// back to the default dismiss/appear behavior.
override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
    return prevInputPanelNode is ChatTextInputPanelNode
}
/// Stops any in-progress preview playback before delegating deletion of the
/// recorded media to the interface interaction.
@objc func deletePressed() {
    if let player = self.mediaPlayer {
        player.pause()
    }
    self.interfaceInteraction?.deleteRecordedMedia()
}
@ -230,15 +304,5 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
/// The frame of the send button, used as the anchor rect for UI presented
/// from the panel's action button (e.g. the slowmode tooltip).
func frameForInputActionButton() -> CGRect? {
    return self.sendButton.frame
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
if self.deleteButton.frame.contains(point) {
return self.deleteButton.view
}
if self.sendButton.frame.contains(point) {
return self.sendButton.view
}
return super.hitTest(point, with: event)
}
}

View File

@ -231,7 +231,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
if self.hasRecorder {
self.animateIn()
} else {
self.animateOut()
self.animateOut(false)
}
}
}
@ -424,16 +424,25 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
override func animateIn() {
super.animateIn()
micDecoration.startAnimating()
innerIconView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
innerIconView.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
}
override func animateOut() {
super.animateOut()
innerIconView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, removeOnCompletion: false)
innerIconView.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, removeOnCompletion: false)
override func animateOut(_ toSmallSize: Bool) {
super.animateOut(toSmallSize)
micDecoration.stopAnimating()
if toSmallSize {
micDecoration.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.03, delay: 0.15, removeOnCompletion: false)
} else {
micDecoration.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.18, removeOnCompletion: false)
innerIconView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, removeOnCompletion: false)
innerIconView.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, removeOnCompletion: false)
}
}
private var previousSize = CGSize()

View File

@ -214,6 +214,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
var audioRecordingDotNode: AnimationNode?
var audioRecordingTimeNode: ChatTextInputAudioRecordingTimeNode?
var audioRecordingCancelIndicator: ChatTextInputAudioRecordingCancelIndicator?
var animatingBinNode: AnimationNode?
private var accessoryItemButtons: [(ChatTextInputAccessoryItem, AccessoryItemIconButton)] = []
@ -906,7 +907,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
var hideMicButton = false
var audioRecordingItemsAlpha: CGFloat = 1
if let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState {
let mediaRecordingState = interfaceState.inputTextPanelState.mediaRecordingState
if mediaRecordingState != nil || interfaceState.recordedMediaPreview != nil {
audioRecordingItemsAlpha = 0
let audioRecordingInfoContainerNode: ASDisplayNode
@ -927,7 +929,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.audioRecordingTimeNode = audioRecordingTimeNode
audioRecordingInfoContainerNode.addSubnode(audioRecordingTimeNode)
if transition.isAnimated {
if transition.isAnimated && mediaRecordingState != nil {
animateTimeSlideIn = true
}
}
@ -938,7 +940,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
if let currentAudioRecordingCancelIndicator = self.audioRecordingCancelIndicator {
audioRecordingCancelIndicator = currentAudioRecordingCancelIndicator
} else {
animateCancelSlideIn = transition.isAnimated
animateCancelSlideIn = transition.isAnimated && mediaRecordingState != nil
audioRecordingCancelIndicator = ChatTextInputAudioRecordingCancelIndicator(theme: interfaceState.theme, strings: interfaceState.strings, cancel: { [weak self] in
self?.interfaceInteraction?.finishMediaRecording(.dismiss)
@ -947,15 +949,16 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.insertSubnode(audioRecordingCancelIndicator, at: 0)
}
let isLocked = mediaRecordingState.isLocked
let isLocked = mediaRecordingState?.isLocked ?? (interfaceState.recordedMediaPreview != nil)
var hideInfo = false
switch mediaRecordingState {
case let .audio(recorder, _):
self.actionButtons.micButton.audioRecorder = recorder
audioRecordingTimeNode.audioRecorder = recorder
case let .video(status, _):
switch status {
if let mediaRecordingState = mediaRecordingState {
switch mediaRecordingState {
case let .audio(recorder, _):
self.actionButtons.micButton.audioRecorder = recorder
audioRecordingTimeNode.audioRecorder = recorder
case let .video(status, _):
switch status {
case let .recording(recordingStatus):
audioRecordingTimeNode.videoRecordingStatus = recordingStatus
self.actionButtons.micButton.videoRecordingStatus = recordingStatus
@ -967,6 +970,9 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.actionButtons.micButton.videoRecordingStatus = nil
hideMicButton = true
hideInfo = true
}
case .waitingForPreview:
break
}
}
@ -1001,7 +1007,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
audioRecordingCancelIndicator.layer.animatePosition(from: CGPoint(x: width + audioRecordingCancelIndicator.bounds.size.width, y: position.y), to: position, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
}
audioRecordingCancelIndicator.updateIsDisplayingCancel(isLocked, animated: !animateCancelSlideIn)
audioRecordingCancelIndicator.updateIsDisplayingCancel(isLocked, animated: !animateCancelSlideIn && mediaRecordingState != nil)
if isLocked || self.actionButtons.micButton.cancelTranslation > cancelTransformThreshold {
var deltaOffset: CGFloat = 0.0
@ -1046,9 +1052,11 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
audioRecordingDotNode = currentAudioRecordingDotNode
} else {
self.audioRecordingDotNode?.removeFromSupernode()
audioRecordingDotNode = AnimationNode(animation: "Bin")
audioRecordingDotNode = AnimationNode(animation: "BinRed")
self.audioRecordingDotNode = audioRecordingDotNode
self.addSubnode(audioRecordingDotNode)
self.animatingBinNode?.removeFromSupernode()
self.animatingBinNode = nil
}
animateDotAppearing = transition.isAnimated && !hideInfo
@ -1117,8 +1125,13 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
audioRecordingDotNode.layer.removeAllAnimations()
if self.isMediaDeleted {
audioRecordingDotNode.completion = dismissDotNode
audioRecordingDotNode.play()
if self.prevInputPanelNode is ChatRecordingPreviewInputPanelNode {
self.audioRecordingDotNode?.removeFromSupernode()
self.audioRecordingDotNode = nil
} else {
audioRecordingDotNode.completion = dismissDotNode
audioRecordingDotNode.play()
}
} else {
dismissDotNode()
}
@ -1333,9 +1346,87 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate {
self.updateActionButtons(hasText: hasText, hideMicButton: hideMicButton, animated: transition.isAnimated)
if let prevInputPanelNode = prevInputPanelNode {
prevInputPanelNode.frame = CGRect(origin: .zero, size: prevInputPanelNode.frame.size)
}
if let prevPreviewInputPanelNode = self.prevInputPanelNode as? ChatRecordingPreviewInputPanelNode {
self.prevInputPanelNode = nil
prevPreviewInputPanelNode.gestureRecognizer?.isEnabled = false
prevPreviewInputPanelNode.isUserInteractionEnabled = false
if self.isMediaDeleted {
func animatePosition(for previewSubnode: ASDisplayNode) {
previewSubnode.layer.animatePosition(
from: previewSubnode.position,
to: CGPoint(x: previewSubnode.position.x - 20, y: previewSubnode.position.y),
duration: 0.15
)
}
animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode)
animatePosition(for: prevPreviewInputPanelNode.waveformScubberNode)
animatePosition(for: prevPreviewInputPanelNode.durationLabel)
animatePosition(for: prevPreviewInputPanelNode.playButton)
animatePosition(for: prevPreviewInputPanelNode.pauseButton)
}
func animateAlpha(for previewSubnode: ASDisplayNode) {
previewSubnode.layer.animateAlpha(
from: 1.0,
to: 0.0,
duration: 0.15,
removeOnCompletion: false
)
}
animateAlpha(for: prevPreviewInputPanelNode.waveformBackgroundNode)
animateAlpha(for: prevPreviewInputPanelNode.waveformScubberNode)
animateAlpha(for: prevPreviewInputPanelNode.durationLabel)
animateAlpha(for: prevPreviewInputPanelNode.playButton)
animateAlpha(for: prevPreviewInputPanelNode.pauseButton)
let binNode = prevPreviewInputPanelNode.binNode
self.animatingBinNode = binNode
let dismissBin = { [weak self, weak prevPreviewInputPanelNode, weak binNode] in
if binNode?.supernode != nil {
prevPreviewInputPanelNode?.deleteButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, delay: 0, removeOnCompletion: false) { [weak prevPreviewInputPanelNode] _ in
if prevPreviewInputPanelNode?.supernode === self {
prevPreviewInputPanelNode?.removeFromSupernode()
}
}
prevPreviewInputPanelNode?.deleteButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false)
self?.attachmentButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
self?.attachmentButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
} else if prevPreviewInputPanelNode?.supernode === self {
prevPreviewInputPanelNode?.removeFromSupernode()
}
}
if self.isMediaDeleted {
binNode.completion = dismissBin
binNode.play()
} else {
dismissBin()
}
prevPreviewInputPanelNode.sendButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
prevPreviewInputPanelNode.sendButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
actionButtons.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
actionButtons.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15, delay: 0, removeOnCompletion: false)
prevPreviewInputPanelNode.sendButton.layer.animateScale(from: 1.0, to: 0.3, duration: 0.15, removeOnCompletion: false)
prevPreviewInputPanelNode.sendButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
}
return panelHeight
}
/// Reports whether this panel can run a custom hand-off animation from the
/// panel it is replacing. Only the recording-preview panel is supported.
override func canHandleTransition(from prevInputPanelNode: ChatInputPanelNode?) -> Bool {
    return (prevInputPanelNode as? ChatRecordingPreviewInputPanelNode) != nil
}
@objc func editableTextNodeDidUpdateText(_ editableTextNode: ASEditableTextNode) {
if let textInputNode = self.textInputNode, let presentationInterfaceState = self.presentationInterfaceState {
let baseFontSize = max(17.0, presentationInterfaceState.fontSize.baseDisplaySize)

View File

@ -63,39 +63,49 @@ enum ChatVideoRecordingStatus: Equatable {
enum ChatTextInputPanelMediaRecordingState: Equatable {
case audio(recorder: ManagedAudioRecorder, isLocked: Bool)
case video(status: ChatVideoRecordingStatus, isLocked: Bool)
case waitingForPreview
var isLocked: Bool {
switch self {
case let .audio(_, isLocked):
return isLocked
case let .video(_, isLocked):
return isLocked
case let .audio(_, isLocked):
return isLocked
case let .video(_, isLocked):
return isLocked
case .waitingForPreview:
return true
}
}
func withLocked(_ isLocked: Bool) -> ChatTextInputPanelMediaRecordingState {
switch self {
case let .audio(recorder, _):
return .audio(recorder: recorder, isLocked: isLocked)
case let .video(status, _):
return .video(status: status, isLocked: isLocked)
case let .audio(recorder, _):
return .audio(recorder: recorder, isLocked: isLocked)
case let .video(status, _):
return .video(status: status, isLocked: isLocked)
case .waitingForPreview:
return .waitingForPreview
}
}
static func ==(lhs: ChatTextInputPanelMediaRecordingState, rhs: ChatTextInputPanelMediaRecordingState) -> Bool {
switch lhs {
case let .audio(lhsRecorder, lhsIsLocked):
if case let .audio(rhsRecorder, rhsIsLocked) = rhs, lhsRecorder === rhsRecorder, lhsIsLocked == rhsIsLocked {
return true
} else {
return false
}
case let .video(status, isLocked):
if case .video(status, isLocked) = rhs {
return true
} else {
return false
}
case let .audio(lhsRecorder, lhsIsLocked):
if case let .audio(rhsRecorder, rhsIsLocked) = rhs, lhsRecorder === rhsRecorder, lhsIsLocked == rhsIsLocked {
return true
} else {
return false
}
case let .video(status, isLocked):
if case .video(status, isLocked) = rhs {
return true
} else {
return false
}
case .waitingForPreview:
if case .waitingForPreview = rhs {
return true
}
return false
}
}
}

View File

@ -26,7 +26,7 @@ private struct Constants {
static let idleRotationDiff: CGFloat = 0.1 * idleRotationSpeed
}
class CombinedWaveView: UIView, TGModernConversationInputMicButtonDecoration {
class CombinedWaveView: UIView {
private let bigWaveView: WaveView
private let smallWaveView: WaveView