Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

commit 31d9ae2869
parent d267acc902

    Voice Chat Fixes
New binary files (contents not shown):
    Telegram/Telegram-iOS/Resources/VoiceHand_8.tgs
    Telegram/Telegram-iOS/Resources/VoiceHand_9.tgs
    Telegram/Telegram-iOS/Resources/VoiceHand_10.tgs

Several other binary files changed (contents not shown).
@@ -6340,7 +6340,8 @@ Sorry for the inconvenience.";
 "VoiceChat.PinVideo" = "Pin Video";
 "VoiceChat.UnpinVideo" = "Unpin Video";
 
-"Notification.VoiceChatScheduled" = "Voice chat scheduled for %@";
+"Notification.VoiceChatScheduledChannel" = "Voice chat scheduled for %@";
+"Notification.VoiceChatScheduled" = "%1$@ Voice chat scheduled for %2$@";
 
 "VoiceChat.StartsIn" = "Starts in";
 "VoiceChat.LateBy" = "Late by";
@@ -347,7 +347,7 @@ public class CallStatusBarNodeImpl: CallStatusBarNode {
         var title: String = ""
         var speakerSubtitle: String = ""
         
-        let textFont = Font.regular(13.0)
+        let textFont = Font.with(size: 13.0, design: .regular, weight: .regular, traits: [.monospacedNumbers])
         let textColor = UIColor.white
         var segments: [AnimatedCountLabelNode.Segment] = []
         var displaySpeakerSubtitle = false
@@ -381,7 +381,22 @@ public class CallStatusBarNodeImpl: CallStatusBarNode {
         }
         displaySpeakerSubtitle = speakerSubtitle != title && !speakerSubtitle.isEmpty
         
-        if let membersCount = membersCount {
+        var requiresTimer = false
+        if let scheduleTime = self.currentGroupCallState?.info?.scheduleTimestamp {
+            requiresTimer = true
+            
+            let currentTime = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970)
+            let elapsedTime = scheduleTime - currentTime
+            let timerText: String
+            if elapsedTime >= 86400 {
+                timerText = timeIntervalString(strings: presentationData.strings, value: elapsedTime)
+            } else if elapsedTime < 0 {
+                timerText = presentationData.strings.VoiceChat_StatusLateBy(textForTimeout(value: abs(elapsedTime))).0
+            } else {
+                timerText = presentationData.strings.VoiceChat_StatusStartsIn(textForTimeout(value: elapsedTime)).0
+            }
+            segments.append(.text(0, NSAttributedString(string: timerText, font: textFont, textColor: textColor)))
+        } else if let membersCount = membersCount {
             var membersPart = presentationData.strings.VoiceChat_Status_Members(membersCount)
             if membersPart.contains("[") && membersPart.contains("]") {
                 if let startIndex = membersPart.firstIndex(of: "["), let endIndex = membersPart.firstIndex(of: "]") {
@@ -433,6 +448,19 @@ public class CallStatusBarNodeImpl: CallStatusBarNode {
            }
            
            self.backgroundNode.connectingColor = color
+           
+           if requiresTimer {
+               if self.currentCallTimer == nil {
+                   let timer = SwiftSignalKit.Timer(timeout: 0.5, repeat: true, completion: { [weak self] in
+                       self?.update()
+                   }, queue: Queue.mainQueue())
+                   timer.start()
+                   self.currentCallTimer = timer
+               }
+           } else if let currentCallTimer = self.currentCallTimer {
+               self.currentCallTimer = nil
+               currentCallTimer.invalidate()
+           }
        }
        
        if self.subtitleNode.segments != segments && !displaySpeakerSubtitle {
@@ -2066,7 +2066,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
            return transaction.getPeer(peerId)
        }
        |> deliverOnMainQueue).start(next: { [weak self] myPeer in
-           guard let strongSelf = self, let _ = myPeer else {
+           guard let strongSelf = self, let myPeer = myPeer else {
                return
            }
            
@@ -2076,7 +2076,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
            }
            strongSelf.joinAsPeerId = peerId
            
-           if strongSelf.stateValue.scheduleTimestamp == nil {
+           if strongSelf.stateValue.scheduleTimestamp != nil {
+               strongSelf.stateValue.myPeerId = peerId
+               strongSelf.reconnectedAsEventsPipe.putNext(myPeer)
+           } else {
                strongSelf.reconnectingAsPeer = myPeer
                
                if let participantsContext = strongSelf.participantsContext, let immediateState = participantsContext.immediateState {
@@ -2096,8 +2099,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                }
                
                strongSelf.requestCall(movingFromBroadcastToRtc: false)
-           } else {
-               strongSelf.stateValue.myPeerId = peerId
            }
        })
    }
@@ -2222,7 +2223,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        
        self.isScheduledStarted = true
        self.stateValue.scheduleTimestamp = nil
-       self.switchToTemporaryParticipantsContext(sourceContext: nil, oldMyPeerId: self.joinAsPeerId)
        
        self.startDisposable.set((startScheduledGroupCall(account: self.account, peerId: self.peerId, callId: callInfo.id, accessHash: callInfo.accessHash)
        |> deliverOnMainQueue).start(next: { [weak self] callInfo in
@@ -2445,7 +2445,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        }
-       
        let account = self.account
        
        let currentCall: Signal<GroupCallInfo?, CallError>
        if let initialCall = self.initialCall {
            currentCall = getCurrentGroupCall(account: account, callId: initialCall.id, accessHash: initialCall.accessHash)
@@ -2455,6 +2454,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
            |> map { summary -> GroupCallInfo? in
                return summary?.info
            }
+       } else if case let .active(callInfo) = self.internalState {
+           currentCall = getCurrentGroupCall(account: account, callId: callInfo.id, accessHash: callInfo.accessHash)
+           |> mapError { _ -> CallError in
+               return .generic
+           }
+           |> map { summary -> GroupCallInfo? in
+               return summary?.info
+           }
        } else {
            currentCall = .single(nil)
        }
@@ -242,7 +242,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
        let subtitleSize = self.subtitleLabel.updateLayout(CGSize(width: size.width, height: .greatestFiniteMagnitude))
        let totalHeight = titleSize.height + subtitleSize.height + 1.0
        
-       self.titleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) / 2.0), y: floor((size.height - totalHeight) / 2.0) + 88.0), size: titleSize)
+       self.titleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) / 2.0), y: floor((size.height - totalHeight) / 2.0) + 84.0), size: titleSize)
        self.subtitleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - subtitleSize.width) / 2.0), y: self.titleLabel.frame.maxY + 1.0), size: subtitleSize)
        
        self.bottomNode.frame = CGRect(origin: CGPoint(), size: size)
@@ -361,6 +361,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
        }
        
        var backgroundState: VoiceChatActionButtonBackgroundNode.State
+       var animated = true
        switch state {
            case let .button(text):
                backgroundState = .button
@@ -370,6 +371,9 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
                self.buttonTitleLabel.frame = CGRect(origin: CGPoint(x: floor((self.bounds.width - titleSize.width) / 2.0), y: floor((self.bounds.height - titleSize.height) / 2.0)), size: titleSize)
            case .scheduled:
                backgroundState = .disabled
+               if previousState == .connecting {
+                   animated = false
+               }
            case let .active(state):
                switch state {
                    case .on:
@@ -385,7 +389,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
        self.applyIconParams()
        
        self.backgroundNode.isDark = dark
-       self.backgroundNode.update(state: backgroundState, animated: true)
+       self.backgroundNode.update(state: backgroundState, animated: animated)
        
        if case .active = state, let previousState = previousState, case .connecting = previousState, animated {
            self.activeDisposable.set((self.activePromise.get()
@@ -755,7 +759,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
        case muted
    }
    
-   func updateGlowAndGradientAnimations(type: Gradient, previousType: Gradient? = nil) {
+   func updateGlowAndGradientAnimations(type: Gradient, previousType: Gradient? = nil, animated: Bool = true) {
        let effectivePreviousTyoe = previousType ?? .active
        
        let scale: CGFloat
@@ -794,12 +798,14 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
        self.maskGradientLayer.transform = CATransform3DMakeScale(targetScale, targetScale, 1.0)
        if let _ = previousType {
            self.maskGradientLayer.animateScale(from: initialScale, to: targetScale, duration: 0.3)
-       } else {
+       } else if animated {
            self.maskGradientLayer.animateSpring(from: initialScale as NSNumber, to: targetScale as NSNumber, keyPath: "transform.scale", duration: 0.45)
        }
        
        self.foregroundGradientLayer.colors = targetColors
-       self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
+       if animated {
+           self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
+       }
    }
    
    private func playMuteAnimation() {
@@ -1081,6 +1087,16 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
                    self.playMuteAnimation()
                }
                self.transition = nil
+           } else {
+               if self.maskBlobView.isHidden {
+                   self.updateGlowAndGradientAnimations(type: .muted, previousType: nil, animated: false)
+                   self.maskCircleLayer.isHidden = false
+                   self.maskProgressLayer.isHidden = true
+                   self.maskGradientLayer.isHidden = false
+                   self.maskBlobView.isHidden = false
+                   self.maskBlobView.startAnimating()
+                   self.maskBlobView.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.45)
+               }
            }
        case .button:
            self.updatedActive?(true)
@@ -1537,7 +1553,6 @@ final class VoiceChatActionButtonIconNode: ManagedAnimationNode {
        self.isColored = isColored
        super.init(size: CGSize(width: 100.0, height: 100.0))
        
-       self.scale = 0.8
        self.trackTo(item: ManagedAnimationItem(source: .local("VoiceUnmute"), frames: .range(startFrame: 0, endFrame: 0), duration: 0.1))
    }
    
@@ -1632,15 +1647,25 @@ final class VoiceChatActionButtonIconNode: ManagedAnimationNode {
        }
        
        var useTiredAnimation = false
+       var useAngryAnimation = false
        let val = Float.random(in: 0.0..<1.0)
        if val <= 0.01 {
            useTiredAnimation = true
+       } else if val <= 0.05 {
+           useAngryAnimation = true
        }
        
-       let normalAnimations = ["VoiceHand_1", "VoiceHand_2", "VoiceHand_3", "VoiceHand_4", "VoiceHand_7"]
+       let normalAnimations = ["VoiceHand_1", "VoiceHand_2", "VoiceHand_3", "VoiceHand_4", "VoiceHand_7", "VoiceHand_8"]
        let tiredAnimations = ["VoiceHand_5", "VoiceHand_6"]
-       let animations = useTiredAnimation ? tiredAnimations : normalAnimations
+       let angryAnimations = ["VoiceHand_9", "VoiceHand_10"]
+       let animations: [String]
+       if useTiredAnimation {
+           animations = tiredAnimations
+       } else if useAngryAnimation {
+           animations = angryAnimations
+       } else {
+           animations = normalAnimations
+       }
        if let animationName = animations.randomElement() {
            self.trackTo(item: ManagedAnimationItem(source: .local(animationName)))
        }
@@ -2006,17 +2006,17 @@ public final class VoiceChatController: ViewController {
                }
            }
            
-           items.append(.action(ContextMenuActionItem(text: strongSelf.isNoiseSuppressionEnabled ? "Disable Noise Suppression" : "Enable Noise Suppression", textColor: .primary, icon: { theme in
-               return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
-           }, action: { _, f in
-               f(.dismissWithoutContent)
-               
-               guard let strongSelf = self else {
-                   return
-               }
-               
-               strongSelf.call.setIsNoiseSuppressionEnabled(!strongSelf.isNoiseSuppressionEnabled)
-           })))
+           // items.append(.action(ContextMenuActionItem(text: strongSelf.isNoiseSuppressionEnabled ? "Disable Noise Suppression" : "Enable Noise Suppression", textColor: .primary, icon: { theme in
+           //     return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
+           // }, action: { _, f in
+           //     f(.dismissWithoutContent)
+           //
+           //     guard let strongSelf = self else {
+           //         return
+           //     }
+           //
+           //     strongSelf.call.setIsNoiseSuppressionEnabled(!strongSelf.isNoiseSuppressionEnabled)
+           // })))
            
            if let callState = strongSelf.callState, callState.canManageCall {
                items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_EndVoiceChat, textColor: .destructive, icon: { theme in
@@ -3220,7 +3220,7 @@ public final class VoiceChatController: ViewController {
                smallButtons = false
                firstButtonFrame = CGRect(origin: CGPoint(x: floor(leftButtonFrame.midX - cameraButtonSize.width / 2.0), y: leftButtonFrame.minY - upperButtonDistance - cameraButtonSize.height), size: cameraButtonSize)
                secondButtonFrame = leftButtonFrame
-               thirdButtonFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - centralButtonSize.width) / 2.0), y: floorToScreenPixels((self.effectiveBottomAreaHeight - centralButtonSize.height) / 2.0)), size: centralButtonSize)
+               thirdButtonFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - centralButtonSize.width) / 2.0), y: floor((self.effectiveBottomAreaHeight - centralButtonSize.height) / 2.0) - 3.0), size: centralButtonSize)
                forthButtonFrame = rightButtonFrame
            case let .fullscreen(controlsHidden):
                smallButtons = true
@@ -3910,8 +3910,9 @@ public final class VoiceChatController: ViewController {
                return
            }
            
-           let controller = voiceChatTitleEditController(sharedContext: strongSelf.context.sharedContext, account: strongSelf.context.account, forceTheme: strongSelf.darkTheme, title: strongSelf.presentationData.strings.VoiceChat_EditTitleTitle, text: strongSelf.presentationData.strings.VoiceChat_EditTitleText, placeholder: chatPeer.displayTitle(strings: strongSelf.presentationData.strings, displayOrder: strongSelf.presentationData.nameDisplayOrder), value: strongSelf.callState?.title, maxLength: 40, apply: { title in
-               if let strongSelf = self, let title = title {
+           let initialTitle = strongSelf.callState?.title ?? ""
+           let controller = voiceChatTitleEditController(sharedContext: strongSelf.context.sharedContext, account: strongSelf.context.account, forceTheme: strongSelf.darkTheme, title: strongSelf.presentationData.strings.VoiceChat_EditTitleTitle, text: strongSelf.presentationData.strings.VoiceChat_EditTitleText, placeholder: chatPeer.displayTitle(strings: strongSelf.presentationData.strings, displayOrder: strongSelf.presentationData.nameDisplayOrder), value: initialTitle, maxLength: 40, apply: { title in
+               if let strongSelf = self, let title = title, title != initialTitle {
                    strongSelf.call.updateTitle(title)
                    
                    strongSelf.presentUndoOverlay(content: .voiceChatFlag(text: title.isEmpty ? strongSelf.presentationData.strings.VoiceChat_EditTitleRemoveSuccess : strongSelf.presentationData.strings.VoiceChat_EditTitleSuccess(title).0), action: { _ in return false })
@@ -158,12 +158,12 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
            
            context.setFillColor(parameters.color.cgColor)
            
-           var clearLineWidth: CGFloat = 4.0
+           var clearLineWidth: CGFloat = 2.0
            var lineWidth: CGFloat = 1.0 + UIScreenPixel
            if bounds.size.width > 36.0 {
                context.scaleBy(x: 2.0, y: 2.0)
            } else if bounds.size.width < 30.0 {
-               clearLineWidth = 3.0
+               clearLineWidth = 2.0
                lineWidth = 1.0
            }
            
@@ -207,18 +207,19 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
            }
            
            if parameters.reverse {
-               startPoint = CGPoint(x: origin.x + length * (1.0 - parameters.transition), y: origin.y + length * (1.0 - parameters.transition))
-               endPoint = CGPoint(x: origin.x + length, y: origin.y + length)
+               startPoint = CGPoint(x: origin.x + length * (1.0 - parameters.transition), y: origin.y + length * (1.0 - parameters.transition)).offsetBy(dx: UIScreenPixel, dy: -UIScreenPixel)
+               endPoint = CGPoint(x: origin.x + length, y: origin.y + length).offsetBy(dx: UIScreenPixel, dy: -UIScreenPixel)
            } else {
-               startPoint = origin
-               endPoint = CGPoint(x: origin.x + length * parameters.transition, y: origin.y + length * parameters.transition)
+               startPoint = origin.offsetBy(dx: UIScreenPixel, dy: -UIScreenPixel)
+               endPoint = CGPoint(x: origin.x + length * parameters.transition, y: origin.y + length * parameters.transition).offsetBy(dx: UIScreenPixel, dy: -UIScreenPixel)
            }
            
+           
            context.setBlendMode(.clear)
            context.setLineWidth(clearLineWidth)
            
-           context.move(to: startPoint)
-           context.addLine(to: endPoint)
+           context.move(to: startPoint.offsetBy(dx: 0.0, dy: 1.0 + UIScreenPixel))
+           context.addLine(to: endPoint.offsetBy(dx: 0.0, dy: 1.0 + UIScreenPixel))
            context.strokePath()
            
            context.setBlendMode(.normal)
@@ -169,7 +169,7 @@ public final class VoiceChatOverlayController: ViewController {
        
        if reclaim {
            self.dismissed = true
-           let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - 205.0 / 2.0)
+           let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height - layout.intrinsicInsets.bottom - 205.0 / 2.0 - 2.0)
            if self.isSlidOffscreen {
                self.isSlidOffscreen = false
                self.isButtonHidden = true
@@ -133,10 +133,8 @@ final class VoiceChatTimerNode: ASDisplayNode {
        let timerText: String
        if elapsedTime >= 86400 {
            timerText = timeIntervalString(strings: self.strings, value: elapsedTime)
-       } else if elapsedTime < 0 {
-           timerText = "\(textForTimeout(value: abs(elapsedTime)))"
        } else {
-           timerText = textForTimeout(value: elapsedTime)
+           timerText = textForTimeout(value: abs(elapsedTime))
        }
        
        if self.updateTimer == nil {
One file's diff suppressed because it is too large.
@@ -449,8 +449,14 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
        case let .groupPhoneCall(_, _, scheduleDate, duration):
            if let scheduleDate = scheduleDate {
                let timeString = humanReadableStringForTimestamp(strings: strings, dateTimeFormat: dateTimeFormat, timestamp: scheduleDate)
-               let titleString = strings.Notification_VoiceChatScheduled(timeString).0
-               attributedString = NSAttributedString(string: titleString, font: titleFont, textColor: primaryTextColor)
+               if message.author?.id.namespace == Namespaces.Peer.CloudChannel {
+                   let titleString = strings.Notification_VoiceChatScheduledChannel(timeString).0
+                   attributedString = NSAttributedString(string: titleString, font: titleFont, textColor: primaryTextColor)
+               } else {
+                   let attributePeerIds: [(Int, PeerId?)] = [(0, message.author?.id)]
+                   let titleString = strings.Notification_VoiceChatScheduled(authorName, timeString)
+                   attributedString = addAttributesToStringWithRanges(titleString, body: bodyAttributes, argumentAttributes: peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: attributePeerIds))
+               }
            } else if let duration = duration {
                let titleString = strings.Notification_VoiceChatEnded(callDurationString(strings: strings, value: duration)).0
                attributedString = NSAttributedString(string: titleString, font: titleFont, textColor: primaryTextColor)