mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Voice Chat UI fixes

This commit is contained in:
parent dae9f1e967
commit 811f6981ca
@@ -359,24 +359,13 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
                         strongSelf.micButton.view.insertSubview(audioLevelView, at: 0)
                     }
                     
-                    var value = value
-                    if value <= 0.15 {
-                        value = 0.0
-                    }
-                    let level = min(1.0, max(0.0, CGFloat(value)))
-                    let avatarScale: CGFloat
-                    
                     strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
                     if value > 0.0 {
                         strongSelf.audioLevelView?.startAnimating()
-                        avatarScale = 1.03 + level * 0.1
                     } else {
                         strongSelf.audioLevelView?.stopAnimating(duration: 0.5)
-                        avatarScale = 1.0
                     }
                     
-                    //let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
-                    //transition.updateSublayerTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
                 }))
             }
         } else if data.groupCall == nil {
@@ -72,9 +72,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     
     private class SpeakingParticipantsContext {
-        private let speakingLevelThreshold: Float = 0.15
-        private let cutoffTimeout: Int32 = 1
-        private let silentTimeout: Int32 = 3
+        private let speakingLevelThreshold: Float = 0.1
+        private let cutoffTimeout: Int32 = 3
+        private let silentTimeout: Int32 = 2
         
         struct Participant {
             let timestamp: Int32
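
Note: the hunk above only retunes three constants; the body of SpeakingParticipantsContext is not part of this diff. The sketch below is a hypothetical illustration (type names and exact semantics are assumptions, not the repository's implementation) of how a speaking-level threshold combined with cutoff/silent timeouts can classify speaking participants.

typealias PeerId = Int64  // hypothetical stand-in for the real PeerId type

struct SpeakingFilter {
    // Same tuning as the new constants in the hunk above.
    let speakingLevelThreshold: Float = 0.1
    let cutoffTimeout: Int32 = 3   // window in which a loud sample can mark a peer as speaking
    let silentTimeout: Int32 = 2   // quiet time after which a known speaker is dropped

    private var lastLoudTimestamp: [PeerId: Int32] = [:]
    private var currentlySpeaking = Set<PeerId>()

    // Feed one batch of (peerId, level) samples taken at `timestamp` (seconds).
    // Returns the peers currently classified as speaking.
    mutating func update(levels: [(PeerId, Float)], timestamp: Int32) -> Set<PeerId> {
        for (peerId, level) in levels where level > speakingLevelThreshold {
            lastLoudTimestamp[peerId] = timestamp
        }
        var speaking = Set<PeerId>()
        for (peerId, loudAt) in lastLoudTimestamp {
            let timeout = currentlySpeaking.contains(peerId) ? silentTimeout : cutoffTimeout
            if timestamp - loudAt < timeout {
                speaking.insert(peerId)
            }
        }
        // Prune stale entries so the dictionary does not grow without bound.
        lastLoudTimestamp = lastLoudTimestamp.filter { timestamp - $0.value < max(cutoffTimeout, silentTimeout) }
        currentlySpeaking = speaking
        return speaking
    }
}

// Example: var filter = SpeakingFilter()
//          filter.update(levels: [(1, 0.25), (2, 0.05)], timestamp: 10)  // -> {1}
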
@@ -137,19 +137,23 @@ public final class VoiceChatController: ViewController {
             }
             return signal
             |> mapToSignal { value in
-                if value > 0.0 {
-                    return .single(value)
-                    |> then(.single(0.0) |> delay(0.1, queue: Queue.mainQueue()))
-                } else {
-                    return .single(value)
-                }
+                return .single(value)
             }
         }
         
-        func updateAudioLevels(_ levels: [(PeerId, Float)]) {
+        func updateAudioLevels(_ levels: [(PeerId, Float)], ignore: Set<PeerId> = Set()) {
+            var updated = Set<PeerId>()
             for (peerId, level) in levels {
                 if let pipe = self.audioLevels[peerId] {
-                    pipe.putNext(level)
+                    pipe.putNext(max(0.001, level))
+                    updated.insert(peerId)
                 }
             }
+            if !ignore.isEmpty {
+                for (peerId, pipe) in self.audioLevels {
+                    if !updated.contains(peerId) && !ignore.contains(peerId) {
+                        pipe.putNext(0.0)
+                    }
+                }
+            }
         }
@@ -593,7 +597,7 @@ public final class VoiceChatController: ViewController {
                 guard let strongSelf = self else {
                     return
                 }
-                strongSelf.itemInteraction?.updateAudioLevels(levels)
+                strongSelf.itemInteraction?.updateAudioLevels(levels, ignore: Set([strongSelf.context.account.peerId]))
             })
             
             self.myAudioLevelDisposable = (call.myAudioLevel
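
Note: a minimal, self-contained sketch of the fan-out pattern the two hunks above rely on. The pipe type here is a hypothetical stand-in (the repository uses SwiftSignalKit pipes); the point is that each participant row observes its peer's pipe, updateAudioLevels pushes fresh samples, and peers that received no sample are reset to 0.0 unless they are listed in `ignore`.

typealias PeerId = Int64  // hypothetical stand-in for the real PeerId type

// Hypothetical stand-in for a value pipe: observers receive every pushed value.
final class LevelPipe {
    private var observers: [(Float) -> Void] = []
    func putNext(_ value: Float) { observers.forEach { $0(value) } }
    func observe(_ f: @escaping (Float) -> Void) { observers.append(f) }
}

final class AudioLevelFanOut {
    private var audioLevels: [PeerId: LevelPipe] = [:]

    // A row asks for (and thereby registers) the pipe of its peer.
    func pipe(for peerId: PeerId) -> LevelPipe {
        if let pipe = audioLevels[peerId] { return pipe }
        let pipe = LevelPipe()
        audioLevels[peerId] = pipe
        return pipe
    }

    // Mirrors the updated signature in the diff: peers without a fresh sample
    // are zeroed, unless they are in `ignore`.
    func updateAudioLevels(_ levels: [(PeerId, Float)], ignore: Set<PeerId> = Set()) {
        var updated = Set<PeerId>()
        for (peerId, level) in levels {
            if let pipe = audioLevels[peerId] {
                pipe.putNext(max(0.001, level))  // non-zero floor, as in the diff above
                updated.insert(peerId)
            }
        }
        if !ignore.isEmpty {
            for (peerId, pipe) in audioLevels where !updated.contains(peerId) && !ignore.contains(peerId) {
                pipe.putNext(0.0)
            }
        }
    }
}

At the call site in the second hunk, `ignore` contains the local account's peer id; together with the `call.myAudioLevel` subscription visible in the trailing context, this reads as keeping the local row's level out of the remote reset path.
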
@@ -555,7 +555,7 @@ public class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                         return
                     }
                     
-                    if strongSelf.audioLevelView == nil {
+                    if strongSelf.audioLevelView == nil, value > 0.0 {
                         let audioLevelView = VoiceBlobView(
                             frame: blobFrame,
                             maxLevel: 0.3,
@@ -574,29 +574,27 @@ public class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
                         playbackMaskLayer.path = maskPath.cgPath
                         audioLevelView.layer.mask = playbackMaskLayer
                         
-                        audioLevelView.setColor(.green)
+                        audioLevelView.setColor(UIColor(rgb: 0x34c759))
                         strongSelf.audioLevelView = audioLevelView
                         strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
                     }
                     
-                    var value = value
-                    if value <= 0.15 {
-                        value = 0.0
-                    }
                     let level = min(1.0, max(0.0, CGFloat(value)))
-                    let avatarScale: CGFloat
-                    
-                    strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
-                    if value > 0.0 {
-                        strongSelf.audioLevelView?.startAnimating()
-                        avatarScale = 1.03 + level * 0.1
-                    } else {
-                        strongSelf.audioLevelView?.stopAnimating(duration: 0.5)
-                        avatarScale = 1.0
+                    if let audioLevelView = strongSelf.audioLevelView {
+                        audioLevelView.updateLevel(CGFloat(value) * 2.0)
+                        
+                        let avatarScale: CGFloat
+                        if value > 0.0 {
+                            audioLevelView.startAnimating()
+                            avatarScale = 1.03 + level * 0.1
+                        } else {
+                            audioLevelView.stopAnimating(duration: 0.5)
+                            avatarScale = 1.0
+                        }
+                        
+                        let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
+                        transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
                     }
-                    
-                    let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
-                    transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
                 }))
             }
         } else if let audioLevelView = strongSelf.audioLevelView {
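
Note: the avatar scaling that VoiceChatParticipantItemNode keeps applying (and that the navigation accessory panel no longer does) reduces to the pure function below; `avatarScale(for:)` is extracted here only for illustration and does not exist as a helper in the repository. Also note that the old `value <= 0.15` clamp is gone, so quiet but non-zero levels now animate the blob.

import CoreGraphics

// A silent participant stays at scale 1.0; any audible level starts at 1.03 and
// grows linearly up to 1.13 at full level.
func avatarScale(for value: Float) -> CGFloat {
    let level = min(1.0, max(0.0, CGFloat(value)))
    return value > 0.0 ? 1.03 + level * 0.1 : 1.0
}

// avatarScale(for: 0.0) == 1.0
// avatarScale(for: 0.5) == 1.08
// avatarScale(for: 1.0) == 1.13
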