Merge commit '811f6981ca2f744e8521903b71cbf62a81983910'

Ali committed 2020-12-01 18:43:56 +04:00
commit 470ea8f692
6 changed files with 51 additions and 46 deletions

View File

@@ -265,6 +265,7 @@ public protocol PresentationGroupCall: class {
    var members: Signal<PresentationGroupCallMembers?, NoError> { get }
    var audioLevels: Signal<[(PeerId, Float)], NoError> { get }
    var myAudioLevel: Signal<Float, NoError> { get }
    var speakingAudioLevels: Signal<[(PeerId, Float)], NoError> { get }
    var isMuted: Signal<Bool, NoError> { get }

    func leave(terminateIfPossible: Bool) -> Signal<Bool, NoError>
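The one-line addition here is the new `speakingAudioLevels` requirement: unlike the raw `audioLevels` stream above it, it only carries levels for members currently classified as speaking. A minimal consumption sketch, assuming SwiftSignalKit and any `PresentationGroupCall` implementation (the `observeSpeakers` helper is hypothetical):

```swift
import SwiftSignalKit

// Hypothetical helper: subscribe on the main queue and log each speaking peer.
func observeSpeakers(call: PresentationGroupCall) -> Disposable {
    return (call.speakingAudioLevels
    |> deliverOnMainQueue).start(next: { levels in
        for (peerId, level) in levels {
            print("speaking peer \(peerId): level \(level)")
        }
    })
}
```

This mirrors the subscription this commit adds in VoiceChatController further down.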

View File

@@ -403,6 +403,7 @@ open class TelegramBaseController: ViewController, KeyShortcutResponder {
        if previousTheme !== presentationData.theme || previousStrings !== presentationData.strings {
            strongSelf.mediaAccessoryPanel?.0.containerNode.updatePresentationData(presentationData)
            strongSelf.locationBroadcastAccessoryPanel?.updatePresentationData(presentationData)
            strongSelf.groupCallAccessoryPanel?.updatePresentationData(presentationData)
        }
    }
})

View File

@@ -217,7 +217,7 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
        }
    }

    func updatePresentationData(_ presentationData: PresentationData) {
    public func updatePresentationData(_ presentationData: PresentationData) {
        self.theme = presentationData.theme
        self.strings = presentationData.strings
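The access-level change from the implicit `internal` to `public` is what makes the new `strongSelf.groupCallAccessoryPanel?.updatePresentationData(presentationData)` call site above compile: the panel is defined in a different module than TelegramBaseController, and `internal` members are invisible across module boundaries. A generic illustration of the rule (the `Panel` type is hypothetical):

```swift
public final class Panel {
    // internal (the default): visible only inside the defining module.
    func refreshInternally() {}

    // public: callable from any module that imports this one.
    public func refresh() {}
}
```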
@@ -359,24 +359,13 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
                strongSelf.micButton.view.insertSubview(audioLevelView, at: 0)
            }

            var value = value
            if value <= 0.15 {
                value = 0.0
            }
            let level = min(1.0, max(0.0, CGFloat(value)))
            let avatarScale: CGFloat
            strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
            if value > 0.0 {
                strongSelf.audioLevelView?.startAnimating()
                avatarScale = 1.03 + level * 0.1
            } else {
                strongSelf.audioLevelView?.stopAnimating(duration: 0.5)
                avatarScale = 1.0
            }
            //let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
            //transition.updateSublayerTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
        }))
    }
} else if data.groupCall == nil {

View File

@@ -72,9 +72,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
    }

    private class SpeakingParticipantsContext {
        private let speakingLevelThreshold: Float = 0.15
        private let cutoffTimeout: Int32 = 1
        private let silentTimeout: Int32 = 3
        private let speakingLevelThreshold: Float = 0.1
        private let cutoffTimeout: Int32 = 3
        private let silentTimeout: Int32 = 2

        struct Participant {
            let timestamp: Int32
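Three retuned constants: the speaking threshold drops from 0.15 to 0.1, so quieter speech still registers; the cutoff timeout rises from 1 to 3 seconds, so speaking status lingers instead of flickering; and the silent timeout falls from 3 to 2 seconds. The surrounding update logic is only partially visible in this diff, so the following is a self-contained sketch of how such a threshold and pair of timeouts could plausibly interact, with `Int64` standing in for `PeerId`:

```swift
// Illustrative bookkeeping only; not a copy of SpeakingParticipantsContext.
final class SpeakingParticipantsSketch {
    private let speakingLevelThreshold: Float = 0.1 // was 0.15: more sensitive
    private let cutoffTimeout: Int32 = 3            // was 1: status lingers longer
    private let silentTimeout: Int32 = 2            // was 3: idle entries expire sooner

    private struct Participant {
        let timestamp: Int32
        let level: Float
    }

    private var participants: [Int64: Participant] = [:]
    private(set) var speakingParticipants = Set<Int64>()

    func update(levels: [(Int64, Float)], timestamp: Int32) {
        var validSpeakers: [Int64: Participant] = [:]
        var speaking = Set<Int64>()
        for (peerId, level) in levels {
            if level >= speakingLevelThreshold {
                // Loud enough right now: mark as speaking.
                validSpeakers[peerId] = Participant(timestamp: timestamp, level: level)
                speaking.insert(peerId)
            } else if let previous = participants[peerId] {
                let age = timestamp - previous.timestamp
                if age < cutoffTimeout {
                    // Heard recently: keep the speaking status to avoid flicker.
                    validSpeakers[peerId] = previous
                    speaking.insert(peerId)
                } else if age < cutoffTimeout + silentTimeout {
                    // Lapsed: keep tracking briefly before dropping the entry.
                    validSpeakers[peerId] = previous
                }
            }
        }
        participants = validSpeakers
        speakingParticipants = speaking
    }
}

let ctx = SpeakingParticipantsSketch()
ctx.update(levels: [(1, 0.4), (2, 0.05)], timestamp: 0)
print(ctx.speakingParticipants) // [1]: peer 2 never crossed the 0.1 threshold
ctx.update(levels: [(1, 0.02)], timestamp: 2)
print(ctx.speakingParticipants) // still [1]: last loud sample 2s ago, under the 3s cutoff
```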
@@ -89,6 +89,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
            }
        }

        private let audioLevelsPromise = Promise<[(PeerId, Float)]>()

        init() {
        }
@@ -124,13 +126,23 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                }
            }

            var audioLevels: [(PeerId, Float)] = []
            for (peerId, speaker) in validSpeakers {
                audioLevels.append((peerId, speaker.level))
            }

            self.participants = validSpeakers
            self.speakingParticipants = speakingParticipants
            self.audioLevelsPromise.set(.single(audioLevels))
        }

        func get() -> Signal<Set<PeerId>, NoError> {
            return self.speakingParticipantsPromise.get() |> distinctUntilChanged
        }

        func getAudioLevels() -> Signal<[(PeerId, Float)], NoError> {
            return self.audioLevelsPromise.get()
        }
    }

    public let account: Account
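The context now also republishes the raw levels of every tracked speaker through `audioLevelsPromise`, so `getAudioLevels()` gives UI code a level stream restricted to speaking members. A sketch of the `Promise` producer/consumer pattern involved, assuming SwiftSignalKit's `Promise` hands new subscribers the latest value it was set to (`Int64` again stands in for `PeerId`):

```swift
import SwiftSignalKit

let audioLevelsPromise = Promise<[(Int64, Float)]>()

// Producer side: publish a fresh snapshot on every update, as the hunk
// above does with the levels of validSpeakers.
audioLevelsPromise.set(.single([(1, 0.4), (2, 0.12)]))

// Consumer side: the shape of getAudioLevels().
let disposable = audioLevelsPromise.get().start(next: { levels in
    for (peerId, level) in levels {
        print("peer \(peerId): \(level)")
    }
})
disposable.dispose()
```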
@@ -189,9 +201,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
    }

    private var audioLevelsDisposable = MetaDisposable()
    private let speakingParticipantsContext = SpeakingParticipantsContext()
    private var speakingParticipantsReportTimestamp: [PeerId: Double] = [:]

    public var speakingAudioLevels: Signal<[(PeerId, Float)], NoError> {
        return self.speakingParticipantsContext.getAudioLevels()
    }

    private var participantsContextStateDisposable = MetaDisposable()
    private var participantsContext: GroupCallParticipantsContext?

View File

@@ -137,21 +137,23 @@ public final class VoiceChatController: ViewController {
        }
        return signal
        |> mapToSignal { value in
            if value > 0.0 {
                return .single(value)
                |> then(.single(0.0) |> delay(1.0, queue: Queue.mainQueue()))
            } else {
                return .single(value)
            }
        } |> mapToThrottled { next -> Signal<Float, NoError> in
            return .single(next) |> then(.complete() |> delay(0.1, queue: Queue.mainQueue()))
            return .single(value)
        }
    }
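This appears to swap a 0.1-second `mapToThrottled` for a hold-then-reset shape. Assuming SwiftSignalKit's `mapToSignal` switches to the latest inner signal, each nonzero sample is re-emitted immediately and schedules a trailing `0.0` one second later; any newer sample replaces the inner signal and cancels the pending zero. The displayed level therefore falls back to zero only after a full second of silence. The same transform wrapped as a standalone helper (the `holdLevel` name is mine):

```swift
import SwiftSignalKit

func holdLevel(_ signal: Signal<Float, NoError>) -> Signal<Float, NoError> {
    return signal
    |> mapToSignal { value -> Signal<Float, NoError> in
        if value > 0.0 {
            // Emit the sample, then a trailing zero after one quiet second.
            return .single(value)
            |> then(.single(0.0) |> delay(1.0, queue: Queue.mainQueue()))
        } else {
            return .single(value)
        }
    }
}
```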
    func updateAudioLevels(_ levels: [(PeerId, Float)]) {
    func updateAudioLevels(_ levels: [(PeerId, Float)], ignore: Set<PeerId> = Set()) {
        var updated = Set<PeerId>()
        for (peerId, level) in levels {
            if let pipe = self.audioLevels[peerId] {
                pipe.putNext(level)
                pipe.putNext(max(0.001, level))
                updated.insert(peerId)
            }
        }
        if !ignore.isEmpty {
            for (peerId, pipe) in self.audioLevels {
                if !updated.contains(peerId) && !ignore.contains(peerId) {
                    pipe.putNext(0.0)
                }
            }
        }
    }
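Two behavioral tweaks in `updateAudioLevels`: live samples are clamped to a 0.001 floor so an active blob never collapses to nothing between updates, and peers that received no fresh sample are reset to zero unless listed in the new `ignore` set. Usage mirroring the call site added later in this commit, where the local account's peer id is excluded because its row is driven by `myAudioLevel` instead:

```swift
// Hypothetical call site: remote rows animate from the merged levels,
// the local user's row is skipped.
itemInteraction.updateAudioLevels(levels, ignore: Set([context.account.peerId]))
```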
@@ -595,12 +597,12 @@ public final class VoiceChatController: ViewController {
            }
        })

        self.audioLevelsDisposable = (call.audioLevels
        self.audioLevelsDisposable = (call.speakingAudioLevels
        |> deliverOnMainQueue).start(next: { [weak self] levels in
            guard let strongSelf = self else {
                return
            }
            strongSelf.itemInteraction?.updateAudioLevels(levels)
            strongSelf.itemInteraction?.updateAudioLevels(levels, ignore: Set([strongSelf.context.account.peerId]))
        })

        self.myAudioLevelDisposable = (call.myAudioLevel

View File

@@ -555,7 +555,7 @@ public class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
            return
        }
        if strongSelf.audioLevelView == nil {
        if strongSelf.audioLevelView == nil, value > 0.0 {
            let audioLevelView = VoiceBlobView(
                frame: blobFrame,
                maxLevel: 0.3,
@@ -574,29 +574,27 @@ public class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
            playbackMaskLayer.path = maskPath.cgPath
            audioLevelView.layer.mask = playbackMaskLayer
            audioLevelView.setColor(.green)
            audioLevelView.setColor(UIColor(rgb: 0x34c759))
            strongSelf.audioLevelView = audioLevelView
            strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
        }

        var value = value
        if value <= 0.15 {
            value = 0.0
        }
        let level = min(1.0, max(0.0, CGFloat(value)))
        let avatarScale: CGFloat
        strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
        if value > 0.0 {
            strongSelf.audioLevelView?.startAnimating()
            avatarScale = 1.03 + level * 0.1
        } else {
            strongSelf.audioLevelView?.stopAnimating(duration: 0.5)
            avatarScale = 1.0
        if let audioLevelView = strongSelf.audioLevelView {
            audioLevelView.updateLevel(CGFloat(value) * 2.0)
            let avatarScale: CGFloat
            if value > 0.0 {
                audioLevelView.startAnimating()
                avatarScale = 1.03 + level * 0.1
            } else {
                audioLevelView.stopAnimating(duration: 0.5)
                avatarScale = 1.0
            }
            let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
            transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
        }
        let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .spring)
        transition.updateTransformScale(node: strongSelf.avatarNode, scale: avatarScale, beginWithCurrentState: true)
    }))
}
} else if let audioLevelView = strongSelf.audioLevelView {
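The squelch-and-scale mapping kept in this node (and deleted from GroupCallNavigationAccessoryPanel earlier in this commit) is compact enough to restate as a pure function: raw levels at or below 0.15 count as silence, the blob is driven at twice the raw value, and the avatar scales from just above 1.03 up to 1.13 while the peer is audible. A self-contained restatement for illustration:

```swift
import CoreGraphics

func avatarScale(for rawValue: Float) -> CGFloat {
    var value = rawValue
    if value <= 0.15 {
        value = 0.0 // squelch: treat low-level noise as silence
    }
    let level = min(1.0, max(0.0, CGFloat(value)))
    return value > 0.0 ? 1.03 + level * 0.1 : 1.0
}

print(avatarScale(for: 0.5)) // 1.08
print(avatarScale(for: 0.1)) // 1.0: below the squelch threshold
```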