Voice chat fixes

This commit is contained in:
Ali 2020-12-08 02:00:03 +04:00
parent 9eddf57416
commit e14cb87676
7 changed files with 164 additions and 5824 deletions

View File

@ -185,8 +185,8 @@
"PUSH_CHAT_MESSAGES_1" = "%2$@|%1$@ sent a message";
"PUSH_CHAT_MESSAGES_any" = "%2$@|%1$@ sent %3$d messages";
"PUSH_CHAT_ALBUM" = "%2$@|%1$@ sent an album";
"PUSH_CHAT_MESSAGE_DOCS_1" = "%2$@|%1$@ sent a file";
"PUSH_CHAT_MESSAGE_DOCS_any" = "%2$@|%1$@ sent %3$d files";
"PUSH_CHAT_MESSAGE_DOCS_FIX1_1" = "%2$@|%1$@ sent a file";
"PUSH_CHAT_MESSAGE_DOCS_FIX1_any" = "%2$@|%1$@ sent %3$d files";
"PUSH_PINNED_TEXT" = "%1$@|pinned \"%2$@\" ";
"PUSH_PINNED_NOTEXT" = "%1$@|pinned a message";

File diff suppressed because it is too large. (Load Diff)

View File

@ -311,6 +311,11 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
private let presentationDataPromise: Promise<PresentationData>
private var _hasDim: Bool = false
override public var hasDim: Bool {
return _hasDim
}
public init(context: AccountContext, forceTheme: PresentationTheme?, peerId: PeerId, mode: ChannelMembersSearchMode, filters: [ChannelMembersSearchFilter], searchContext: GroupMembersSearchContext?, openPeer: @escaping (Peer, RenderedChannelParticipant?) -> Void, updateActivity: @escaping (Bool) -> Void, pushController: @escaping (ViewController) -> Void) {
self.context = context
self.openPeer = openPeer
@ -333,7 +338,17 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
self.listNode.backgroundColor = self.presentationData.theme.chatList.backgroundColor
self.listNode.isHidden = true
self.addSubnode(self.emptyQueryListNode)
if !filters.contains(where: { filter in
if case .excludeBots = filter {
return true
} else {
return false
}
}) {
self.addSubnode(self.emptyQueryListNode)
} else {
self._hasDim = true
}
self.addSubnode(self.listNode)
let statePromise = ValuePromise(ChannelMembersSearchContainerState(), ignoreRepeated: true)
@ -1360,4 +1375,14 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
self.cancel?()
}
}
// Lets touches fall through the search container's own (dimmed/transparent)
// backing view: only hits on actual content subviews are claimed, so taps on
// the bare container pass to whatever is underneath.
// NOTE(review): calling self.view.hitTest from the node's hitTest assumes the
// backing view does not delegate hit-testing back to the node — confirm no
// recursion in this AsyncDisplayKit configuration.
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
// Ask the backing UIView hierarchy for the deepest view at this point.
guard let result = self.view.hitTest(point, with: event) else {
return nil
}
// A hit on the container's own view (no subview claimed it) counts as a
// miss, so the event propagates behind this node.
if result === self.view {
return nil
}
return result
}
}

View File

@ -12,9 +12,22 @@ public enum SearchDisplayControllerMode {
}
public final class SearchDisplayController {
// Dimming/background node for SearchDisplayController. When `isTransparent`
// is set (the content node reports `hasDim`), hits that land on the bare
// background view are ignored so touches reach the UI behind the search
// overlay; subview hits are still returned normally.
private final class BackgroundNode: ASDisplayNode {
// Toggled alongside `backgroundColor = .clear` by the controller; when true
// the node is visually and touch-wise transparent for its own surface.
var isTransparent: Bool = false
// NOTE(review): self.view.hitTest from the node's hitTest assumes the
// backing view does not forward hit-testing back to this node — verify.
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = self.view.hitTest(point, with: event)
// Pass through only hits on the background surface itself; keep hits on
// any content subviews.
if self.isTransparent, result === self.view {
return nil
} else {
return result
}
}
}
private let searchBar: SearchBarNode
private let mode: SearchDisplayControllerMode
private let backgroundNode: ASDisplayNode
private let backgroundNode: BackgroundNode
public let contentNode: SearchDisplayControllerContentNode
private var hasSeparator: Bool
@ -26,7 +39,7 @@ public final class SearchDisplayController {
public init(presentationData: PresentationData, mode: SearchDisplayControllerMode = .navigation, placeholder: String? = nil, hasSeparator: Bool = false, contentNode: SearchDisplayControllerContentNode, cancel: @escaping () -> Void) {
self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator)
self.backgroundNode = ASDisplayNode()
self.backgroundNode = BackgroundNode()
self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
self.backgroundNode.allowsGroupOpacity = true
@ -93,8 +106,10 @@ public final class SearchDisplayController {
if self.contentNode.hasDim {
self.backgroundNode.backgroundColor = .clear
self.backgroundNode.isTransparent = true
} else {
self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
self.backgroundNode.isTransparent = false
}
}
@ -148,8 +163,10 @@ public final class SearchDisplayController {
if self.contentNode.hasDim {
self.backgroundNode.backgroundColor = .clear
self.backgroundNode.isTransparent = true
} else {
self.backgroundNode.alpha = 0.0
self.backgroundNode.isTransparent = false
}
var size = layout.size

View File

@ -174,7 +174,19 @@ public final class ManagedAudioSession {
private var currentTypeAndOutputMode: (ManagedAudioSessionType, AudioSessionOutputMode)?
private var deactivateTimer: SwiftSignalKit.Timer?
private var isHeadsetPluggedInValue = false
private let isHeadsetPluggedInSync = Atomic<Bool>(value: false)
private var isHeadsetPluggedInValue = false {
didSet {
if self.isHeadsetPluggedInValue != oldValue {
let _ = self.isHeadsetPluggedInSync.swap(self.isHeadsetPluggedInValue)
}
}
}
public func getIsHeadsetPluggedIn() -> Bool {
return self.isHeadsetPluggedInSync.with { $0 }
}
private let outputsToHeadphonesSubscribers = Bag<(Bool) -> Void>()
private var availableOutputsValue: [AudioSessionOutput] = []
@ -770,14 +782,17 @@ public final class ManagedAudioSession {
if let routes = AVAudioSession.sharedInstance().availableInputs {
var alreadySet = false
if self.isHeadsetPluggedInValue {
loop: for route in routes {
switch route.portType {
case .headphones, .bluetoothA2DP, .bluetoothHFP:
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
alreadySet = true
break loop
default:
break
if case .voiceCall = updatedType, case .custom(.builtin) = outputMode {
} else {
loop: for route in routes {
switch route.portType {
case .headphones, .bluetoothA2DP, .bluetoothHFP:
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
alreadySet = true
break loop
default:
break
}
}
}
}

View File

@ -324,7 +324,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
private var summaryStateDisposable: Disposable?
private var isMutedValue: PresentationGroupCallMuteAction = .muted(isPushToTalkActive: false)
private var isMutedValue: PresentationGroupCallMuteAction = .muted(isPushToTalkActive: false) {
didSet {
if self.isMutedValue != oldValue {
self.updateProximityMonitoring()
}
}
}
private let isMutedPromise = ValuePromise<PresentationGroupCallMuteAction>(.muted(isPushToTalkActive: false))
public var isMuted: Signal<Bool, NoError> {
return self.isMutedPromise.get()
@ -339,8 +345,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
private let audioOutputStatePromise = Promise<([AudioSessionOutput], AudioSessionOutput?)>(([], nil))
private var audioOutputStateDisposable: Disposable?
private var actualAudioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
private var audioOutputStateValue: ([AudioSessionOutput], AudioSessionOutput?) = ([], nil)
private var currentAudioOutputValue: AudioSessionOutput = .builtin
private var currentSelectedAudioOutputValue: AudioSessionOutput = .builtin
public var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> {
return self.audioOutputStatePromise.get()
}
@ -453,6 +461,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
var didReceiveAudioOutputs = false
if !audioSession.getIsHeadsetPluggedIn() {
self.currentSelectedAudioOutputValue = .speaker
}
self.audioSessionDisposable = audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
Queue.mainQueue().async {
if let strongSelf = self {
@ -529,6 +541,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
})
self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
|> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
guard let strongSelf = self else {
return
}
strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
})
self.groupCallParticipantUpdatesDisposable = (self.account.stateManager.groupCallParticipantUpdates
|> deliverOnMainQueue).start(next: { [weak self] updates in
guard let strongSelf = self else {
@ -655,6 +675,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if let proximityManagerIndex = self.proximityManagerIndex {
DeviceProximityManager.shared().remove(proximityManagerIndex)
}
self.audioOutputStateDisposable?.dispose()
}
private func updateSessionState(internalState: InternalState, audioSessionControl: ManagedAudioSessionControl?) {
@ -665,10 +687,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.internalState = internalState
if let audioSessionControl = audioSessionControl, previousControl == nil {
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
switch self.currentSelectedAudioOutputValue {
case .speaker:
audioSessionControl.setOutputMode(.custom(self.currentSelectedAudioOutputValue))
default:
break
}
audioSessionControl.setup(synchronous: true)
self.setCurrentAudioOutput(.speaker)
}
self.audioSessionShouldBeActive.set(true)
@ -1034,18 +1059,35 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
guard self.currentAudioOutputValue != output else {
guard self.currentSelectedAudioOutputValue != output else {
return
}
self.currentAudioOutputValue = output
self.currentSelectedAudioOutputValue = output
self.updateProximityMonitoring()
self.audioOutputStatePromise.set(.single((self.audioOutputStateValue.0, output))
|> then(
.single(self.audioOutputStateValue)
|> delay(1.0, queue: Queue.mainQueue())
))
if let audioSessionControl = self.audioSessionControl {
audioSessionControl.setOutputMode(.custom(output))
}
}
private func updateProximityMonitoring() {
var shouldMonitorProximity = false
switch output {
switch self.currentSelectedAudioOutputValue {
case .builtin:
shouldMonitorProximity = true
default:
break
}
if case .muted(isPushToTalkActive: true) = self.isMutedValue {
shouldMonitorProximity = false
}
if shouldMonitorProximity {
if self.proximityManagerIndex == nil {
@ -1058,15 +1100,29 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
DeviceProximityManager.shared().remove(proximityManagerIndex)
}
}
self.audioOutputStatePromise.set(.single((self.audioOutputStateValue.0, output))
|> then(
.single(self.audioOutputStateValue)
|> delay(1.0, queue: Queue.mainQueue())
))
if let audioSessionControl = self.audioSessionControl {
audioSessionControl.setOutputMode(.custom(output))
}
private func updateAudioOutputs(availableOutputs: [AudioSessionOutput], currentOutput: AudioSessionOutput?) {
if self.actualAudioOutputState?.0 != availableOutputs || self.actualAudioOutputState?.1 != currentOutput {
self.actualAudioOutputState = (availableOutputs, currentOutput)
self.setupAudioOutputs()
}
}
private func setupAudioOutputs() {
if let actualAudioOutputState = self.actualAudioOutputState, let currentOutput = actualAudioOutputState.1 {
self.currentSelectedAudioOutputValue = currentOutput
switch currentOutput {
case .headphones, .speaker:
break
case let .port(port) where port.type == .bluetooth:
break
default:
//self.setCurrentAudioOutput(.speaker)
break
}
}
}

View File

@ -147,6 +147,16 @@ func mergeGroupOrChannel(lhs: Peer?, rhs: Api.Chat) -> Peer? {
} else {
let _ = channelFlags.remove(.isVerified)
}
if (flags & Int32(1 << 23)) != 0 {
channelFlags.insert(.hasVoiceChat)
} else {
let _ = channelFlags.remove(.hasVoiceChat)
}
if (flags & Int32(1 << 24)) != 0 {
channelFlags.insert(.hasActiveVoiceChat)
} else {
let _ = channelFlags.remove(.hasActiveVoiceChat)
}
var info = lhs.info
switch info {
case .broadcast:
@ -178,6 +188,16 @@ func mergeChannel(lhs: TelegramChannel?, rhs: TelegramChannel) -> TelegramChanne
} else {
let _ = channelFlags.remove(.isVerified)
}
if rhs.flags.contains(.hasVoiceChat) {
channelFlags.insert(.hasVoiceChat)
} else {
let _ = channelFlags.remove(.hasVoiceChat)
}
if rhs.flags.contains(.hasActiveVoiceChat) {
channelFlags.insert(.hasActiveVoiceChat)
} else {
let _ = channelFlags.remove(.hasActiveVoiceChat)
}
var info = lhs.info
switch info {
case .broadcast: