Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Synced 2025-06-16 05:55:20 +00:00

Commit e14cb87676: Voice chat fixes
Parent: 9eddf57416
@@ -185,8 +185,8 @@
 "PUSH_CHAT_MESSAGES_1" = "%2$@|%1$@ sent a message";
 "PUSH_CHAT_MESSAGES_any" = "%2$@|%1$@ sent %3$d messages";
 "PUSH_CHAT_ALBUM" = "%2$@|%1$@ sent an album";
-"PUSH_CHAT_MESSAGE_DOCS_1" = "%2$@|%1$@ sent a file";
-"PUSH_CHAT_MESSAGE_DOCS_any" = "%2$@|%1$@ sent %3$d files";
+"PUSH_CHAT_MESSAGE_DOCS_FIX1_1" = "%2$@|%1$@ sent a file";
+"PUSH_CHAT_MESSAGE_DOCS_FIX1_any" = "%2$@|%1$@ sent %3$d files";
 
 "PUSH_PINNED_TEXT" = "%1$@|pinned \"%2$@\" ";
 "PUSH_PINNED_NOTEXT" = "%1$@|pinned a message";
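Note on the strings above: the push templates use positional specifiers (%1$@, %2$@, %3$d) so translations can reorder the chat title, sender name and count. Below is a minimal sketch of how such a template expands with Foundation's String(format:); the argument values are invented for illustration, and the reading that the "|" separates the notification title from its body is an assumption inferred from the string layout.

import Foundation

// One of the templates from the hunk above; the values are made-up examples.
let template = "%2$@|%1$@ sent a file"
let rendered = String(format: template, "Alice", "Design Chat")
// rendered == "Design Chat|Alice sent a file"
print(rendered)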
File diff suppressed because it is too large
@@ -311,6 +311,11 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
     private let presentationDataPromise: Promise<PresentationData>
     
+    private var _hasDim: Bool = false
+    override public var hasDim: Bool {
+        return _hasDim
+    }
+    
     public init(context: AccountContext, forceTheme: PresentationTheme?, peerId: PeerId, mode: ChannelMembersSearchMode, filters: [ChannelMembersSearchFilter], searchContext: GroupMembersSearchContext?, openPeer: @escaping (Peer, RenderedChannelParticipant?) -> Void, updateActivity: @escaping (Bool) -> Void, pushController: @escaping (ViewController) -> Void) {
         self.context = context
         self.openPeer = openPeer
@@ -333,7 +338,17 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
         self.listNode.backgroundColor = self.presentationData.theme.chatList.backgroundColor
         self.listNode.isHidden = true
         
-        self.addSubnode(self.emptyQueryListNode)
+        if !filters.contains(where: { filter in
+            if case .excludeBots = filter {
+                return true
+            } else {
+                return false
+            }
+        }) {
+            self.addSubnode(self.emptyQueryListNode)
+        } else {
+            self._hasDim = true
+        }
         self.addSubnode(self.listNode)
         
         let statePromise = ValuePromise(ChannelMembersSearchContainerState(), ignoreRepeated: true)
@@ -1360,4 +1375,14 @@ public final class ChannelMembersSearchContainerNode: SearchDisplayControllerCon
             self.cancel?()
         }
     }
+    
+    override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+        guard let result = self.view.hitTest(point, with: event) else {
+            return nil
+        }
+        if result === self.view {
+            return nil
+        }
+        return result
+    }
 }
@@ -12,9 +12,22 @@ public enum SearchDisplayControllerMode {
 }
 
 public final class SearchDisplayController {
+    private final class BackgroundNode: ASDisplayNode {
+        var isTransparent: Bool = false
+        
+        override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+            let result = self.view.hitTest(point, with: event)
+            if self.isTransparent, result === self.view {
+                return nil
+            } else {
+                return result
+            }
+        }
+    }
+    
     private let searchBar: SearchBarNode
     private let mode: SearchDisplayControllerMode
-    private let backgroundNode: ASDisplayNode
+    private let backgroundNode: BackgroundNode
     public let contentNode: SearchDisplayControllerContentNode
     private var hasSeparator: Bool
     
@@ -26,7 +39,7 @@ public final class SearchDisplayController {
     
     public init(presentationData: PresentationData, mode: SearchDisplayControllerMode = .navigation, placeholder: String? = nil, hasSeparator: Bool = false, contentNode: SearchDisplayControllerContentNode, cancel: @escaping () -> Void) {
         self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator)
-        self.backgroundNode = ASDisplayNode()
+        self.backgroundNode = BackgroundNode()
         self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
         self.backgroundNode.allowsGroupOpacity = true
         
@@ -93,8 +106,10 @@ public final class SearchDisplayController {
         
         if self.contentNode.hasDim {
             self.backgroundNode.backgroundColor = .clear
+            self.backgroundNode.isTransparent = true
         } else {
             self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
+            self.backgroundNode.isTransparent = false
         }
     }
     
@@ -148,8 +163,10 @@ public final class SearchDisplayController {
         
         if self.contentNode.hasDim {
             self.backgroundNode.backgroundColor = .clear
+            self.backgroundNode.isTransparent = true
         } else {
             self.backgroundNode.alpha = 0.0
+            self.backgroundNode.isTransparent = false
         }
         
         var size = layout.size
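For context: the new BackgroundNode keeps a background node in the search overlay's hierarchy while letting touches fall through it whenever the content node reports hasDim (the background is then cleared and marked isTransparent), so the content behind the overlay stays interactive. A minimal UIKit-only sketch of the same pass-through idea, with illustrative names rather than the project's:

import UIKit

// Swallows touches normally, but becomes transparent to touches that land on
// itself (rather than on a subview) when isPassThrough is set.
final class PassThroughBackgroundView: UIView {
    var isPassThrough = false

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        let result = super.hitTest(point, with: event)
        if isPassThrough, result === self {
            // Returning nil hands the touch to whatever sits behind this view.
            return nil
        }
        return result
    }
}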
@@ -174,7 +174,19 @@ public final class ManagedAudioSession {
     private var currentTypeAndOutputMode: (ManagedAudioSessionType, AudioSessionOutputMode)?
     private var deactivateTimer: SwiftSignalKit.Timer?
     
-    private var isHeadsetPluggedInValue = false
+    private let isHeadsetPluggedInSync = Atomic<Bool>(value: false)
+    private var isHeadsetPluggedInValue = false {
+        didSet {
+            if self.isHeadsetPluggedInValue != oldValue {
+                let _ = self.isHeadsetPluggedInSync.swap(self.isHeadsetPluggedInValue)
+            }
+        }
+    }
+    
+    public func getIsHeadsetPluggedIn() -> Bool {
+        return self.isHeadsetPluggedInSync.with { $0 }
+    }
     
     private let outputsToHeadphonesSubscribers = Bag<(Bool) -> Void>()
     
     private var availableOutputsValue: [AudioSessionOutput] = []
@@ -770,14 +782,17 @@ public final class ManagedAudioSession {
         if let routes = AVAudioSession.sharedInstance().availableInputs {
             var alreadySet = false
             if self.isHeadsetPluggedInValue {
-                loop: for route in routes {
-                    switch route.portType {
-                        case .headphones, .bluetoothA2DP, .bluetoothHFP:
-                            let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
-                            alreadySet = true
-                            break loop
-                        default:
-                            break
+                if case .voiceCall = updatedType, case .custom(.builtin) = outputMode {
+                } else {
+                    loop: for route in routes {
+                        switch route.portType {
+                            case .headphones, .bluetoothA2DP, .bluetoothHFP:
+                                let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
+                                alreadySet = true
+                                break loop
+                            default:
+                                break
+                        }
                     }
                 }
             }
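For context: isHeadsetPluggedInValue appears to be mutated only on the audio session's own queue; mirroring every change into an Atomic gives callers on other threads (such as the group call setup below, which calls getIsHeadsetPluggedIn()) a lock-protected snapshot without hopping queues. A rough self-contained sketch of that pattern, using a hand-rolled lock-based box in place of SwiftSignalKit's Atomic:

import Foundation

// Minimal stand-in for SwiftSignalKit's Atomic: a lock-protected box.
final class LockedBox<T> {
    private let lock = NSLock()
    private var value: T
    init(_ value: T) { self.value = value }
    func with<R>(_ f: (T) -> R) -> R {
        lock.lock(); defer { lock.unlock() }
        return f(value)
    }
    @discardableResult
    func swap(_ newValue: T) -> T {
        lock.lock(); defer { lock.unlock() }
        let old = value
        value = newValue
        return old
    }
}

final class HeadsetStateExample {
    private let isHeadsetPluggedInSync = LockedBox(false)

    // Intended to be mutated from a single queue; the didSet keeps the
    // thread-safe mirror in step with it.
    private var isHeadsetPluggedInValue = false {
        didSet {
            if isHeadsetPluggedInValue != oldValue {
                isHeadsetPluggedInSync.swap(isHeadsetPluggedInValue)
            }
        }
    }

    // Safe to call from any thread.
    func getIsHeadsetPluggedIn() -> Bool {
        return isHeadsetPluggedInSync.with { $0 }
    }
}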
@@ -324,7 +324,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     private var summaryStateDisposable: Disposable?
     
-    private var isMutedValue: PresentationGroupCallMuteAction = .muted(isPushToTalkActive: false)
+    private var isMutedValue: PresentationGroupCallMuteAction = .muted(isPushToTalkActive: false) {
+        didSet {
+            if self.isMutedValue != oldValue {
+                self.updateProximityMonitoring()
+            }
+        }
+    }
     private let isMutedPromise = ValuePromise<PresentationGroupCallMuteAction>(.muted(isPushToTalkActive: false))
     public var isMuted: Signal<Bool, NoError> {
         return self.isMutedPromise.get()
@@ -339,8 +345,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     
     private let audioOutputStatePromise = Promise<([AudioSessionOutput], AudioSessionOutput?)>(([], nil))
+    private var audioOutputStateDisposable: Disposable?
+    private var actualAudioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
     private var audioOutputStateValue: ([AudioSessionOutput], AudioSessionOutput?) = ([], nil)
-    private var currentAudioOutputValue: AudioSessionOutput = .builtin
+    private var currentSelectedAudioOutputValue: AudioSessionOutput = .builtin
     public var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> {
         return self.audioOutputStatePromise.get()
     }
@@ -453,6 +461,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         
         var didReceiveAudioOutputs = false
         
+        if !audioSession.getIsHeadsetPluggedIn() {
+            self.currentSelectedAudioOutputValue = .speaker
+        }
+        
         self.audioSessionDisposable = audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
             Queue.mainQueue().async {
                 if let strongSelf = self {
@@ -529,6 +541,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             }
         })
         
+        self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
+        |> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
+            guard let strongSelf = self else {
+                return
+            }
+            strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
+        })
+        
         self.groupCallParticipantUpdatesDisposable = (self.account.stateManager.groupCallParticipantUpdates
         |> deliverOnMainQueue).start(next: { [weak self] updates in
             guard let strongSelf = self else {
@@ -655,6 +675,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         if let proximityManagerIndex = self.proximityManagerIndex {
             DeviceProximityManager.shared().remove(proximityManagerIndex)
         }
+        
+        self.audioOutputStateDisposable?.dispose()
     }
     
     private func updateSessionState(internalState: InternalState, audioSessionControl: ManagedAudioSessionControl?) {
@@ -665,10 +687,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.internalState = internalState
         
         if let audioSessionControl = audioSessionControl, previousControl == nil {
-            audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
+            switch self.currentSelectedAudioOutputValue {
+            case .speaker:
+                audioSessionControl.setOutputMode(.custom(self.currentSelectedAudioOutputValue))
+            default:
+                break
+            }
             audioSessionControl.setup(synchronous: true)
-            
-            self.setCurrentAudioOutput(.speaker)
         }
         
         self.audioSessionShouldBeActive.set(true)
@@ -1034,18 +1059,35 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }
     
     public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
-        guard self.currentAudioOutputValue != output else {
+        guard self.currentSelectedAudioOutputValue != output else {
             return
         }
-        self.currentAudioOutputValue = output
+        self.currentSelectedAudioOutputValue = output
         
+        self.updateProximityMonitoring()
+        
+        self.audioOutputStatePromise.set(.single((self.audioOutputStateValue.0, output))
+        |> then(
+            .single(self.audioOutputStateValue)
+            |> delay(1.0, queue: Queue.mainQueue())
+        ))
+        
+        if let audioSessionControl = self.audioSessionControl {
+            audioSessionControl.setOutputMode(.custom(output))
+        }
+    }
+    
+    private func updateProximityMonitoring() {
         var shouldMonitorProximity = false
-        switch output {
+        switch self.currentSelectedAudioOutputValue {
         case .builtin:
             shouldMonitorProximity = true
         default:
             break
         }
+        if case .muted(isPushToTalkActive: true) = self.isMutedValue {
+            shouldMonitorProximity = false
+        }
         
         if shouldMonitorProximity {
             if self.proximityManagerIndex == nil {
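For context: proximity monitoring (turning the screen off when the phone is held to the ear) is now derived from two inputs rather than just the selected output: monitor only when audio is routed to the built-in earpiece, and not while a push-to-talk press is active. A compact restatement of that decision as a pure function, with simplified stand-in types:

// Simplified stand-ins for the call's output and mute types.
enum AudioOutputKind { case builtin, speaker, headphones, bluetooth }
enum MuteAction { case muted(isPushToTalkActive: Bool), unmuted }

// Mirrors updateProximityMonitoring() above: earpiece routing enables the
// proximity sensor, an active push-to-talk press disables it again.
func shouldMonitorProximity(output: AudioOutputKind, mute: MuteAction) -> Bool {
    guard case .builtin = output else { return false }
    if case .muted(isPushToTalkActive: true) = mute { return false }
    return true
}

// Earpiece while simply muted -> true; earpiece while holding push-to-talk -> false.
assert(shouldMonitorProximity(output: .builtin, mute: .muted(isPushToTalkActive: false)) == true)
assert(shouldMonitorProximity(output: .builtin, mute: .muted(isPushToTalkActive: true)) == false)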
@@ -1058,15 +1100,29 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 DeviceProximityManager.shared().remove(proximityManagerIndex)
             }
         }
-        
-        self.audioOutputStatePromise.set(.single((self.audioOutputStateValue.0, output))
-        |> then(
-            .single(self.audioOutputStateValue)
-            |> delay(1.0, queue: Queue.mainQueue())
-        ))
-        
-        if let audioSessionControl = self.audioSessionControl {
-            audioSessionControl.setOutputMode(.custom(output))
+    }
+    
+    private func updateAudioOutputs(availableOutputs: [AudioSessionOutput], currentOutput: AudioSessionOutput?) {
+        if self.actualAudioOutputState?.0 != availableOutputs || self.actualAudioOutputState?.1 != currentOutput {
+            self.actualAudioOutputState = (availableOutputs, currentOutput)
+            
+            self.setupAudioOutputs()
+        }
+    }
+    
+    private func setupAudioOutputs() {
+        if let actualAudioOutputState = self.actualAudioOutputState, let currentOutput = actualAudioOutputState.1 {
+            self.currentSelectedAudioOutputValue = currentOutput
+            
+            switch currentOutput {
+            case .headphones, .speaker:
+                break
+            case let .port(port) where port.type == .bluetooth:
+                break
+            default:
+                //self.setCurrentAudioOutput(.speaker)
+                break
+            }
         }
     }
     
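For context: setCurrentAudioOutput publishes the requested output immediately and then, after one second, re-publishes audioOutputStateValue, so the UI reflects the user's choice at once but settles back to whatever route the system actually reports; updateAudioOutputs and setupAudioOutputs feed that actual state back in. A rough Foundation-only sketch of the optimistic-publish-then-reconcile idea (the project expresses it with SwiftSignalKit's .single / then / delay; the names below are illustrative):

import Foundation

// Tiny observable box standing in for the project's Promise of output state.
final class ObservableValue<T> {
    private(set) var value: T
    var onChange: ((T) -> Void)?
    init(_ value: T) { self.value = value }
    func set(_ newValue: T) {
        value = newValue
        onChange?(newValue)
    }
}

enum Route { case builtin, speaker, headphones }

let actualRoute = ObservableValue(Route.builtin)     // what the audio session reports
let publishedRoute = ObservableValue(Route.builtin)  // what the UI observes

func selectRoute(_ requested: Route) {
    // Optimistically publish the requested route so the UI updates at once...
    publishedRoute.set(requested)
    // ...then re-publish the actually reported route after a grace period,
    // mirroring `.single(requested) |> then(.single(actual) |> delay(1.0, ...))`.
    DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
        publishedRoute.set(actualRoute.value)
    }
}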
@@ -147,6 +147,16 @@ func mergeGroupOrChannel(lhs: Peer?, rhs: Api.Chat) -> Peer? {
             } else {
                 let _ = channelFlags.remove(.isVerified)
             }
+            if (flags & Int32(1 << 23)) != 0 {
+                channelFlags.insert(.hasVoiceChat)
+            } else {
+                let _ = channelFlags.remove(.hasVoiceChat)
+            }
+            if (flags & Int32(1 << 24)) != 0 {
+                channelFlags.insert(.hasActiveVoiceChat)
+            } else {
+                let _ = channelFlags.remove(.hasActiveVoiceChat)
+            }
             var info = lhs.info
             switch info {
             case .broadcast:
@@ -178,6 +188,16 @@ func mergeChannel(lhs: TelegramChannel?, rhs: TelegramChannel) -> TelegramChanne
     } else {
         let _ = channelFlags.remove(.isVerified)
     }
+    if rhs.flags.contains(.hasVoiceChat) {
+        channelFlags.insert(.hasVoiceChat)
+    } else {
+        let _ = channelFlags.remove(.hasVoiceChat)
+    }
+    if rhs.flags.contains(.hasActiveVoiceChat) {
+        channelFlags.insert(.hasActiveVoiceChat)
+    } else {
+        let _ = channelFlags.remove(.hasActiveVoiceChat)
+    }
     var info = lhs.info
     switch info {
     case .broadcast:
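For context: the two merge helpers translate the API's voice chat bits into the local channel flag set, with bit 23 mapping to .hasVoiceChat and bit 24 to .hasActiveVoiceChat, and each flag explicitly removed when its bit is absent so repeated merges stay idempotent. A self-contained sketch of that bit-to-OptionSet translation; the option set below is illustrative, not the project's actual flag type:

// Illustrative stand-in for the channel flag option set.
struct ChannelFlags: OptionSet {
    let rawValue: Int32
    static let hasVoiceChat       = ChannelFlags(rawValue: 1 << 0)
    static let hasActiveVoiceChat = ChannelFlags(rawValue: 1 << 1)
}

// Mirrors the hunks above: each API bit either inserts or removes its flag.
func applyVoiceChatBits(apiFlags: Int32, to channelFlags: inout ChannelFlags) {
    if (apiFlags & Int32(1 << 23)) != 0 {
        channelFlags.insert(.hasVoiceChat)
    } else {
        channelFlags.remove(.hasVoiceChat)
    }
    if (apiFlags & Int32(1 << 24)) != 0 {
        channelFlags.insert(.hasActiveVoiceChat)
    } else {
        channelFlags.remove(.hasActiveVoiceChat)
    }
}

var flags: ChannelFlags = []
applyVoiceChatBits(apiFlags: Int32(1 << 23), to: &flags)
// flags now contains .hasVoiceChat but not .hasActiveVoiceChat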