[WIP] Conference calls

Isaac 2025-01-28 20:50:52 +04:00
parent 39c96712bd
commit 846e495d12
20 changed files with 875 additions and 336 deletions

View File

@ -93,6 +93,7 @@ public enum ContactListFilter {
public final class ContactMultiselectionControllerParams {
public let context: AccountContext
public let updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?
public let title: String?
public let mode: ContactMultiselectionControllerMode
public let options: Signal<[ContactListAdditionalOption], NoError>
public let filters: [ContactListFilter]
@ -106,9 +107,10 @@ public final class ContactMultiselectionControllerParams {
public let openProfile: ((EnginePeer) -> Void)?
public let sendMessage: ((EnginePeer) -> Void)?
public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, mode: ContactMultiselectionControllerMode, options: Signal<[ContactListAdditionalOption], NoError> = .single([]), filters: [ContactListFilter] = [.excludeSelf], onlyWriteable: Bool = false, isGroupInvitation: Bool = false, isPeerEnabled: ((EnginePeer) -> Bool)? = nil, attemptDisabledItemSelection: ((EnginePeer, ChatListDisabledPeerReason) -> Void)? = nil, alwaysEnabled: Bool = false, limit: Int32? = nil, reachedLimit: ((Int32) -> Void)? = nil, openProfile: ((EnginePeer) -> Void)? = nil, sendMessage: ((EnginePeer) -> Void)? = nil) {
public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, title: String? = nil, mode: ContactMultiselectionControllerMode, options: Signal<[ContactListAdditionalOption], NoError> = .single([]), filters: [ContactListFilter] = [.excludeSelf], onlyWriteable: Bool = false, isGroupInvitation: Bool = false, isPeerEnabled: ((EnginePeer) -> Bool)? = nil, attemptDisabledItemSelection: ((EnginePeer, ChatListDisabledPeerReason) -> Void)? = nil, alwaysEnabled: Bool = false, limit: Int32? = nil, reachedLimit: ((Int32) -> Void)? = nil, openProfile: ((EnginePeer) -> Void)? = nil, sendMessage: ((EnginePeer) -> Void)? = nil) {
self.context = context
self.updatedPresentationData = updatedPresentationData
self.title = title
self.mode = mode
self.options = options
self.filters = filters
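A minimal sketch of a call site exercising the new title parameter together with the existing isPeerEnabled filter (local names such as context and presentationData are assumed to exist; this mirrors the CallController usage further down in this commit):

    // Hypothetical invite picker; only plain users are selectable.
    let controller = context.sharedContext.makeContactMultiselectionController(ContactMultiselectionControllerParams(
        context: context,
        updatedPresentationData: (initial: presentationData, signal: .single(presentationData)),
        title: "Invite Members",
        mode: .peerSelection(searchChatList: true, searchGroups: false, searchChannels: false),
        isPeerEnabled: { peer in
            if case .user = peer {
                return true
            }
            return false
        }
    ))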

View File

@ -173,7 +173,7 @@ public protocol PresentationCall: AnyObject {
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func debugInfo() -> Signal<(String, String), NoError>
func upgradeToConference(completion: @escaping (PresentationGroupCall) -> Void) -> Disposable
func upgradeToConference(invitePeerIds: [EnginePeer.Id], completion: @escaping (PresentationGroupCall) -> Void) -> Disposable
func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void)
}
@ -413,6 +413,8 @@ public protocol PresentationGroupCall: AnyObject {
var schedulePending: Bool { get }
var isStream: Bool { get }
var isConference: Bool { get }
var encryptionKeyValue: Data? { get }
var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> { get }
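A sketch of how the reworked upgrade API and the new conference properties might be driven from call UI code (assuming call is a PresentationCall, selectedPeerIds comes from a peer picker, and the returned disposable is retained by the caller):

    let upgradeDisposable = call.upgradeToConference(invitePeerIds: selectedPeerIds, completion: { groupCall in
        // The resulting group call reports whether it is a conference and exposes
        // its key material (a hash of the encryption key) for the verification-emoji UI.
        let _ = groupCall.isConference
        let _ = groupCall.encryptionKeyValue
    })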

View File

@ -388,7 +388,7 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
}
}
private func contactListNodeEntries(accountPeer: EnginePeer?, peers: [ContactListPeer], presences: [EnginePeer.Id: EnginePeer.Presence], presentation: ContactListPresentation, selectionState: ContactListNodeGroupSelectionState?, theme: PresentationTheme, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, disabledPeerIds: Set<EnginePeer.Id>, peerRequiresPremiumForMessaging: [EnginePeer.Id: Bool], peersWithStories: [EnginePeer.Id: PeerStoryStats], authorizationStatus: AccessType, warningSuppressed: (Bool, Bool), displaySortOptions: Bool, displayCallIcons: Bool, storySubscriptions: EngineStorySubscriptions?, topPeers: [EnginePeer], topPeersPresentation: ContactListPresentation.TopPeers, interaction: ContactListNodeInteraction) -> [ContactListNodeEntry] {
private func contactListNodeEntries(accountPeer: EnginePeer?, peers: [ContactListPeer], presences: [EnginePeer.Id: EnginePeer.Presence], presentation: ContactListPresentation, selectionState: ContactListNodeGroupSelectionState?, theme: PresentationTheme, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, disabledPeerIds: Set<EnginePeer.Id>, peerRequiresPremiumForMessaging: [EnginePeer.Id: Bool], peersWithStories: [EnginePeer.Id: PeerStoryStats], authorizationStatus: AccessType, warningSuppressed: (Bool, Bool), displaySortOptions: Bool, displayCallIcons: Bool, storySubscriptions: EngineStorySubscriptions?, topPeers: [EnginePeer], topPeersPresentation: ContactListPresentation.TopPeers, isPeerEnabled: ((EnginePeer) -> Bool)?, interaction: ContactListNodeInteraction) -> [ContactListNodeEntry] {
var entries: [ContactListNodeEntry] = []
var commonHeader: ListViewItemHeader?
@ -778,6 +778,10 @@ private func contactListNodeEntries(accountPeer: EnginePeer?, peers: [ContactLis
if requiresPremiumForMessaging {
enabled = false
}
if let isPeerEnabled, !isPeerEnabled(EnginePeer(peer)) {
enabled = false
}
default:
enabled = true
}
@ -1638,7 +1642,7 @@ public final class ContactListNode: ASDisplayNode {
peers.append(.deviceContact(stableId, contact.0))
}
let entries = contactListNodeEntries(accountPeer: nil, peers: peers, presences: localPeersAndStatuses.1, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, peerRequiresPremiumForMessaging: peerRequiresPremiumForMessaging, peersWithStories: [:], authorizationStatus: .allowed, warningSuppressed: (true, true), displaySortOptions: false, displayCallIcons: displayCallIcons, storySubscriptions: nil, topPeers: [], topPeersPresentation: .none, interaction: interaction)
let entries = contactListNodeEntries(accountPeer: nil, peers: peers, presences: localPeersAndStatuses.1, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, peerRequiresPremiumForMessaging: peerRequiresPremiumForMessaging, peersWithStories: [:], authorizationStatus: .allowed, warningSuppressed: (true, true), displaySortOptions: false, displayCallIcons: displayCallIcons, storySubscriptions: nil, topPeers: [], topPeersPresentation: .none, isPeerEnabled: isPeerEnabled, interaction: interaction)
let previous = previousEntries.swap(entries)
return .single(preparedContactListNodeTransition(context: context, presentationData: presentationData, from: previous ?? [], to: entries, interaction: interaction, firstTime: previous == nil, isEmpty: false, generateIndexSections: generateSections, animation: .none, isSearch: isSearch))
}
@ -1840,7 +1844,7 @@ public final class ContactListNode: ASDisplayNode {
isEmpty = true
}
let entries = contactListNodeEntries(accountPeer: view.1, peers: peers, presences: presences, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, peerRequiresPremiumForMessaging: view.2, peersWithStories: view.3, authorizationStatus: authorizationStatus, warningSuppressed: warningSuppressed, displaySortOptions: displaySortOptions, displayCallIcons: displayCallIcons, storySubscriptions: storySubscriptions, topPeers: topPeers.map { $0.peer }, topPeersPresentation: displayTopPeers, interaction: interaction)
let entries = contactListNodeEntries(accountPeer: view.1, peers: peers, presences: presences, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, peerRequiresPremiumForMessaging: view.2, peersWithStories: view.3, authorizationStatus: authorizationStatus, warningSuppressed: warningSuppressed, displaySortOptions: displaySortOptions, displayCallIcons: displayCallIcons, storySubscriptions: storySubscriptions, topPeers: topPeers.map { $0.peer }, topPeersPresentation: displayTopPeers, isPeerEnabled: isPeerEnabled, interaction: interaction)
let previous = previousEntries.swap(entries)
let previousSelection = previousSelectionState.swap(selectionState)
let previousPendingRemovalPeerIds = previousPendingRemovalPeerIds.swap(pendingRemovalPeerIds)

View File

@ -1444,7 +1444,7 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
transition.updateFrame(node: strongSelf.titleNode, frame: titleFrame)
strongSelf.titleNode.alpha = item.enabled ? 1.0 : 0.4
strongSelf.statusNode.textNode.alpha = item.enabled ? 1.0 : 1.0
strongSelf.statusNode.textNode.alpha = item.enabled ? 1.0 : 0.4
strongSelf.statusNode.visibilityRect = strongSelf.visibilityStatus == false ? CGRect.zero : CGRect.infinite
let _ = statusApply(TextNodeWithEntities.Arguments(

View File

@ -115,6 +115,7 @@ swift_library(
"//submodules/TelegramUI/Components/LottieComponent",
"//submodules/TelegramUI/Components/Stories/PeerListItemComponent",
"//submodules/TelegramUI/Components/BackButtonComponent",
"//submodules/TelegramUI/Components/AlertComponent",
"//submodules/DirectMediaImageCache",
"//submodules/FastBlur",
],

View File

@ -4,6 +4,7 @@
#import <Foundation/Foundation.h>
NSString *randomCallsEmoji();
NSData *dataForEmojiRawKey(NSData *data);
NSArray<NSString *> *stringForEmojiHashOfData(NSData *data, NSInteger count);
#endif /* CallsEmoji_h */

View File

@ -1,6 +1,8 @@
#import <Foundation/Foundation.h>
#import <CallsEmoji/CallsEmoji.h>
#import <CommonCrypto/CommonCrypto.h>
static int32_t positionExtractor(uint8_t *bytes, int32_t i, int32_t count) {
int offset = i * 8;
int64_t num = (((int64_t)bytes[offset] & 0x7F) << 56) | (((int64_t)bytes[offset+1] & 0xFF) << 48) | (((int64_t)bytes[offset+2] & 0xFF) << 40) | (((int64_t)bytes[offset+3] & 0xFF) << 32) | (((int64_t)bytes[offset+4] & 0xFF) << 24) | (((int64_t)bytes[offset+5] & 0xFF) << 16) | (((int64_t)bytes[offset+6] & 0xFF) << 8) | (((int64_t)bytes[offset+7] & 0xFF));
@ -16,6 +18,21 @@ NSString *randomCallsEmoji() {
return emojis[arc4random() % emojis.count];
}
NSData *dataForEmojiRawKey(NSData *data) {
if (!data) {
return nil; // Return nil if the input data is nil
}
// Create a buffer to hold the hash
uint8_t hash[CC_SHA256_DIGEST_LENGTH];
// Compute the SHA-256 hash
CC_SHA256(data.bytes, (CC_LONG)data.length, hash);
// Return the hash as NSData
return [NSData dataWithBytes:hash length:CC_SHA256_DIGEST_LENGTH];
}
NSArray<NSString *> *stringForEmojiHashOfData(NSData *data, NSInteger count) {
if (data.length != 32) {
return @[];
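For reference, a Swift sketch of the transform dataForEmojiRawKey performs, written with CryptoKit; this is an equivalent illustration, not code from this commit:

    import CryptoKit
    import Foundation

    // SHA-256 of the raw conference encryption key, matching the CC_SHA256 call above.
    func emojiRawKey(from key: Data) -> Data {
        return Data(SHA256.hash(data: key))
    }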

View File

@ -425,15 +425,18 @@ public final class CallController: ViewController {
}
final class AnimateOutToGroupChat {
let containerView: UIView
let incomingPeerId: EnginePeer.Id
let incomingVideoLayer: CALayer?
let incomingVideoPlaceholder: VideoSource.Output?
init(
containerView: UIView,
incomingPeerId: EnginePeer.Id,
incomingVideoLayer: CALayer?,
incomingVideoPlaceholder: VideoSource.Output?
) {
self.containerView = containerView
self.incomingPeerId = incomingPeerId
self.incomingVideoLayer = incomingVideoLayer
self.incomingVideoPlaceholder = incomingVideoPlaceholder
@ -487,6 +490,7 @@ public final class CallController: ViewController {
let controller = self.call.context.sharedContext.makeContactMultiselectionController(ContactMultiselectionControllerParams(
context: self.call.context,
updatedPresentationData: (initial: presentationData, signal: .single(presentationData)),
title: "Invite Members",
mode: .peerSelection(searchChatList: true, searchGroups: false, searchChannels: false),
isPeerEnabled: { peer in
guard case let .user(user) = peer else {
@ -516,21 +520,19 @@ public final class CallController: ViewController {
return
}
controller?.displayProgress = true
let call = self.call
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
controller?.dismiss()
}
controller?.dismiss()
let invitePeerIds = peerIds.compactMap { item -> EnginePeer.Id? in
if case let .peer(peerId) = item {
return peerId
} else {
return nil
}
}
let _ = self.call.upgradeToConference(completion: { [weak call] _ in
guard let call else {
return
}
for peerId in peerIds {
if case let .peer(peerId) = peerId {
let _ = (call as? PresentationCallImpl)?.requestAddToConference(peerId: peerId)
}
}
let _ = self.call.upgradeToConference(invitePeerIds: invitePeerIds, completion: { _ in
})
})

View File

@ -689,6 +689,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
let takenIncomingVideoLayer = self.callScreen.takeIncomingVideoLayer()
return CallController.AnimateOutToGroupChat(
containerView: self.containerView,
incomingPeerId: self.call.peerId,
incomingVideoLayer: takenIncomingVideoLayer?.0,
incomingVideoPlaceholder: takenIncomingVideoLayer?.1

View File

@ -15,6 +15,154 @@ import AccountContext
import DeviceProximity
import PhoneNumberFormat
final class SharedCallAudioContext {
let audioDevice: OngoingCallContext.AudioDevice?
let callKitIntegration: CallKitIntegration?
private var audioSessionDisposable: Disposable?
private var audioSessionShouldBeActiveDisposable: Disposable?
private var isAudioSessionActiveDisposable: Disposable?
private(set) var audioSessionControl: ManagedAudioSessionControl?
private let isAudioSessionActivePromise = Promise<Bool>(false)
private var isAudioSessionActive: Signal<Bool, NoError> {
return self.isAudioSessionActivePromise.get()
}
private let audioOutputStatePromise = Promise<([AudioSessionOutput], AudioSessionOutput?)>(([], nil))
private var audioOutputStateValue: ([AudioSessionOutput], AudioSessionOutput?) = ([], nil)
private var currentAudioOutputValue: AudioSessionOutput = .builtin
private var didSetCurrentAudioOutputValue: Bool = false
var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> {
return self.audioOutputStatePromise.get()
}
private let audioSessionShouldBeActive = Promise<Bool>(true)
init(audioSession: ManagedAudioSession, callKitIntegration: CallKitIntegration?) {
self.callKitIntegration = callKitIntegration
self.audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
var didReceiveAudioOutputs = false
self.audioSessionDisposable = audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
Queue.mainQueue().async {
guard let self else {
return
}
let previousControl = self.audioSessionControl
self.audioSessionControl = control
if previousControl == nil, let audioSessionControl = self.audioSessionControl {
if let callKitIntegration = self.callKitIntegration {
if self.didSetCurrentAudioOutputValue {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
}
} else {
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
audioSessionControl.setup(synchronous: true)
}
}
}
}, deactivate: { [weak self] _ in
return Signal { subscriber in
Queue.mainQueue().async {
if let self {
self.isAudioSessionActivePromise.set(.single(false))
self.audioSessionControl = nil
}
subscriber.putCompletion()
}
return EmptyDisposable
}
}, availableOutputsChanged: { [weak self] availableOutputs, currentOutput in
Queue.mainQueue().async {
guard let self else {
return
}
self.audioOutputStateValue = (availableOutputs, currentOutput)
if let currentOutput = currentOutput {
self.currentAudioOutputValue = currentOutput
self.didSetCurrentAudioOutputValue = true
}
var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
if !didReceiveAudioOutputs {
didReceiveAudioOutputs = true
if currentOutput == .speaker {
signal = .single((availableOutputs, .builtin))
|> then(
signal
|> delay(1.0, queue: Queue.mainQueue())
)
}
}
self.audioOutputStatePromise.set(signal)
}
})
self.audioSessionShouldBeActive.set(.single(true))
self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
if value {
if let audioSessionControl = self.audioSessionControl {
let audioSessionActive: Signal<Bool, NoError>
if let callKitIntegration = self.callKitIntegration {
audioSessionActive = callKitIntegration.audioSessionActive
} else {
audioSessionControl.activate({ _ in })
audioSessionActive = .single(true)
}
self.isAudioSessionActivePromise.set(audioSessionActive)
} else {
self.isAudioSessionActivePromise.set(.single(false))
}
} else {
self.isAudioSessionActivePromise.set(.single(false))
}
})
self.isAudioSessionActiveDisposable = (self.isAudioSessionActive
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
let _ = self
})
}
deinit {
self.audioSessionDisposable?.dispose()
self.audioSessionShouldBeActiveDisposable?.dispose()
self.isAudioSessionActiveDisposable?.dispose()
}
func setCurrentAudioOutput(_ output: AudioSessionOutput) {
guard self.currentAudioOutputValue != output else {
return
}
self.currentAudioOutputValue = output
self.didSetCurrentAudioOutputValue = true
self.audioOutputStatePromise.set(.single((self.audioOutputStateValue.0, output))
|> then(
.single(self.audioOutputStateValue)
|> delay(1.0, queue: Queue.mainQueue())
))
if let audioSessionControl = self.audioSessionControl {
if let callKitIntegration = self.callKitIntegration {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
} else {
audioSessionControl.setOutputMode(.custom(output))
}
}
}
}
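A minimal sketch of the intended ownership, assuming audioSession and callKitIntegration are supplied by the surrounding call manager (as in PresentationCallImpl below):

    // Hypothetical wiring: one shared audio context serves both the 1:1 call and
    // the conference it upgrades into, so per-call audio session handling is skipped.
    let sharedAudioContext = SharedCallAudioContext(audioSession: audioSession, callKitIntegration: callKitIntegration)

    // Output routing goes through the shared context rather than per-call session control.
    sharedAudioContext.setCurrentAudioOutput(.speaker)

    // The same instance is later handed to the group call, e.g.
    // PresentationGroupCallImpl(..., sharedAudioContext: sharedAudioContext).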
public final class PresentationCallImpl: PresentationCall {
public let context: AccountContext
private let audioSession: ManagedAudioSession
@ -43,6 +191,8 @@ public final class PresentationCallImpl: PresentationCall {
private let currentNetworkType: NetworkType
private let updatedNetworkType: Signal<NetworkType, NoError>
private var sharedAudioContext: SharedCallAudioContext?
private var sessionState: CallSession?
private var callContextState: OngoingCallContextState?
private var ongoingContext: OngoingCallContext?
@ -50,7 +200,6 @@ public final class PresentationCallImpl: PresentationCall {
private var ongoingContextIsFailedDisposable: Disposable?
private var ongoingContextIsDroppedDisposable: Disposable?
private var didDropCall = false
private var sharedAudioDevice: OngoingCallContext.AudioDevice?
private var requestedVideoAspect: Float?
private var reception: Int32?
private var receptionDisposable: Disposable?
@ -90,6 +239,10 @@ public final class PresentationCallImpl: PresentationCall {
private var currentAudioOutputValue: AudioSessionOutput = .builtin
private var didSetCurrentAudioOutputValue: Bool = false
public var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> {
if let sharedAudioContext = self.sharedAudioContext {
return sharedAudioContext.audioOutputState
}
return self.audioOutputStatePromise.get()
}
@ -157,6 +310,8 @@ public final class PresentationCallImpl: PresentationCall {
return self.conferenceStatePromise.get()
}
public private(set) var pendingInviteToConferencePeerIds: [EnginePeer.Id] = []
private var localVideoEndpointId: String?
private var remoteVideoEndpointId: String?
@ -242,91 +397,94 @@ public final class PresentationCallImpl: PresentationCall {
}
})
self.audioSessionDisposable = audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
Queue.mainQueue().async {
if let strongSelf = self {
if let sessionState = strongSelf.sessionState {
strongSelf.updateSessionState(sessionState: sessionState, callContextState: strongSelf.callContextState, reception: strongSelf.reception, audioSessionControl: control)
} else {
strongSelf.audioSessionControl = control
}
}
}
}, deactivate: { [weak self] _ in
return Signal { subscriber in
if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
self.sharedAudioContext = nil
} else {
self.sharedAudioContext = SharedCallAudioContext(audioSession: audioSession, callKitIntegration: callKitIntegration)
}
if let _ = self.sharedAudioContext {
} else {
self.audioSessionDisposable = audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
Queue.mainQueue().async {
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(false)
if let sessionState = strongSelf.sessionState {
strongSelf.updateSessionState(sessionState: sessionState, callContextState: strongSelf.callContextState, reception: strongSelf.reception, audioSessionControl: nil)
strongSelf.updateSessionState(sessionState: sessionState, callContextState: strongSelf.callContextState, reception: strongSelf.reception, audioSessionControl: control)
} else {
strongSelf.audioSessionControl = nil
strongSelf.audioSessionControl = control
}
}
subscriber.putCompletion()
}
return EmptyDisposable
}
}, availableOutputsChanged: { [weak self] availableOutputs, currentOutput in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
if let currentOutput = currentOutput {
strongSelf.currentAudioOutputValue = currentOutput
strongSelf.didSetCurrentAudioOutputValue = true
}
var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
if !didReceiveAudioOutputs {
didReceiveAudioOutputs = true
if currentOutput == .speaker {
signal = .single((availableOutputs, .builtin))
|> then(
signal
|> delay(1.0, queue: Queue.mainQueue())
)
}
}
strongSelf.audioOutputStatePromise.set(signal)
}
})
self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
if value {
if let audioSessionControl = strongSelf.audioSessionControl {
let audioSessionActive: Signal<Bool, NoError>
if let callKitIntegration = strongSelf.callKitIntegration {
audioSessionActive = callKitIntegration.audioSessionActive
} else {
audioSessionControl.activate({ _ in })
audioSessionActive = .single(true)
}, deactivate: { [weak self] _ in
return Signal { subscriber in
Queue.mainQueue().async {
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(false)
if let sessionState = strongSelf.sessionState {
strongSelf.updateSessionState(sessionState: sessionState, callContextState: strongSelf.callContextState, reception: strongSelf.reception, audioSessionControl: nil)
} else {
strongSelf.audioSessionControl = nil
}
}
subscriber.putCompletion()
}
return EmptyDisposable
}
}, availableOutputsChanged: { [weak self] availableOutputs, currentOutput in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
if let currentOutput = currentOutput {
strongSelf.currentAudioOutputValue = currentOutput
strongSelf.didSetCurrentAudioOutputValue = true
}
var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
if !didReceiveAudioOutputs {
didReceiveAudioOutputs = true
if currentOutput == .speaker {
signal = .single((availableOutputs, .builtin))
|> then(
signal
|> delay(1.0, queue: Queue.mainQueue())
)
}
}
strongSelf.audioOutputStatePromise.set(signal)
}
})
self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
if value {
if let audioSessionControl = strongSelf.audioSessionControl {
let audioSessionActive: Signal<Bool, NoError>
if let callKitIntegration = strongSelf.callKitIntegration {
audioSessionActive = callKitIntegration.audioSessionActive
} else {
audioSessionControl.activate({ _ in })
audioSessionActive = .single(true)
}
strongSelf.audioSessionActive.set(audioSessionActive)
} else {
strongSelf.audioSessionActive.set(.single(false))
}
strongSelf.audioSessionActive.set(audioSessionActive)
} else {
strongSelf.audioSessionActive.set(.single(false))
}
} else {
strongSelf.audioSessionActive.set(.single(false))
}
}
})
if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
self.sharedAudioDevice = nil
} else {
self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
})
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(value)
}
})
}
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(value)
}
})
let screencastCapturer = OngoingCallVideoCapturer(isCustom: true)
self.screencastCapturer = screencastCapturer
@ -414,9 +572,9 @@ public final class PresentationCallImpl: PresentationCall {
let reception = self.reception
if previousControl != nil && audioSessionControl == nil {
/*if previousControl != nil && audioSessionControl == nil {
print("updateSessionState \(sessionState.state) \(audioSessionControl != nil)")
}
}*/
var presentationState: PresentationCallState?
@ -433,14 +591,16 @@ public final class PresentationCallImpl: PresentationCall {
}
}
if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
if let callKitIntegration = self.callKitIntegration {
if self.didSetCurrentAudioOutputValue {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
if self.sharedAudioContext == nil {
if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
if let callKitIntegration = self.callKitIntegration {
if self.didSetCurrentAudioOutputValue {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
}
} else {
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
audioSessionControl.setup(synchronous: true)
}
} else {
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
audioSessionControl.setup(synchronous: true)
}
}
@ -637,11 +797,13 @@ public final class PresentationCallImpl: PresentationCall {
encryptionKey: (key, 1),
conferenceFromCallId: conferenceFromCallId,
isConference: true,
sharedAudioDevice: self.sharedAudioDevice
sharedAudioContext: self.sharedAudioContext
)
self.conferenceCallImpl = conferenceCall
conferenceCall.upgradedConferenceCall = self
conferenceCall.setInvitedPeers(self.pendingInviteToConferencePeerIds)
conferenceCall.setIsMuted(action: self.isMutedValue ? .muted(isPushToTalkActive: false) : .unmuted)
if let videoCapturer = self.videoCapturer {
conferenceCall.requestVideo(capturer: videoCapturer)
@ -746,12 +908,19 @@ public final class PresentationCallImpl: PresentationCall {
self.callKitIntegration?.reportOutgoingCallConnected(uuid: sessionState.id, at: Date())
}
} else {
if let _ = audioSessionControl, !wasActive || previousControl == nil {
if (self.sharedAudioContext != nil || audioSessionControl != nil), !wasActive || (self.sharedAudioContext == nil && previousControl == nil) {
let logName = "\(id.id)_\(id.accessHash)"
let updatedConnections = connections
let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, customParameters: customParameters, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec, audioDevice: self.sharedAudioDevice)
let contextAudioSessionActive: Signal<Bool, NoError>
if self.sharedAudioContext != nil {
contextAudioSessionActive = .single(true)
} else {
contextAudioSessionActive = self.audioSessionActive.get()
}
let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, customParameters: customParameters, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: contextAudioSessionActive, logName: logName, preferredVideoCodec: self.preferredVideoCodec, audioDevice: self.sharedAudioContext?.audioDevice)
self.ongoingContext = ongoingContext
ongoingContext.setIsMuted(self.isMutedValue)
if let requestedVideoAspect = self.requestedVideoAspect {
@ -957,7 +1126,7 @@ public final class PresentationCallImpl: PresentationCall {
}
if tone != self.currentTone {
self.currentTone = tone
self.sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
self.sharedAudioContext?.audioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
return OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
})
}
@ -967,7 +1136,6 @@ public final class PresentationCallImpl: PresentationCall {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
}
self.sharedAudioDevice?.setIsAudioSessionActive(value)
}
public func answer() {
@ -1143,13 +1311,29 @@ public final class PresentationCallImpl: PresentationCall {
self.videoCapturer?.setIsVideoEnabled(!isPaused)
}
public func upgradeToConference(completion: @escaping (PresentationGroupCall) -> Void) -> Disposable {
public func upgradeToConference(invitePeerIds: [EnginePeer.Id], completion: @escaping (PresentationGroupCall) -> Void) -> Disposable {
if let conferenceCall = self.conferenceCall {
completion(conferenceCall)
for peerId in invitePeerIds {
let _ = self.requestAddToConference(peerId: peerId)
}
return EmptyDisposable
}
let index = self.upgradedToConferenceCompletions.add(completion)
self.pendingInviteToConferencePeerIds = invitePeerIds
let index = self.upgradedToConferenceCompletions.add({ [weak self] call in
completion(call)
if let self {
for peerId in invitePeerIds {
let _ = self.requestAddToConference(peerId: peerId)
}
}
})
self.conferenceStateValue = .preparing
self.callSessionManager.createConferenceIfNecessary(internalId: self.internalId)
return ActionDisposable { [weak self] in
@ -1162,7 +1346,7 @@ public final class PresentationCallImpl: PresentationCall {
}
}
public func requestAddToConference(peerId: EnginePeer.Id) -> Disposable {
private func requestAddToConference(peerId: EnginePeer.Id) -> Disposable {
var conferenceCall: (conference: GroupCallReference, encryptionKey: Data)?
if let sessionState = self.sessionState {
switch sessionState.state {
@ -1189,6 +1373,11 @@ public final class PresentationCallImpl: PresentationCall {
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
if let sharedAudioContext = self.sharedAudioContext {
sharedAudioContext.setCurrentAudioOutput(output)
return
}
guard self.currentAudioOutputValue != output else {
return
}

View File

@ -706,7 +706,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
encryptionKey: nil,
conferenceFromCallId: nil,
isConference: false,
sharedAudioDevice: nil
sharedAudioContext: nil
)
call.schedule(timestamp: timestamp)
@ -749,7 +749,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
encryptionKey: nil,
conferenceFromCallId: nil,
isConference: false,
sharedAudioDevice: nil
sharedAudioContext: nil
)
strongSelf.updateCurrentGroupCall(call)
strongSelf.currentGroupCallPromise.set(.single(call))
@ -937,7 +937,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
encryptionKey: nil,
conferenceFromCallId: nil,
isConference: false,
sharedAudioDevice: nil
sharedAudioContext: nil
)
strongSelf.updateCurrentGroupCall(call)
strongSelf.currentGroupCallPromise.set(.single(call))

View File

@ -16,6 +16,7 @@ import AccountContext
import DeviceProximity
import UndoUI
import TemporaryCachedPeerDataManager
import CallsEmoji
private extension GroupCallParticipantsContext.Participant {
var allSsrcs: Set<UInt32> {
@ -818,6 +819,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var audioOutputStateValue: ([AudioSessionOutput], AudioSessionOutput?) = ([], nil)
private var currentSelectedAudioOutputValue: AudioSessionOutput = .builtin
public var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> {
if let sharedAudioContext = self.sharedAudioContext {
return sharedAudioContext.audioOutputState
}
return self.audioOutputStatePromise.get()
}
@ -995,10 +999,17 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
public let isStream: Bool
private let encryptionKey: (key: Data, fingerprint: Int64)?
private let sharedAudioDevice: OngoingCallContext.AudioDevice?
private let sharedAudioContext: SharedCallAudioContext?
private let conferenceFromCallId: CallId?
private let isConference: Bool
public let isConference: Bool
public var encryptionKeyValue: Data? {
if let key = self.encryptionKey?.key {
return dataForEmojiRawKey(key)
} else {
return nil
}
}
var internal_isRemoteConnected = Promise<Bool>()
private var internal_isRemoteConnectedDisposable: Disposable?
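A sketch of how UI code might turn encryptionKeyValue into verification emoji using the stringForEmojiHashOfData helper declared in CallsEmoji.h above (the emoji count of 4 is an assumption, not taken from this commit):

    // Hypothetical fingerprint rendering; groupCall is a PresentationGroupCall.
    if let keyData = groupCall.encryptionKeyValue {
        // keyData is the 32-byte SHA-256 of the conference key, as produced by dataForEmojiRawKey.
        let emojiStrings = stringForEmojiHashOfData(keyData, 4)
        print(emojiStrings.joined(separator: " "))
    }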
@ -1024,7 +1035,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
encryptionKey: (key: Data, fingerprint: Int64)?,
conferenceFromCallId: CallId?,
isConference: Bool,
sharedAudioDevice: OngoingCallContext.AudioDevice?
sharedAudioContext: SharedCallAudioContext?
) {
self.account = accountContext.account
self.accountContext = accountContext
@ -1053,9 +1064,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.conferenceFromCallId = conferenceFromCallId
self.isConference = isConference
self.encryptionKey = encryptionKey
self.sharedAudioDevice = sharedAudioDevice
self.sharedAudioContext = sharedAudioContext
if self.sharedAudioDevice == nil && !accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
if self.sharedAudioContext == nil && !accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
var didReceiveAudioOutputs = false
if !audioSession.getIsHeadsetPluggedIn() {
@ -1139,20 +1150,22 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
})
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(value)
}
})
self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
|> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
guard let strongSelf = self else {
return
}
strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
})
if self.sharedAudioContext == nil {
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
strongSelf.updateIsAudioSessionActive(value)
}
})
self.audioOutputStateDisposable = (self.audioOutputStatePromise.get()
|> deliverOnMainQueue).start(next: { [weak self] availableOutputs, currentOutput in
guard let strongSelf = self else {
return
}
strongSelf.updateAudioOutputs(availableOutputs: availableOutputs, currentOutput: currentOutput)
})
}
}
self.groupCallParticipantUpdatesDisposable = (self.account.stateManager.groupCallParticipantUpdates
@ -1768,7 +1781,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.internalState = internalState
self.internalStatePromise.set(.single(internalState))
if !self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2, let audioSessionControl = audioSessionControl, previousControl == nil {
if self.sharedAudioContext == nil, !self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2, let audioSessionControl = audioSessionControl, previousControl == nil {
if self.isStream {
audioSessionControl.setOutputMode(.system)
} else {
@ -1846,8 +1859,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
var encryptionKey: Data?
encryptionKey = self.encryptionKey?.key
let contextAudioSessionActive: Signal<Bool, NoError>
if self.sharedAudioContext != nil {
contextAudioSessionActive = .single(true)
} else {
contextAudioSessionActive = self.audioSessionActive.get()
}
genericCallContext = .call(OngoingGroupCallContext(audioSessionActive: self.audioSessionActive.get(), video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
genericCallContext = .call(OngoingGroupCallContext(audioSessionActive: contextAudioSessionActive, video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
let disposable = MetaDisposable()
Queue.mainQueue().async {
guard let strongSelf = self else {
@ -1872,7 +1892,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
strongSelf.onMutedSpeechActivityDetected?(value)
}
}, encryptionKey: encryptionKey, isConference: self.isConference, isStream: self.isStream, sharedAudioDevice: self.sharedAudioDevice))
}, encryptionKey: encryptionKey, isConference: self.isConference, isStream: self.isStream, sharedAudioDevice: self.sharedAudioContext?.audioDevice))
}
self.genericCallContext = genericCallContext
@ -3349,7 +3369,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
if self.sharedAudioDevice != nil {
if let sharedAudioContext = self.sharedAudioContext {
sharedAudioContext.setCurrentAudioOutput(output)
return
}
guard self.currentSelectedAudioOutputValue != output else {
@ -3567,6 +3588,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return true
}
func setInvitedPeers(_ peerIds: [PeerId]) {
self.invitedPeersValue = peerIds
}
public func removedPeer(_ peerId: PeerId) {
var updatedInvitedPeers = self.invitedPeersValue
updatedInvitedPeers.removeAll(where: { $0 == peerId})

View File

@ -128,6 +128,7 @@ final class VideoChatParticipantsComponent: Component {
let call: VideoChatCall
let participants: Participants?
let invitedPeers: [EnginePeer]
let speakingParticipants: Set<EnginePeer.Id>
let expandedVideoState: ExpandedVideoState?
let maxVideoQuality: Int
@ -147,6 +148,7 @@ final class VideoChatParticipantsComponent: Component {
init(
call: VideoChatCall,
participants: Participants?,
invitedPeers: [EnginePeer],
speakingParticipants: Set<EnginePeer.Id>,
expandedVideoState: ExpandedVideoState?,
maxVideoQuality: Int,
@ -165,6 +167,7 @@ final class VideoChatParticipantsComponent: Component {
) {
self.call = call
self.participants = participants
self.invitedPeers = invitedPeers
self.speakingParticipants = speakingParticipants
self.expandedVideoState = expandedVideoState
self.maxVideoQuality = maxVideoQuality
@ -189,6 +192,9 @@ final class VideoChatParticipantsComponent: Component {
if lhs.participants != rhs.participants {
return false
}
if lhs.invitedPeers != rhs.invitedPeers {
return false
}
if lhs.speakingParticipants != rhs.speakingParticipants {
return false
}
@ -1183,50 +1189,37 @@ final class VideoChatParticipantsComponent: Component {
let clippedVisibleListItemRange = itemLayout.visibleListItemRange(for: clippedScrollViewBounds)
if visibleListItemRange.maxIndex >= visibleListItemRange.minIndex {
for i in visibleListItemRange.minIndex ... visibleListItemRange.maxIndex {
let participant = self.listParticipants[i]
validListItemIds.append(participant.peer.id)
if i >= clippedVisibleListItemRange.minIndex && i <= clippedVisibleListItemRange.maxIndex {
visibleParticipants.append(participant.peer.id)
}
var itemTransition = transition
let itemView: ListItem
if let current = self.listItemViews[participant.peer.id] {
itemView = current
} else {
itemTransition = itemTransition.withAnimation(.none)
itemView = ListItem()
self.listItemViews[participant.peer.id] = itemView
}
let itemFrame = itemLayout.listItemFrame(at: i)
let subtitle: PeerListItemComponent.Subtitle
if participant.peer.id == component.call.accountContext.account.peerId {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_You, color: .accent)
} else if component.speakingParticipants.contains(participant.peer.id) {
if let volume = participant.volume, volume / 100 != 100 {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusSpeakingVolume("\(volume / 100)%").string, color: .constructive)
let participantPeerId: EnginePeer.Id
let peerItemComponent: PeerListItemComponent
if i < self.listParticipants.count {
let participant = self.listParticipants[i]
participantPeerId = participant.peer.id
let subtitle: PeerListItemComponent.Subtitle
if participant.peer.id == component.call.accountContext.account.peerId {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_You, color: .accent)
} else if component.speakingParticipants.contains(participant.peer.id) {
if let volume = participant.volume, volume / 100 != 100 {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusSpeakingVolume("\(volume / 100)%").string, color: .constructive)
} else {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusSpeaking, color: .constructive)
}
} else if let about = participant.about, !about.isEmpty {
subtitle = PeerListItemComponent.Subtitle(text: about, color: .neutral)
} else {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusSpeaking, color: .constructive)
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusListening, color: .neutral)
}
} else if let about = participant.about, !about.isEmpty {
subtitle = PeerListItemComponent.Subtitle(text: about, color: .neutral)
} else {
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusListening, color: .neutral)
}
let rightAccessoryComponent: AnyComponent<Empty> = AnyComponent(VideoChatParticipantStatusComponent(
muteState: participant.muteState,
hasRaiseHand: participant.hasRaiseHand,
isSpeaking: component.speakingParticipants.contains(participant.peer.id),
theme: component.theme
))
let _ = itemView.view.update(
transition: itemTransition,
component: AnyComponent(PeerListItemComponent(
let rightAccessoryComponent: AnyComponent<Empty> = AnyComponent(VideoChatParticipantStatusComponent(
muteState: participant.muteState,
hasRaiseHand: participant.hasRaiseHand,
isSpeaking: component.speakingParticipants.contains(participant.peer.id),
theme: component.theme
))
peerItemComponent = PeerListItemComponent(
context: component.call.accountContext,
theme: component.theme,
strings: component.strings,
@ -1263,7 +1256,63 @@ final class VideoChatParticipantsComponent: Component {
}
component.openParticipantContextMenu(peer.id, sourceView, gesture)
}
)),
)
} else {
let invitedPeer = component.invitedPeers[i - self.listParticipants.count]
participantPeerId = invitedPeer.id
let subtitle: PeerListItemComponent.Subtitle
subtitle = PeerListItemComponent.Subtitle(text: component.strings.VoiceChat_StatusInvited, color: .neutral)
peerItemComponent = PeerListItemComponent(
context: component.call.accountContext,
theme: component.theme,
strings: component.strings,
style: .generic,
sideInset: 0.0,
title: invitedPeer.displayTitle(strings: component.strings, displayOrder: .firstLast),
avatarComponent: AnyComponent(VideoChatParticipantAvatarComponent(
call: component.call,
peer: invitedPeer,
myPeerId: component.participants?.myPeerId ?? component.call.accountContext.account.peerId,
isSpeaking: false,
theme: component.theme
)),
peer: invitedPeer,
subtitle: subtitle,
subtitleAccessory: .none,
presence: nil,
rightAccessoryComponent: nil,
selectionState: .none,
hasNext: false,
extractedTheme: PeerListItemComponent.ExtractedTheme(
inset: 2.0,
background: UIColor(white: 0.1, alpha: 1.0)
),
action: nil,
contextAction: nil
)
}
validListItemIds.append(participantPeerId)
if i >= clippedVisibleListItemRange.minIndex && i <= clippedVisibleListItemRange.maxIndex {
visibleParticipants.append(participantPeerId)
}
var itemTransition = transition
let itemView: ListItem
if let current = self.listItemViews[participantPeerId] {
itemView = current
} else {
itemTransition = itemTransition.withAnimation(.none)
itemView = ListItem()
self.listItemViews[participantPeerId] = itemView
}
let _ = itemView.view.update(
transition: itemTransition,
component: AnyComponent(peerItemComponent),
environment: {},
containerSize: itemFrame.size
)
@ -1363,12 +1412,6 @@ final class VideoChatParticipantsComponent: Component {
isPresentation: participant.isPresentation
))
}
/*for participant in self.listParticipants {
thumbnailParticipants.append(VideoChatExpandedParticipantThumbnailsComponent.Participant(
participant: participant,
isPresentation: false
))
}*/
let expandedControlsAlpha: CGFloat = (expandedVideoState.isUIHidden || self.isPinchToZoomActive) ? 0.0 : 1.0
let expandedThumbnailsAlpha: CGFloat = expandedControlsAlpha
@ -1770,7 +1813,7 @@ final class VideoChatParticipantsComponent: Component {
expandedInsets: component.expandedInsets,
safeInsets: component.safeInsets,
gridItemCount: gridParticipants.count,
listItemCount: listParticipants.count,
listItemCount: listParticipants.count + component.invitedPeers.count,
listItemHeight: measureListItemSize.height,
listTrailingItemHeight: inviteListItemSize.height
)
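A self-contained sketch of the row-index mapping the item loop above relies on, with invited peers appended after joined participants (generic names, not from this file):

    // Rows 0..<participants.count show joined participants; the remainder show invited peers.
    enum Row<Participant, Invited> {
        case participant(Participant)
        case invited(Invited)
    }

    func row<Participant, Invited>(at index: Int, participants: [Participant], invited: [Invited]) -> Row<Participant, Invited> {
        if index < participants.count {
            return .participant(participants[index])
        } else {
            return .invited(invited[index - participants.count])
        }
    }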

View File

@ -241,6 +241,9 @@ final class VideoChatScreenComponent: Component {
var members: PresentationGroupCallMembers?
var membersDisposable: Disposable?
var invitedPeers: [EnginePeer] = []
var invitedPeersDisposable: Disposable?
var speakingParticipantPeers: [EnginePeer] = []
var visibleParticipants: Set<EnginePeer.Id> = Set()
@ -285,6 +288,7 @@ final class VideoChatScreenComponent: Component {
deinit {
self.stateDisposable?.dispose()
self.membersDisposable?.dispose()
self.invitedPeersDisposable?.dispose()
self.applicationStateDisposable?.dispose()
self.reconnectedAsEventsDisposable?.dispose()
self.memberEventsDisposable?.dispose()
@ -305,12 +309,17 @@ final class VideoChatScreenComponent: Component {
}
func animateIn(sourceCallController: CallController) {
let sourceCallControllerView = sourceCallController.view
var isAnimationFinished = false
let animateOutData = sourceCallController.animateOutToGroupChat(completion: { [weak sourceCallControllerView] in
var sourceCallControllerAnimatedOut: (() -> Void)?
let animateOutData = sourceCallController.animateOutToGroupChat(completion: {
isAnimationFinished = true
sourceCallControllerView?.removeFromSuperview()
sourceCallControllerAnimatedOut?()
})
let sourceCallControllerView = animateOutData?.containerView
sourceCallControllerView?.isUserInteractionEnabled = false
sourceCallControllerAnimatedOut = { [weak sourceCallControllerView] in
sourceCallControllerView?.removeFromSuperview()
}
var expandedPeer: (id: EnginePeer.Id, isPresentation: Bool)?
if let animateOutData, animateOutData.incomingVideoLayer != nil {
@ -327,11 +336,11 @@ final class VideoChatScreenComponent: Component {
self.state?.updated(transition: .immediate)
if !isAnimationFinished {
if !isAnimationFinished, let sourceCallControllerView {
if let participantsView = self.participants.view {
self.containerView.insertSubview(sourceCallController.view, belowSubview: participantsView)
self.containerView.insertSubview(sourceCallControllerView, belowSubview: participantsView)
} else {
self.containerView.addSubview(sourceCallController.view)
self.containerView.addSubview(sourceCallControllerView)
}
}
@ -380,7 +389,15 @@ final class VideoChatScreenComponent: Component {
self.state?.updated(transition: .spring(duration: 0.5))
}
@objc public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRequireFailureOf otherGestureRecognizer: UIGestureRecognizer) -> Bool {
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
guard let result = super.hitTest(point, with: event) else {
return nil
}
return result
}
@objc func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRequireFailureOf otherGestureRecognizer: UIGestureRecognizer) -> Bool {
if gestureRecognizer is UITapGestureRecognizer {
if otherGestureRecognizer is UIPanGestureRecognizer {
return true
@ -409,7 +426,6 @@ final class VideoChatScreenComponent: Component {
}
}
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began, .changed:
@ -952,6 +968,103 @@ final class VideoChatScreenComponent: Component {
}
}
static func groupCallStateForConferenceSource(conferenceSource: PresentationCall) -> Signal<(state: PresentationGroupCallState, invitedPeers: [EnginePeer]), NoError> {
let invitedPeers = conferenceSource.context.engine.data.subscribe(
EngineDataList((conferenceSource as! PresentationCallImpl).pendingInviteToConferencePeerIds.map { TelegramEngine.EngineData.Item.Peer.Peer(id: $0) })
)
let accountPeerId = conferenceSource.context.account.peerId
let conferenceSourcePeerId = conferenceSource.peerId
return combineLatest(queue: .mainQueue(),
conferenceSource.state,
conferenceSource.isMuted,
invitedPeers
)
|> mapToSignal { state, isMuted, invitedPeers -> Signal<(state: PresentationGroupCallState, invitedPeers: [EnginePeer]), NoError> in
let mappedNetworkState: PresentationGroupCallState.NetworkState
switch state.state {
case .active:
mappedNetworkState = .connected
default:
mappedNetworkState = .connecting
}
let callState = PresentationGroupCallState(
myPeerId: accountPeerId,
networkState: mappedNetworkState,
canManageCall: false,
adminIds: Set([accountPeerId, conferenceSourcePeerId]),
muteState: isMuted ? GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: true) : nil,
defaultParticipantMuteState: nil,
recordingStartTimestamp: nil,
title: nil,
raisedHand: false,
scheduleTimestamp: nil,
subscribedToScheduled: false,
isVideoEnabled: true,
isVideoWatchersLimitReached: false
)
return .single((callState, invitedPeers.compactMap({ $0 })))
}
}
static func groupCallMembersForConferenceSource(conferenceSource: PresentationCall) -> Signal<PresentationGroupCallMembers, NoError> {
return combineLatest(queue: .mainQueue(),
conferenceSource.context.engine.data.subscribe(
TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.context.account.peerId),
TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.peerId)
),
conferenceSource.state
)
|> map { peers, state in
var participants: [GroupCallParticipantsContext.Participant] = []
let (myPeer, remotePeer) = peers
if let myPeer {
participants.append(GroupCallParticipantsContext.Participant(
peer: myPeer._asPeer(),
ssrc: nil,
videoDescription: nil,
presentationDescription: nil,
joinTimestamp: 0,
raiseHandRating: nil,
hasRaiseHand: false,
activityTimestamp: nil,
activityRank: nil,
muteState: nil,
volume: nil,
about: nil,
joinedVideo: false
))
}
if let remotePeer {
participants.append(GroupCallParticipantsContext.Participant(
peer: remotePeer._asPeer(),
ssrc: nil,
videoDescription: nil,
presentationDescription: nil,
joinTimestamp: 0,
raiseHandRating: nil,
hasRaiseHand: false,
activityTimestamp: nil,
activityRank: nil,
muteState: nil,
volume: nil,
about: nil,
joinedVideo: false
))
}
let members = PresentationGroupCallMembers(
participants: participants,
speakingParticipants: Set(),
totalCount: 2,
loadMoreToken: nil
)
return members
}
}
func update(component: VideoChatScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<ViewControllerComponentContainer.Environment>, transition: ComponentTransition) -> CGSize {
self.isUpdating = true
defer {
@ -971,6 +1084,10 @@ final class VideoChatScreenComponent: Component {
if self.component == nil {
self.peer = component.initialData.peer
self.members = component.initialData.members
self.invitedPeers = component.initialData.invitedPeers
if let members = self.members {
self.invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.peer.id == invitedPeer.id }) })
}
self.callState = component.initialData.callState
}
@ -1004,6 +1121,9 @@ final class VideoChatScreenComponent: Component {
}
self.members = members
if let members {
self.invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.peer.id == invitedPeer.id }) })
}
if let members, let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
var videoCount = 0
@ -1103,6 +1223,35 @@ final class VideoChatScreenComponent: Component {
}
})
self.invitedPeersDisposable?.dispose()
let accountContext = groupCall.accountContext
self.invitedPeersDisposable = (groupCall.invitedPeers
|> mapToSignal { invitedPeers in
return accountContext.engine.data.get(
EngineDataList(invitedPeers.map({ TelegramEngine.EngineData.Item.Peer.Peer(id: $0) }))
)
|> map { peers in
return peers.compactMap { $0 }
}
}
|> deliverOnMainQueue).startStrict(next: { [weak self] invitedPeers in
guard let self else {
return
}
var invitedPeers = invitedPeers
if let members {
invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.peer.id == invitedPeer.id }) })
}
if self.invitedPeers != invitedPeers {
self.invitedPeers = invitedPeers
if !self.isUpdating {
self.state?.updated(transition: .spring(duration: 0.4))
}
}
})
self.stateDisposable?.dispose()
self.stateDisposable = (groupCall.state
|> deliverOnMainQueue).startStrict(next: { [weak self] callState in
@ -1243,76 +1392,25 @@ final class VideoChatScreenComponent: Component {
}
case let .conferenceSource(conferenceSource):
self.membersDisposable?.dispose()
self.membersDisposable = (combineLatest(queue: .mainQueue(),
conferenceSource.context.engine.data.subscribe(
TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.context.account.peerId),
TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.peerId)
),
conferenceSource.state
)
|> deliverOnMainQueue).startStrict(next: { [weak self] peers, state in
self.membersDisposable = (View.groupCallMembersForConferenceSource(conferenceSource: conferenceSource)
|> deliverOnMainQueue).startStrict(next: { [weak self] members in
guard let self else {
return
}
var participants: [GroupCallParticipantsContext.Participant] = []
let (myPeer, remotePeer) = peers
if let myPeer {
participants.append(GroupCallParticipantsContext.Participant(
peer: myPeer._asPeer(),
ssrc: nil,
videoDescription: nil,
presentationDescription: nil,
joinTimestamp: 0,
raiseHandRating: nil,
hasRaiseHand: false,
activityTimestamp: nil,
activityRank: nil,
muteState: nil,
volume: nil,
about: nil,
joinedVideo: false
))
}
if let remotePeer {
participants.append(GroupCallParticipantsContext.Participant(
peer: remotePeer._asPeer(),
ssrc: nil,
videoDescription: nil,
presentationDescription: nil,
joinTimestamp: 0,
raiseHandRating: nil,
hasRaiseHand: false,
activityTimestamp: nil,
activityRank: nil,
muteState: nil,
volume: nil,
about: nil,
joinedVideo: false
))
}
let members: PresentationGroupCallMembers? = PresentationGroupCallMembers(
participants: participants,
speakingParticipants: Set(),
totalCount: 2,
loadMoreToken: nil
)
if self.members != members {
var members = members
if let membersValue = members {
let participants = membersValue.participants
members = PresentationGroupCallMembers(
participants: participants,
speakingParticipants: membersValue.speakingParticipants,
totalCount: membersValue.totalCount,
loadMoreToken: membersValue.loadMoreToken
)
}
let membersValue = members
let participants = membersValue.participants
members = PresentationGroupCallMembers(
participants: participants,
speakingParticipants: membersValue.speakingParticipants,
totalCount: membersValue.totalCount,
loadMoreToken: membersValue.loadMoreToken
)
self.members = members
if let members, let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
if let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
var videoCount = 0
for participant in members.participants {
if participant.presentationDescription != nil {
@ -1330,7 +1428,7 @@ final class VideoChatScreenComponent: Component {
}
}
if let expandedParticipantsVideoState = self.expandedParticipantsVideoState, let members {
if let expandedParticipantsVideoState = self.expandedParticipantsVideoState {
if CFAbsoluteTimeGetCurrent() > self.focusedSpeakerAutoSwitchDeadline, !expandedParticipantsVideoState.isMainParticipantPinned, let participant = members.participants.first(where: { participant in
if let callState = self.callState, participant.peer.id == callState.myPeerId {
return false
@ -1396,7 +1494,7 @@ final class VideoChatScreenComponent: Component {
}
var speakingParticipantPeers: [EnginePeer] = []
if let members, !members.speakingParticipants.isEmpty {
if !members.speakingParticipants.isEmpty {
for participant in members.participants {
if members.speakingParticipants.contains(participant.peer.id) {
speakingParticipantPeers.append(EnginePeer(participant.peer))
@ -1410,43 +1508,27 @@ final class VideoChatScreenComponent: Component {
}
})
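The inline construction of a two-participant member list that is removed above is presumably what the new View.groupCallMembersForConferenceSource(conferenceSource:) helper now encapsulates; its implementation is not part of this hunk. A hedged sketch of what such a helper could look like, reconstructed from the deleted code (the PresentationCall parameter type is an assumption, and the real helper may differ):

// Reconstructed sketch; field values are taken verbatim from the removed inline code.
extension VideoChatScreenComponent.View {
    static func groupCallMembersForConferenceSource(conferenceSource: PresentationCall) -> Signal<PresentationGroupCallMembers, NoError> {
        return conferenceSource.context.engine.data.subscribe(
            TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.context.account.peerId),
            TelegramEngine.EngineData.Item.Peer.Peer(id: conferenceSource.peerId)
        )
        |> map { peers -> PresentationGroupCallMembers in
            // Both sides of the 1:1 call are exposed as plain group-call participants.
            let (myPeer, remotePeer) = peers
            var participants: [GroupCallParticipantsContext.Participant] = []
            for peer in [myPeer, remotePeer] {
                guard let peer else {
                    continue
                }
                participants.append(GroupCallParticipantsContext.Participant(
                    peer: peer._asPeer(),
                    ssrc: nil,
                    videoDescription: nil,
                    presentationDescription: nil,
                    joinTimestamp: 0,
                    raiseHandRating: nil,
                    hasRaiseHand: false,
                    activityTimestamp: nil,
                    activityRank: nil,
                    muteState: nil,
                    volume: nil,
                    about: nil,
                    joinedVideo: false
                ))
            }
            return PresentationGroupCallMembers(
                participants: participants,
                speakingParticipants: Set(),
                totalCount: 2,
                loadMoreToken: nil
            )
        }
    }
}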
self.invitedPeersDisposable?.dispose()
self.invitedPeersDisposable = nil
self.stateDisposable?.dispose()
self.stateDisposable = (combineLatest(queue: .mainQueue(),
conferenceSource.state,
conferenceSource.isMuted
)
|> deliverOnMainQueue).startStrict(next: { [weak self] state, isMuted in
guard let self, case let .conferenceSource(conferenceSource) = self.currentCall else {
self.stateDisposable = (View.groupCallStateForConferenceSource(conferenceSource: conferenceSource)
|> deliverOnMainQueue).startStrict(next: { [weak self] callState, invitedPeers in
guard let self else {
return
}
let mappedNetworkState: PresentationGroupCallState.NetworkState
switch state.state {
case .active:
mappedNetworkState = .connected
default:
mappedNetworkState = .connecting
}
let callState = PresentationGroupCallState(
myPeerId: conferenceSource.context.account.peerId,
networkState: mappedNetworkState,
canManageCall: false,
adminIds: Set([conferenceSource.context.account.peerId, conferenceSource.peerId]),
muteState: isMuted ? GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: true) : nil,
defaultParticipantMuteState: nil,
recordingStartTimestamp: nil,
title: nil,
raisedHand: false,
scheduleTimestamp: nil,
subscribedToScheduled: false,
isVideoEnabled: true,
isVideoWatchersLimitReached: false
)
var isUpdated = false
if self.callState != callState {
self.callState = callState
isUpdated = true
}
if self.invitedPeers != invitedPeers {
self.invitedPeers = invitedPeers
isUpdated = true
}
if isUpdated {
if !self.isUpdating {
self.state?.updated(transition: .spring(duration: 0.4))
}
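Similarly, the removed inline mapping of the 1:1 call's state and mute flag into a PresentationGroupCallState now sits behind View.groupCallStateForConferenceSource(conferenceSource:), which additionally surfaces the conference invite list (the new closure receives callState and invitedPeers). A sketch reconstructed from the deleted code; where the invited peers actually come from is not visible in this hunk, so the sketch returns an empty list there, and the PresentationCall parameter type is again an assumption:

// Reconstructed sketch; the invited-peers source is a placeholder (empty list).
extension VideoChatScreenComponent.View {
    static func groupCallStateForConferenceSource(conferenceSource: PresentationCall) -> Signal<(PresentationGroupCallState, [EnginePeer]), NoError> {
        return combineLatest(queue: .mainQueue(),
            conferenceSource.state,
            conferenceSource.isMuted
        )
        |> map { state, isMuted -> (PresentationGroupCallState, [EnginePeer]) in
            let mappedNetworkState: PresentationGroupCallState.NetworkState
            switch state.state {
            case .active:
                mappedNetworkState = .connected
            default:
                mappedNetworkState = .connecting
            }
            let callState = PresentationGroupCallState(
                myPeerId: conferenceSource.context.account.peerId,
                networkState: mappedNetworkState,
                canManageCall: false,
                adminIds: Set([conferenceSource.context.account.peerId, conferenceSource.peerId]),
                muteState: isMuted ? GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: true) : nil,
                defaultParticipantMuteState: nil,
                recordingStartTimestamp: nil,
                title: nil,
                raisedHand: false,
                scheduleTimestamp: nil,
                subscribedToScheduled: false,
                isVideoEnabled: true,
                isVideoWatchersLimitReached: false
            )
            return (callState, [])
        }
    }
}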
@ -1987,6 +2069,7 @@ final class VideoChatScreenComponent: Component {
component: AnyComponent(VideoChatParticipantsComponent(
call: call,
participants: mappedParticipants,
invitedPeers: self.invitedPeers,
speakingParticipants: self.members?.speakingParticipants ?? Set(),
expandedVideoState: self.expandedParticipantsVideoState,
maxVideoQuality: self.maxVideoQuality,
@ -2368,15 +2451,18 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo
let peer: EnginePeer?
let members: PresentationGroupCallMembers?
let callState: PresentationGroupCallState
let invitedPeers: [EnginePeer]
init(
peer: EnginePeer?,
members: PresentationGroupCallMembers?,
callState: PresentationGroupCallState
callState: PresentationGroupCallState,
invitedPeers: [EnginePeer]
) {
self.peer = peer
self.members = members
self.callState = callState
self.invitedPeers = invitedPeers
}
}
@ -2424,6 +2510,8 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo
presentationMode: .default,
theme: .custom(theme)
)
self.flatReceivesModalTransition = true
}
required init(coder aDecoder: NSCoder) {
@ -2521,39 +2609,40 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo
} else {
callPeer = .single(nil)
}
let accountContext = groupCall.accountContext
let invitedPeers = groupCall.invitedPeers |> take(1) |> mapToSignal { invitedPeers in
return accountContext.engine.data.get(
EngineDataList(invitedPeers.map({ TelegramEngine.EngineData.Item.Peer.Peer(id: $0) }))
)
}
return combineLatest(
callPeer,
groupCall.members |> take(1),
groupCall.state |> take(1)
groupCall.state |> take(1),
invitedPeers
)
|> map { peer, members, callState -> InitialData in
|> map { peer, members, callState, invitedPeers -> InitialData in
return InitialData(
peer: peer,
members: members,
callState: callState
callState: callState,
invitedPeers: invitedPeers.compactMap { $0 }
)
}
case let .conferenceSource(conferenceSource):
//TODO:release move initialization from component
return .single(InitialData(
peer: nil,
members: nil,
callState: PresentationGroupCallState(
myPeerId: conferenceSource.context.account.peerId,
networkState: .connected,
canManageCall: false,
adminIds: Set(),
muteState: nil,
defaultParticipantMuteState: nil,
recordingStartTimestamp: nil,
title: nil,
raisedHand: false,
scheduleTimestamp: nil,
subscribedToScheduled: false,
isVideoEnabled: true,
isVideoWatchersLimitReached: false
return combineLatest(
VideoChatScreenComponent.View.groupCallStateForConferenceSource(conferenceSource: conferenceSource) |> take(1),
VideoChatScreenComponent.View.groupCallMembersForConferenceSource(conferenceSource: conferenceSource) |> take(1)
)
|> map { stateAndInvitedPeers, members in
let (state, invitedPeers) = stateAndInvitedPeers
return InitialData(
peer: nil,
members: members,
callState: state,
invitedPeers: invitedPeers
)
))
}
}
}
}

View File

@ -14,6 +14,131 @@ import LegacyMediaPickerUI
import AvatarNode
import PresentationDataUtils
import AccountContext
import CallsEmoji
import AlertComponent
import TelegramPresentationData
import ComponentFlow
import MultilineTextComponent
private func resolvedEmojiKey(data: Data) -> [String] {
return stringForEmojiHashOfData(data, 4) ?? []
}
private final class EmojiKeyAlertComponent: CombinedComponent {
let theme: PresentationTheme
let emojiKey: [String]
let title: String
let text: String
init(theme: PresentationTheme, emojiKey: [String], title: String, text: String) {
self.theme = theme
self.emojiKey = emojiKey
self.title = title
self.text = text
}
static func ==(lhs: EmojiKeyAlertComponent, rhs: EmojiKeyAlertComponent) -> Bool {
if lhs.theme !== rhs.theme {
return false
}
if lhs.emojiKey != rhs.emojiKey {
return false
}
if lhs.title != rhs.title {
return false
}
if lhs.text != rhs.text {
return false
}
return true
}
public static var body: Body {
//let emojiKeyItems = ChildMap(environment: MultilineTextComponent.self, keyedBy: Int.self)
let emojiKey = Child(MultilineTextComponent.self)
let title = Child(MultilineTextComponent.self)
let text = Child(MultilineTextComponent.self)
return { context in
/*let emojiKeyItems = context.component.emojiKey.map { item in
return emojiKeyItems[item].update(
component: AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: context.component.emojiKey.joined(separator: ""), font: Font.semibold(40.0), textColor: context.component.theme.actionSheet.primaryTextColor)),
horizontalAlignment: .center
)),
environment: {},
availableSize: CGSize(width: 100.0, height: 100.0),
transition: .immediate
)
}*/
let emojiKey = emojiKey.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: context.component.emojiKey.joined(separator: ""), font: Font.semibold(40.0), textColor: context.component.theme.actionSheet.primaryTextColor)),
horizontalAlignment: .center
),
availableSize: CGSize(width: context.availableSize.width, height: 10000.0),
transition: .immediate
)
let title = title.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: context.component.title, font: Font.semibold(16.0), textColor: context.component.theme.actionSheet.primaryTextColor)),
horizontalAlignment: .center,
maximumNumberOfLines: 0,
lineSpacing: 0.2
),
availableSize: CGSize(width: context.availableSize.width, height: 10000.0),
transition: .immediate
)
let text = text.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: context.component.text, font: Font.regular(13.0), textColor: context.component.theme.actionSheet.primaryTextColor)),
horizontalAlignment: .center,
maximumNumberOfLines: 0,
lineSpacing: 0.2
),
availableSize: CGSize(width: context.availableSize.width, height: 10000.0),
transition: .immediate
)
var size = CGSize(width: 0.0, height: 0.0)
size.width = max(size.width, emojiKey.size.width)
size.width = max(size.width, title.size.width)
size.width = max(size.width, text.size.width)
let titleSpacing: CGFloat = 10.0
let textSpacing: CGFloat = 10.0
size.height += emojiKey.size.height
size.height += titleSpacing
size.height += title.size.height
size.height += textSpacing
size.height += text.size.height
var contentHeight: CGFloat = 0.0
let emojiKeyFrame = CGRect(origin: CGPoint(x: floor((size.width - emojiKey.size.width) * 0.5), y: contentHeight), size: emojiKey.size)
contentHeight += emojiKey.size.height + titleSpacing
let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - title.size.width) * 0.5), y: contentHeight), size: title.size)
contentHeight += title.size.height + textSpacing
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - text.size.width) * 0.5), y: contentHeight), size: text.size)
contentHeight += text.size.height + 5.0
context.add(emojiKey
.position(emojiKeyFrame.center)
)
context.add(title
.position(titleFrame.center)
)
context.add(text
.position(textFrame.center)
)
return size
}
}
}
extension VideoChatScreenComponent.View {
func openMoreMenu() {
@ -50,6 +175,35 @@ extension VideoChatScreenComponent.View {
}
}
if case let .group(groupCall) = currentCall, let encryptionKey = groupCall.encryptionKeyValue {
//TODO:localize
let emojiKey = resolvedEmojiKey(data: encryptionKey)
items.append(.action(ContextMenuActionItem(text: "Encryption Key", textLayout: .secondLineWithValue(emojiKey.joined(separator: "")), icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Lock"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] c, _ in
c?.dismiss(completion: nil)
guard let self, let environment = self.environment else {
return
}
let alertController = componentAlertController(
theme: AlertControllerTheme(presentationTheme: defaultDarkPresentationTheme, fontSize: .regular),
content: AnyComponent(EmojiKeyAlertComponent(
theme: defaultDarkPresentationTheme,
emojiKey: emojiKey,
title: "This call is end-to-end encrypted",
text: "If the emojis on everyone's screens are the same, this call is 100% secure."
)),
actions: [ComponentAlertAction(type: .defaultAction, title: environment.strings.Common_OK, action: {})],
actionLayout: .horizontal
)
environment.controller()?.present(alertController, in: .window(.root))
})))
items.append(.separator)
}
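The encryption-key entry above derives a 4-emoji fingerprint from groupCall.encryptionKeyValue and shows it both inline in the menu and in a dedicated alert. If the same alert is needed from other entry points later, the construction could be factored out; this is only a sketch reusing the names already present in this file, and it assumes componentAlertController returns a ViewController subclass (its concrete return type is not visible in this hunk):

// Sketch only: wraps the alert construction shown above into a reusable helper.
// Assumes componentAlertController returns a ViewController subclass; strings stay
// hard-coded pending the //TODO:localize above.
private func makeEncryptionKeyAlert(emojiKey: [String], okTitle: String) -> ViewController {
    return componentAlertController(
        theme: AlertControllerTheme(presentationTheme: defaultDarkPresentationTheme, fontSize: .regular),
        content: AnyComponent(EmojiKeyAlertComponent(
            theme: defaultDarkPresentationTheme,
            emojiKey: emojiKey,
            title: "This call is end-to-end encrypted",
            text: "If the emojis on everyone's screens are the same, this call is 100% secure."
        )),
        actions: [ComponentAlertAction(type: .defaultAction, title: okTitle, action: {})],
        actionLayout: .horizontal
    )
}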
if let (availableOutputs, currentOutput) = self.audioOutputState, availableOutputs.count > 1 {
var currentOutputTitle = ""
for output in availableOutputs {

View File

@ -751,15 +751,18 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
}
self.backgroundLayer.update(stateIndex: backgroundStateIndex, isEnergySavingEnabled: params.state.isEnergySavingEnabled, transition: transition)
genericAlphaTransition.setAlpha(layer: self.backgroundLayer, alpha: self.isAnimatedOutToGroupCall ? 0.0 : 1.0, completion: { [weak self] _ in
guard let self else {
return
}
if let animateOutToGroupCallCompletion = self.animateOutToGroupCallCompletion {
self.animateOutToGroupCallCompletion = nil
animateOutToGroupCallCompletion()
}
})
let backgroundAlpha = self.isAnimatedOutToGroupCall ? 0.0 : 1.0
if CGFloat(self.backgroundLayer.opacity) != backgroundAlpha {
genericAlphaTransition.setAlpha(layer: self.backgroundLayer, alpha: backgroundAlpha, completion: { [weak self] _ in
guard let self else {
return
}
if let animateOutToGroupCallCompletion = self.animateOutToGroupCallCompletion {
self.animateOutToGroupCallCompletion = nil
animateOutToGroupCallCompletion()
}
})
}
transition.setFrame(view: self.buttonGroupView, frame: CGRect(origin: CGPoint(), size: params.size))
@ -914,7 +917,6 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
transition.setFrame(view: self.backButtonView, frame: backButtonFrame)
genericAlphaTransition.setAlpha(view: self.backButtonView, alpha: (currentAreControlsHidden || self.isAnimatedOutToGroupCall) ? 0.0 : 1.0)
var isConferencePossible = false
if case .active = params.state.lifecycleState, params.state.isConferencePossible {
isConferencePossible = true
@ -952,7 +954,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
let conferenceButtonFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 10.0 - conferenceButtonSize.width, y: conferenceButtonY), size: conferenceButtonSize)
conferenceButtonTransition.setFrame(view: conferenceButtonView, frame: conferenceButtonFrame)
genericAlphaTransition.setAlpha(view: conferenceButtonView, alpha: 1.0)
genericAlphaTransition.setAlpha(view: conferenceButtonView, alpha: (currentAreControlsHidden || self.isAnimatedOutToGroupCall) ? 0.0 : 1.0)
} else {
if let conferenceButtonView = self.conferenceButtonView {
self.conferenceButtonView = nil
@ -1291,8 +1293,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
}
self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded: havePrimaryVideo, cornerRadius: avatarCornerRadius, transition: transition)
transition.setAlpha(layer: self.avatarLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
transition.setScale(layer: self.avatarLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)
transition.setAlpha(layer: self.avatarLayer, alpha: (self.isAnimatedOutToGroupCall || (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo)) ? 0.0 : 1.0)
transition.setScale(layer: self.avatarLayer, scale: (self.isAnimatedOutToGroupCall || expandedEmojiKeyOverlapsAvatar) ? 0.001 : 1.0)
transition.setPosition(view: self.videoContainerBackgroundView, position: avatarFrame.center)
transition.setBounds(view: self.videoContainerBackgroundView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
@ -1347,8 +1349,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
transition.setScale(layer: self.avatarTransformLayer, scale: 1.0)
transition.setScale(layer: self.blobTransformLayer, scale: 1.0)
} else {
genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
transition.setScale(layer: self.blobLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)
genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: (self.isAnimatedOutToGroupCall || (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo)) ? 0.0 : 1.0)
transition.setScale(layer: self.blobLayer, scale: (self.isAnimatedOutToGroupCall || expandedEmojiKeyOverlapsAvatar) ? 0.001 : 1.0)
if !havePrimaryVideo {
self.canAnimateAudioLevel = true
}

View File

@ -245,7 +245,7 @@ public final class PeerListItemComponent: Component {
let hasNext: Bool
let extractedTheme: ExtractedTheme?
let insets: UIEdgeInsets?
let action: (EnginePeer, EngineMessage.Id?, PeerListItemComponent.View) -> Void
let action: ((EnginePeer, EngineMessage.Id?, PeerListItemComponent.View) -> Void)?
let inlineActions: InlineActionsState?
let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
let openStories: ((EnginePeer, AvatarNode) -> Void)?
@ -276,7 +276,7 @@ public final class PeerListItemComponent: Component {
hasNext: Bool,
extractedTheme: ExtractedTheme? = nil,
insets: UIEdgeInsets? = nil,
action: @escaping (EnginePeer, EngineMessage.Id?, PeerListItemComponent.View) -> Void,
action: ((EnginePeer, EngineMessage.Id?, PeerListItemComponent.View) -> Void)?,
inlineActions: InlineActionsState? = nil,
contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)? = nil,
openStories: ((EnginePeer, AvatarNode) -> Void)? = nil
@ -391,6 +391,12 @@ public final class PeerListItemComponent: Component {
if lhs.inlineActions != rhs.inlineActions {
return false
}
if (lhs.action == nil) != (rhs.action == nil) {
return false
}
if (lhs.contextAction == nil) != (rhs.contextAction == nil) {
return false
}
return true
}
@ -568,7 +574,7 @@ public final class PeerListItemComponent: Component {
guard let component = self.component, let peer = component.peer else {
return
}
component.action(peer, component.message?.id, self)
component.action?(peer, component.message?.id, self)
}
@objc private func avatarButtonPressed() {
@ -631,7 +637,7 @@ public final class PeerListItemComponent: Component {
if let hint = transition.userData(TransitionHint.self) {
synchronousLoad = hint.synchronousLoad
}
self.isGestureEnabled = component.contextAction != nil
let themeUpdated = self.component?.theme !== component.theme
@ -673,6 +679,7 @@ public final class PeerListItemComponent: Component {
self.state = state
self.containerButton.alpha = component.isEnabled ? 1.0 : 0.3
self.containerButton.isEnabled = component.action != nil
self.avatarButtonView.isUserInteractionEnabled = component.storyStats != nil && component.openStories != nil
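Making action optional turns the row into a purely informational item when no handler is supplied: as the code above shows, the container button is simply disabled and the tap handler becomes a no-op via optional chaining. A generic, self-contained illustration of that pattern (this is not PeerListItemComponent's real API, whose initializer has many more parameters):

// Generic illustration: an optional action closure doubles as an "is interactive" flag.
import UIKit

final class TappableRow: UIView {
    // When nil, the row is rendered but not tappable, mirroring
    // `containerButton.isEnabled = component.action != nil` above.
    var action: (() -> Void)? {
        didSet {
            button.isEnabled = action != nil
        }
    }

    private let button = UIButton(type: .system)

    override init(frame: CGRect) {
        super.init(frame: frame)
        button.frame = bounds
        button.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        button.addTarget(self, action: #selector(pressed), for: .touchUpInside)
        button.isEnabled = action != nil
        addSubview(button)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    @objc private func pressed() {
        action?()
    }
}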

View File

@ -250,7 +250,7 @@ class ContactMultiselectionControllerImpl: ViewController, ContactMultiselection
case let .chats(chatsNode):
count = chatsNode.currentState.selectedPeerIds.count
}
self.titleView.title = CounterControllerTitle(title: self.presentationData.strings.Compose_NewGroupTitle, counter: "\(count)/\(maxCount)")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? self.presentationData.strings.Compose_NewGroupTitle, counter: "\(count)/\(maxCount)")
if self.rightNavigationButton == nil {
let rightNavigationButton = UIBarButtonItem(title: self.presentationData.strings.Common_Next, style: .done, target: self, action: #selector(self.rightNavigationButtonPressed))
self.rightNavigationButton = rightNavigationButton
@ -262,23 +262,23 @@ class ContactMultiselectionControllerImpl: ViewController, ContactMultiselection
if case let .contacts(contactsNode) = self.contactsNode.contentNode {
count = contactsNode.selectionState?.selectedPeerIndices.count ?? 0
}
self.titleView.title = CounterControllerTitle(title: hasActions ? self.presentationData.strings.Premium_Gift_ContactSelection_Title : self.presentationData.strings.Stars_Purchase_GiftStars, counter: "\(count)/\(maxCount)")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? (hasActions ? self.presentationData.strings.Premium_Gift_ContactSelection_Title : self.presentationData.strings.Stars_Purchase_GiftStars), counter: "\(count)/\(maxCount)")
case .requestedUsersSelection:
let maxCount: Int32 = self.limit ?? 10
var count = 0
if case let .contacts(contactsNode) = self.contactsNode.contentNode {
count = contactsNode.selectionState?.selectedPeerIndices.count ?? 0
}
self.titleView.title = CounterControllerTitle(title: self.presentationData.strings.RequestPeer_SelectUsers, counter: "\(count)/\(maxCount)")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? self.presentationData.strings.RequestPeer_SelectUsers, counter: "\(count)/\(maxCount)")
case .channelCreation:
self.titleView.title = CounterControllerTitle(title: self.presentationData.strings.GroupInfo_AddParticipantTitle, counter: "")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? self.presentationData.strings.GroupInfo_AddParticipantTitle, counter: "")
if self.rightNavigationButton == nil {
let rightNavigationButton = UIBarButtonItem(title: self.presentationData.strings.Common_Next, style: .done, target: self, action: #selector(self.rightNavigationButtonPressed))
self.rightNavigationButton = rightNavigationButton
self.navigationItem.rightBarButtonItem = self.rightNavigationButton
}
case .peerSelection:
self.titleView.title = CounterControllerTitle(title: self.presentationData.strings.PrivacyLastSeenSettings_EmpryUsersPlaceholder, counter: "")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? self.presentationData.strings.PrivacyLastSeenSettings_EmpryUsersPlaceholder, counter: "")
if self.rightNavigationButton == nil {
let rightNavigationButton = UIBarButtonItem(title: self.presentationData.strings.Common_Done, style: .done, target: self, action: #selector(self.rightNavigationButtonPressed))
self.rightNavigationButton = rightNavigationButton
@ -286,7 +286,7 @@ class ContactMultiselectionControllerImpl: ViewController, ContactMultiselection
self.navigationItem.rightBarButtonItem = self.rightNavigationButton
}
case let .chatSelection(chatSelection):
self.titleView.title = CounterControllerTitle(title: chatSelection.title, counter: "")
self.titleView.title = CounterControllerTitle(title: self.params.title ?? chatSelection.title, counter: "")
if self.rightNavigationButton == nil {
let rightNavigationButton = UIBarButtonItem(title: self.presentationData.strings.Common_Done, style: .done, target: self, action: #selector(self.rightNavigationButtonPressed))
self.rightNavigationButton = rightNavigationButton
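Each of the branches above now falls back to its previous default title only when params.title is nil, so call sites that need a custom header (for example, selecting people to add to a conference call) can supply one. A usage sketch; makeContactMultiselectionController and the exact .peerSelection payload are assumptions based on common usage, not part of this diff:

// Usage sketch only: a custom title overriding the per-mode default shown above.
let params = ContactMultiselectionControllerParams(
    context: context,
    title: "Add People", // used by titleView instead of the mode's default string
    mode: .peerSelection(searchChatList: false, searchGroups: false, searchChannels: false),
    limit: 10
)
let controller = context.sharedContext.makeContactMultiselectionController(params)
// Present or push `controller` from the call site as usual.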

View File

@ -241,7 +241,7 @@ final class ContactMultiselectionControllerNode: ASDisplayNode {
return .natural(options: options, includeChatList: includeChatList, topPeers: displayTopPeers)
}
let contactListNode = ContactListNode(context: context, updatedPresentationData: updatedPresentationData, presentation: presentation, filters: filters, onlyWriteable: onlyWriteable, isGroupInvitation: isGroupInvitation, selectionState: ContactListNodeGroupSelectionState())
let contactListNode = ContactListNode(context: context, updatedPresentationData: updatedPresentationData, presentation: presentation, filters: filters, onlyWriteable: onlyWriteable, isGroupInvitation: isGroupInvitation, isPeerEnabled: isPeerEnabled, selectionState: ContactListNodeGroupSelectionState())
self.contentNode = .contacts(contactListNode)
if !selectedPeers.isEmpty {

View File

@ -843,7 +843,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
guard let callController = self.callController, callController.call === call else {
return
}
if call.conferenceCall != nil {
if call.conferenceStateValue != nil {
self.callState.set(.single(nil))
self.presentControllerWithCurrentCall()
}