Merge commit 'e750750ac16ae985e0526252ddf5329220994c9d'

Ali 2020-08-07 22:15:16 +04:00
commit 94a542a1a8
74 changed files with 4941 additions and 3949 deletions

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "When you send your location to your friends, Telegram needs access to show them a map.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "When you choose to share your Live Location with friends in a chat, Telegram needs background access to your location to keep them updated for the duration of the live sharing.";
"NSLocationAlwaysUsageDescription" = "When you choose to share your live location with friends in a chat, Telegram needs background access to your location to keep them updated for the duration of the live sharing. You also need this to send locations from an Apple Watch.";
"NSCameraUsageDescription" = "We need this so that you can take and share photos and videos.";
"NSCameraUsageDescription" = "We need this so that you can take and share photos and videos, as well as make video calls.";
"NSPhotoLibraryUsageDescription" = "We need this so that you can share photos and videos from your photo library.";
"NSPhotoLibraryAddUsageDescription" = "We need this so that you can save photos and videos to your photo library.";
"NSMicrophoneUsageDescription" = "We need this so that you can record and share voice messages and videos with sound.";

View File

@ -3028,7 +3028,7 @@ Unused sets are archived when you add more.";
"InfoPlist.NSContactsUsageDescription" = "Telegram will continuously upload your contacts to its heavily encrypted cloud servers to let you connect with your friends across all your devices.";
"InfoPlist.NSLocationWhenInUseUsageDescription" = "When you send your location to your friends, Telegram needs access to show them a map.";
"InfoPlist.NSCameraUsageDescription" = "We need this so that you can take and share photos and videos.";
"InfoPlist.NSCameraUsageDescription" = "We need this so that you can take and share photos and videos, as well as make video calls.";
"InfoPlist.NSPhotoLibraryUsageDescription" = "We need this so that you can share photos and videos from your photo library.";
"InfoPlist.NSPhotoLibraryAddUsageDescription" = "We need this so that you can save photos and videos to your photo library.";
"InfoPlist.NSMicrophoneUsageDescription" = "We need this so that you can record and share voice messages and videos with sound.";
@ -5721,3 +5721,15 @@ Any member of this group will be able to see messages in the channel.";
"Stats.MessageOverview" = "Overview";
"Stats.MessageInteractionsTitle" = "Interactions";
"Stats.MessagePublicForwardsTitle" = "Public Shares";
"Call.CameraTooltip" = "Tap here to turn on your camera";
"Call.CameraConfirmationText" = "Switch to video call?";
"Call.CameraConfirmationConfirm" = "Switch";
"Call.YourMicrophoneOff" = "Your microphone is off";
"Call.MicrophoneOff" = "%@'s microphone is off";
"Call.CameraOff" = "%@'s camera is off";
"Call.BatteryLow" = "%@'s battery level is low";
"Call.Audio" = "audio";
"Call.AudioRouteMute" = "Mute Yourself";

View File

@ -394,6 +394,12 @@ public enum ContactListPeerId: Hashable {
case deviceContact(DeviceContactStableId)
}
public enum ContactListAction: Equatable {
case generic
case voiceCall
case videoCall
}
public enum ContactListPeer: Equatable {
case peer(peer: Peer, isGlobal: Bool, participantCount: Int32?)
case deviceContact(DeviceContactStableId, DeviceContactBasicData)
@ -440,14 +446,16 @@ public final class ContactSelectionControllerParams {
public let title: (PresentationStrings) -> String
public let options: [ContactListAdditionalOption]
public let displayDeviceContacts: Bool
public let displayCallIcons: Bool
public let confirmation: (ContactListPeer) -> Signal<Bool, NoError>
public init(context: AccountContext, autoDismiss: Bool = true, title: @escaping (PresentationStrings) -> String, options: [ContactListAdditionalOption] = [], displayDeviceContacts: Bool = false, confirmation: @escaping (ContactListPeer) -> Signal<Bool, NoError> = { _ in .single(true) }) {
public init(context: AccountContext, autoDismiss: Bool = true, title: @escaping (PresentationStrings) -> String, options: [ContactListAdditionalOption] = [], displayDeviceContacts: Bool = false, displayCallIcons: Bool = false, confirmation: @escaping (ContactListPeer) -> Signal<Bool, NoError> = { _ in .single(true) }) {
self.context = context
self.autoDismiss = autoDismiss
self.title = title
self.options = options
self.displayDeviceContacts = displayDeviceContacts
self.displayCallIcons = displayCallIcons
self.confirmation = confirmation
}
}
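For reference, the new displayCallIcons flag is opt-in at the call site. A minimal usage sketch, assuming an AccountContext value named context is in scope (this mirrors the CallListController change later in this commit; all defaulted parameters are omitted):

let params = ContactSelectionControllerParams(
    context: context,
    title: { $0.Calls_NewCall },
    displayCallIcons: true
)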

View File

@ -3,7 +3,7 @@ import Display
import SwiftSignalKit
public protocol ContactSelectionController: ViewController {
var result: Signal<ContactListPeer?, NoError> { get }
var result: Signal<(ContactListPeer, ContactListAction)?, NoError> { get }
var displayProgress: Bool { get set }
var dismissed: (() -> Void)? { get set }

View File

@ -62,16 +62,23 @@ public struct PresentationCallState: Equatable {
case muted
}
public enum RemoteBatteryLevel: Equatable {
case normal
case low
}
public var state: State
public var videoState: VideoState
public var remoteVideoState: RemoteVideoState
public var remoteAudioState: RemoteAudioState
public var remoteBatteryLevel: RemoteBatteryLevel
public init(state: State, videoState: VideoState, remoteVideoState: RemoteVideoState, remoteAudioState: RemoteAudioState) {
public init(state: State, videoState: VideoState, remoteVideoState: RemoteVideoState, remoteAudioState: RemoteAudioState, remoteBatteryLevel: RemoteBatteryLevel) {
self.state = state
self.videoState = videoState
self.remoteVideoState = remoteVideoState
self.remoteAudioState = remoteAudioState
self.remoteBatteryLevel = remoteBatteryLevel
}
}
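Callers constructing PresentationCallState must now pass the extra remoteBatteryLevel argument, and UI code can branch on it. A minimal sketch, assuming callState is a PresentationCallState and showLowBatteryToast() is a hypothetical helper that surfaces the new Call.BatteryLow string:

switch callState.remoteBatteryLevel {
case .normal:
    break
case .low:
    // hypothetical helper; the real UI uses the toast container added later in this commit
    showLowBatteryToast()
}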

View File

@ -368,6 +368,7 @@ class CallListCallItemNode: ItemListRevealOptionsItemNode {
var hasMissed = false
var hasIncoming = false
var hasOutgoing = false
var isVideo = false
var hadDuration = false
var callDuration: Int32?
@ -375,7 +376,8 @@ class CallListCallItemNode: ItemListRevealOptionsItemNode {
for message in item.messages {
inner: for media in message.media {
if let action = media as? TelegramMediaAction {
if case let .phoneCall(_, discardReason, duration, _) = action.action {
if case let .phoneCall(_, discardReason, duration, video) = action.action {
isVideo = video
if message.flags.contains(.Incoming) {
hasIncoming = true
@ -459,9 +461,12 @@ class CallListCallItemNode: ItemListRevealOptionsItemNode {
let nodeLayout = ListViewItemNodeLayout(contentSize: CGSize(width: params.width, height: titleLayout.size.height + titleSpacing + statusLayout.size.height + verticalInset * 2.0), insets: UIEdgeInsets(top: firstWithHeader ? 29.0 : 0.0, left: 0.0, bottom: 0.0, right: 0.0))
let outgoingIcon = PresentationResourcesCallList.outgoingIcon(item.presentationData.theme)
let outgoingVoiceIcon = PresentationResourcesCallList.outgoingIcon(item.presentationData.theme)
let outgoingVideoIcon = PresentationResourcesCallList.outgoingVideoIcon(item.presentationData.theme)
let infoIcon = PresentationResourcesCallList.infoButton(item.presentationData.theme)
let outgoingIcon = isVideo ? outgoingVideoIcon : outgoingVoiceIcon
let contentSize = nodeLayout.contentSize
return (nodeLayout, { [weak self] synchronousLoads in
@ -582,7 +587,7 @@ class CallListCallItemNode: ItemListRevealOptionsItemNode {
if strongSelf.typeIconNode.image !== outgoingIcon {
strongSelf.typeIconNode.image = outgoingIcon
}
transition.updateFrameAdditive(node: strongSelf.typeIconNode, frame: CGRect(origin: CGPoint(x: revealOffset + leftInset - 81.0, y: floor((nodeLayout.contentSize.height - outgoingIcon.size.height) / 2.0)), size: outgoingIcon.size))
transition.updateFrameAdditive(node: strongSelf.typeIconNode, frame: CGRect(origin: CGPoint(x: revealOffset + leftInset - 79.0, y: floor((nodeLayout.contentSize.height - outgoingIcon.size.height) / 2.0)), size: outgoingIcon.size))
}
strongSelf.typeIconNode.isHidden = !hasOutgoing

View File

@ -11,7 +11,6 @@ import ItemListUI
import PresentationDataUtils
import AccountContext
import AlertUI
import PresentationDataUtils
import AppBundle
import LocalizedPeerData
@ -201,18 +200,18 @@ public final class CallListController: ViewController {
}
@objc func callPressed() {
self.beginCallImpl(isVideo: false)
self.beginCallImpl()
}
private func beginCallImpl(isVideo: Bool) {
let controller = self.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: self.context, title: { $0.Calls_NewCall }))
private func beginCallImpl() {
let controller = self.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: self.context, title: { $0.Calls_NewCall }, displayCallIcons: true))
controller.navigationPresentation = .modal
self.createActionDisposable.set((controller.result
|> take(1)
|> deliverOnMainQueue).start(next: { [weak controller, weak self] peer in
controller?.dismissSearch()
if let strongSelf = self, let contactPeer = peer, case let .peer(peer, _, _) = contactPeer {
strongSelf.call(peer.id, isVideo: isVideo, began: {
if let strongSelf = self, let (contactPeer, action) = peer, case let .peer(peer, _, _) = contactPeer {
strongSelf.call(peer.id, isVideo: action == .videoCall, began: {
if let strongSelf = self {
let _ = (strongSelf.context.sharedContext.hasOngoingCall.get()
|> filter { $0 }

View File

@ -101,12 +101,12 @@ private final class ContactListNodeInteraction {
fileprivate let openSortMenu: () -> Void
fileprivate let authorize: () -> Void
fileprivate let suppressWarning: () -> Void
fileprivate let openPeer: (ContactListPeer) -> Void
fileprivate let openPeer: (ContactListPeer, ContactListAction) -> Void
fileprivate let contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)?
let itemHighlighting = ContactItemHighlighting()
init(activateSearch: @escaping () -> Void, openSortMenu: @escaping () -> Void, authorize: @escaping () -> Void, suppressWarning: @escaping () -> Void, openPeer: @escaping (ContactListPeer) -> Void, contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)?) {
init(activateSearch: @escaping () -> Void, openSortMenu: @escaping () -> Void, authorize: @escaping () -> Void, suppressWarning: @escaping () -> Void, openPeer: @escaping (ContactListPeer, ContactListAction) -> Void, contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)?) {
self.activateSearch = activateSearch
self.openSortMenu = openSortMenu
self.authorize = authorize
@ -128,7 +128,7 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
case permissionInfo(PresentationTheme, String, String, Bool)
case permissionEnable(PresentationTheme, String)
case option(Int, ContactListAdditionalOption, ListViewItemHeader?, PresentationTheme, PresentationStrings)
case peer(Int, ContactListPeer, PeerPresence?, ListViewItemHeader?, ContactsPeerItemSelection, PresentationTheme, PresentationStrings, PresentationDateTimeFormat, PresentationPersonNameOrder, PresentationPersonNameOrder, Bool)
case peer(Int, ContactListPeer, PeerPresence?, ListViewItemHeader?, ContactsPeerItemSelection, PresentationTheme, PresentationStrings, PresentationDateTimeFormat, PresentationPersonNameOrder, PresentationPersonNameOrder, Bool, Bool)
var stableId: ContactListNodeEntryId {
switch self {
@ -142,7 +142,7 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
return .permission(action: true)
case let .option(index, _, _, _, _):
return .option(index: index)
case let .peer(_, peer, _, _, _, _, _, _, _, _, _):
case let .peer(_, peer, _, _, _, _, _, _, _, _, _, _):
switch peer {
case let .peer(peer, _, _):
return .peerId(peer.id.toInt64())
@ -176,7 +176,7 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
})
case let .option(_, option, header, theme, _):
return ContactListActionItem(presentationData: ItemListPresentationData(presentationData), title: option.title, icon: option.icon, clearHighlightAutomatically: false, header: header, action: option.action)
case let .peer(_, peer, presence, header, selection, theme, strings, dateTimeFormat, nameSortOrder, nameDisplayOrder, enabled):
case let .peer(_, peer, presence, header, selection, theme, strings, dateTimeFormat, nameSortOrder, nameDisplayOrder, displayCallIcons, enabled):
var status: ContactsPeerItemStatus
let itemPeer: ContactsPeerItemPeer
var isContextActionEnabled = false
@ -230,8 +230,18 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
}
}
}
return ContactsPeerItem(presentationData: ItemListPresentationData(presentationData), sortOrder: nameSortOrder, displayOrder: nameDisplayOrder, context: context, peerMode: isSearch ? .generalSearch : .peer, peer: itemPeer, status: status, enabled: enabled, selection: selection, editing: ContactsPeerItemEditing(editable: false, editing: false, revealed: false), index: nil, header: header, action: { _ in
interaction.openPeer(peer)
var additionalActions: [ContactsPeerItemAction] = []
if displayCallIcons {
additionalActions = [ContactsPeerItemAction(icon: .voiceCall, action: { _ in
interaction.openPeer(peer, .voiceCall)
}), ContactsPeerItemAction(icon: .videoCall, action: { _ in
interaction.openPeer(peer, .videoCall)
})]
}
return ContactsPeerItem(presentationData: ItemListPresentationData(presentationData), sortOrder: nameSortOrder, displayOrder: nameDisplayOrder, context: context, peerMode: isSearch ? .generalSearch : .peer, peer: itemPeer, status: status, enabled: enabled, selection: selection, editing: ContactsPeerItemEditing(editable: false, editing: false, revealed: false), additionalActions: additionalActions, index: nil, header: header, action: { _ in
interaction.openPeer(peer, .generic)
}, itemHighlighting: interaction.itemHighlighting, contextAction: itemContextAction)
}
}
@ -268,9 +278,9 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
} else {
return false
}
case let .peer(lhsIndex, lhsPeer, lhsPresence, lhsHeader, lhsSelection, lhsTheme, lhsStrings, lhsTimeFormat, lhsSortOrder, lhsDisplayOrder, lhsEnabled):
case let .peer(lhsIndex, lhsPeer, lhsPresence, lhsHeader, lhsSelection, lhsTheme, lhsStrings, lhsTimeFormat, lhsSortOrder, lhsDisplayOrder, lhsDisplayCallIcons, lhsEnabled):
switch rhs {
case let .peer(rhsIndex, rhsPeer, rhsPresence, rhsHeader, rhsSelection, rhsTheme, rhsStrings, rhsTimeFormat, rhsSortOrder, rhsDisplayOrder, rhsEnabled):
case let .peer(rhsIndex, rhsPeer, rhsPresence, rhsHeader, rhsSelection, rhsTheme, rhsStrings, rhsTimeFormat, rhsSortOrder, rhsDisplayOrder, rhsDisplayCallIcons, rhsEnabled):
if lhsIndex != rhsIndex {
return false
}
@ -305,6 +315,9 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
if lhsDisplayOrder != rhsDisplayOrder {
return false
}
if lhsDisplayCallIcons != rhsDisplayCallIcons {
return false
}
if lhsEnabled != rhsEnabled {
return false
}
@ -349,11 +362,11 @@ private enum ContactListNodeEntry: Comparable, Identifiable {
case .peer:
return true
}
case let .peer(lhsIndex, _, _, _, _, _, _, _, _, _, _):
case let .peer(lhsIndex, _, _, _, _, _, _, _, _, _, _, _):
switch rhs {
case .search, .sort, .permissionInfo, .permissionEnable, .option:
return false
case let .peer(rhsIndex, _, _, _, _, _, _, _, _, _, _):
case let .peer(rhsIndex, _, _, _, _, _, _, _, _, _, _, _):
return lhsIndex < rhsIndex
}
}
@ -426,7 +439,7 @@ private extension PeerIndexNameRepresentation {
}
}
private func contactListNodeEntries(accountPeer: Peer?, peers: [ContactListPeer], presences: [PeerId: PeerPresence], presentation: ContactListPresentation, selectionState: ContactListNodeGroupSelectionState?, theme: PresentationTheme, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, disabledPeerIds:Set<PeerId>, authorizationStatus: AccessType, warningSuppressed: (Bool, Bool), displaySortOptions: Bool) -> [ContactListNodeEntry] {
private func contactListNodeEntries(accountPeer: Peer?, peers: [ContactListPeer], presences: [PeerId: PeerPresence], presentation: ContactListPresentation, selectionState: ContactListNodeGroupSelectionState?, theme: PresentationTheme, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, disabledPeerIds:Set<PeerId>, authorizationStatus: AccessType, warningSuppressed: (Bool, Bool), displaySortOptions: Bool, displayCallIcons: Bool) -> [ContactListNodeEntry] {
var entries: [ContactListNodeEntry] = []
var commonHeader: ListViewItemHeader?
@ -606,7 +619,7 @@ private func contactListNodeEntries(accountPeer: Peer?, peers: [ContactListPeer]
default:
enabled = true
}
entries.append(.peer(i, orderedPeers[i], presence, header, selection, theme, strings, dateTimeFormat, sortOrder, displayOrder, enabled))
entries.append(.peer(i, orderedPeers[i], presence, header, selection, theme, strings, dateTimeFormat, sortOrder, displayOrder, displayCallIcons, enabled))
}
return entries
}
@ -629,7 +642,7 @@ private func preparedContactListNodeTransition(context: AccountContext, presenta
case .search:
//indexSections.apend(CollectionIndexNode.searchIndex)
break
case let .peer(_, _, _, header, _, _, _, _, _, _, _):
case let .peer(_, _, _, header, _, _, _, _, _, _, _, _):
if let header = header as? ContactListNameIndexHeader {
if !existingSections.contains(header.letter) {
existingSections.insert(header.letter)
@ -771,7 +784,7 @@ public final class ContactListNode: ASDisplayNode {
public var activateSearch: (() -> Void)?
public var openSortMenu: (() -> Void)?
public var openPeer: ((ContactListPeer) -> Void)?
public var openPeer: ((ContactListPeer, ContactListAction) -> Void)?
public var openPrivacyPolicy: (() -> Void)?
public var suppressPermissionWarning: (() -> Void)?
private let contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)?
@ -786,7 +799,7 @@ public final class ContactListNode: ASDisplayNode {
private var authorizationNode: PermissionContentNode
private let displayPermissionPlaceholder: Bool
public init(context: AccountContext, presentation: Signal<ContactListPresentation, NoError>, filters: [ContactListFilter] = [.excludeSelf], selectionState: ContactListNodeGroupSelectionState? = nil, displayPermissionPlaceholder: Bool = true, displaySortOptions: Bool = false, contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)? = nil, isSearch: Bool = false) {
public init(context: AccountContext, presentation: Signal<ContactListPresentation, NoError>, filters: [ContactListFilter] = [.excludeSelf], selectionState: ContactListNodeGroupSelectionState? = nil, displayPermissionPlaceholder: Bool = true, displaySortOptions: Bool = false, displayCallIcons: Bool = false, contextAction: ((Peer, ASDisplayNode, ContextGesture?) -> Void)? = nil, isSearch: Bool = false) {
self.context = context
self.filters = filters
self.displayPermissionPlaceholder = displayPermissionPlaceholder
@ -856,8 +869,8 @@ public final class ContactListNode: ASDisplayNode {
authorizeImpl?()
}, suppressWarning: { [weak self] in
self?.suppressPermissionWarning?()
}, openPeer: { [weak self] peer in
self?.openPeer?(peer)
}, openPeer: { [weak self] peer, action in
self?.openPeer?(peer, action)
}, contextAction: contextAction)
self.indexNode.indexSelected = { [weak self] section in
@ -885,7 +898,7 @@ public final class ContactListNode: ASDisplayNode {
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.PreferSynchronousDrawing, .PreferSynchronousResourceLoading], scrollToItem: ListViewScrollToItem(index: index, position: .top(-navigationBarSearchContentHeight), animated: false, curve: .Default(duration: nil), directionHint: .Down), additionalScrollDistance: 0.0, updateSizeAndInsets: updateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
break loop
}
case let .peer(_, _, _, header, _, _, _, _, _, _, _):
case let .peer(_, _, _, header, _, _, _, _, _, _, _, _):
if let header = header as? ContactListNameIndexHeader {
if let scalar = UnicodeScalar(header.letter) {
let title = "\(Character(scalar))"
@ -1113,7 +1126,7 @@ public final class ContactListNode: ASDisplayNode {
peers.append(.deviceContact(stableId, contact.0))
}
let entries = contactListNodeEntries(accountPeer: nil, peers: peers, presences: localPeersAndStatuses.1, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, authorizationStatus: .allowed, warningSuppressed: (true, true), displaySortOptions: false)
let entries = contactListNodeEntries(accountPeer: nil, peers: peers, presences: localPeersAndStatuses.1, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, authorizationStatus: .allowed, warningSuppressed: (true, true), displaySortOptions: false, displayCallIcons: displayCallIcons)
let previous = previousEntries.swap(entries)
return .single(preparedContactListNodeTransition(context: context, presentationData: presentationData, from: previous ?? [], to: entries, interaction: interaction, firstTime: previous == nil, isEmpty: false, generateIndexSections: generateSections, animation: .none, isSearch: isSearch))
}
@ -1191,7 +1204,7 @@ public final class ContactListNode: ASDisplayNode {
if (authorizationStatus == .notDetermined || authorizationStatus == .denied) && peers.isEmpty {
isEmpty = true
}
let entries = contactListNodeEntries(accountPeer: view.accountPeer, peers: peers, presences: view.peerPresences, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, authorizationStatus: authorizationStatus, warningSuppressed: warningSuppressed, displaySortOptions: displaySortOptions)
let entries = contactListNodeEntries(accountPeer: view.accountPeer, peers: peers, presences: view.peerPresences, presentation: presentation, selectionState: selectionState, theme: presentationData.theme, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, sortOrder: presentationData.nameSortOrder, displayOrder: presentationData.nameDisplayOrder, disabledPeerIds: disabledPeerIds, authorizationStatus: authorizationStatus, warningSuppressed: warningSuppressed, displaySortOptions: displaySortOptions, displayCallIcons: displayCallIcons)
let previous = previousEntries.swap(entries)
var hadPermissionInfo = false

View File

@ -275,7 +275,7 @@ public class ContactsController: ViewController {
self?.activateSearch()
}
self.contactsNode.contactListNode.openPeer = { peer in
self.contactsNode.contactListNode.openPeer = { peer, _ in
openPeer(peer, false)
}

View File

@ -75,6 +75,18 @@ public struct ContactsPeerItemBadge {
public enum ContactsPeerItemActionIcon {
case none
case add
case voiceCall
case videoCall
}
public struct ContactsPeerItemAction {
public let icon: ContactsPeerItemActionIcon
public let action: ((ContactsPeerItemPeer) -> Void)?
public init(icon: ContactsPeerItemActionIcon, action: @escaping (ContactsPeerItemPeer) -> Void) {
self.icon = icon
self.action = action
}
}
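A minimal sketch of how these actions are built when call icons are requested (openVoiceCall and openVideoCall are hypothetical stand-ins for the openPeer handlers wired up in ContactListNode above):

let callActions = [
    ContactsPeerItemAction(icon: .voiceCall, action: { peer in openVoiceCall(peer) }),
    ContactsPeerItemAction(icon: .videoCall, action: { peer in openVideoCall(peer) })
]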
public enum ContactsPeerItemPeer: Equatable {
@ -120,6 +132,7 @@ public class ContactsPeerItem: ItemListItem, ListViewItemWithHeader {
let selection: ContactsPeerItemSelection
let editing: ContactsPeerItemEditing
let options: [ItemListPeerItemRevealOption]
let additionalActions: [ContactsPeerItemAction]
let actionIcon: ContactsPeerItemActionIcon
let action: (ContactsPeerItemPeer) -> Void
let disabledAction: ((ContactsPeerItemPeer) -> Void)?
@ -134,7 +147,7 @@ public class ContactsPeerItem: ItemListItem, ListViewItemWithHeader {
public let header: ListViewItemHeader?
public init(presentationData: ItemListPresentationData, style: ItemListStyle = .plain, sectionId: ItemListSectionId = 0, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, context: AccountContext, peerMode: ContactsPeerItemPeerMode, peer: ContactsPeerItemPeer, status: ContactsPeerItemStatus, badge: ContactsPeerItemBadge? = nil, enabled: Bool, selection: ContactsPeerItemSelection, editing: ContactsPeerItemEditing, options: [ItemListPeerItemRevealOption] = [], actionIcon: ContactsPeerItemActionIcon = .none, index: PeerNameIndex?, header: ListViewItemHeader?, action: @escaping (ContactsPeerItemPeer) -> Void, disabledAction: ((ContactsPeerItemPeer) -> Void)? = nil, setPeerIdWithRevealedOptions: ((PeerId?, PeerId?) -> Void)? = nil, deletePeer: ((PeerId) -> Void)? = nil, itemHighlighting: ContactItemHighlighting? = nil, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
public init(presentationData: ItemListPresentationData, style: ItemListStyle = .plain, sectionId: ItemListSectionId = 0, sortOrder: PresentationPersonNameOrder, displayOrder: PresentationPersonNameOrder, context: AccountContext, peerMode: ContactsPeerItemPeerMode, peer: ContactsPeerItemPeer, status: ContactsPeerItemStatus, badge: ContactsPeerItemBadge? = nil, enabled: Bool, selection: ContactsPeerItemSelection, editing: ContactsPeerItemEditing, options: [ItemListPeerItemRevealOption] = [], additionalActions: [ContactsPeerItemAction] = [], actionIcon: ContactsPeerItemActionIcon = .none, index: PeerNameIndex?, header: ListViewItemHeader?, action: @escaping (ContactsPeerItemPeer) -> Void, disabledAction: ((ContactsPeerItemPeer) -> Void)? = nil, setPeerIdWithRevealedOptions: ((PeerId?, PeerId?) -> Void)? = nil, deletePeer: ((PeerId) -> Void)? = nil, itemHighlighting: ContactItemHighlighting? = nil, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
self.presentationData = presentationData
self.style = style
self.sectionId = sectionId
@ -149,6 +162,7 @@ public class ContactsPeerItem: ItemListItem, ListViewItemWithHeader {
self.selection = selection
self.editing = editing
self.options = options
self.additionalActions = additionalActions
self.actionIcon = actionIcon
self.action = action
self.disabledAction = disabledAction
@ -303,7 +317,7 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
private var badgeBackgroundNode: ASImageNode?
private var badgeTextNode: TextNode?
private var selectionNode: CheckNode?
private var actionIconNode: ASImageNode?
private var actionButtonNodes: [HighlightableButtonNode]?
private var isHighlighted: Bool = false
@ -325,7 +339,7 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
public var item: ContactsPeerItem? {
return self.layoutParams?.0
}
required public init() {
self.backgroundNode = ASDisplayNode()
self.backgroundNode.isLayerBacked = true
@ -489,12 +503,32 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
break
}
let actionIconImage: UIImage?
switch item.actionIcon {
case .none:
actionIconImage = nil
case .add:
actionIconImage = PresentationResourcesItemList.plusIconImage(item.presentationData.theme)
var actionButtons: [ActionButton]?
struct ActionButton {
let image: UIImage?
let action: ((ContactsPeerItemPeer) -> Void)?
init(theme: PresentationTheme, icon: ContactsPeerItemActionIcon, action: ((ContactsPeerItemPeer) -> Void)?) {
let image: UIImage?
switch icon {
case .none:
image = nil
case .add:
image = PresentationResourcesItemList.plusIconImage(theme)
case .voiceCall:
image = PresentationResourcesItemList.voiceCallIcon(theme)
case .videoCall:
image = PresentationResourcesItemList.videoCallIcon(theme)
}
self.image = image
self.action = action
}
}
if item.actionIcon != .none {
actionButtons = [ActionButton(theme: item.presentationData.theme, icon: item.actionIcon, action: nil)]
} else if !item.additionalActions.isEmpty {
actionButtons = item.additionalActions.map { ActionButton(theme: item.presentationData.theme, icon: $0.icon, action: $0.action) }
}
var titleAttributedString: NSAttributedString?
@ -620,8 +654,13 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
if let verificationIconImage = verificationIconImage {
additionalTitleInset += 3.0 + verificationIconImage.size.width
}
if let actionIconImage = actionIconImage {
additionalTitleInset += 3.0 + actionIconImage.size.width
if let actionButtons = actionButtons {
additionalTitleInset += 3.0
for actionButton in actionButtons {
if let image = actionButton.image {
additionalTitleInset += image.size.width + 12.0
}
}
}
additionalTitleInset += badgeSize
@ -784,23 +823,37 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
verificationIconNode.removeFromSupernode()
}
if let actionIconImage = actionIconImage {
if strongSelf.actionIconNode == nil {
let actionIconNode = ASImageNode()
actionIconNode.isLayerBacked = true
actionIconNode.displayWithoutProcessing = true
actionIconNode.displaysAsynchronously = false
strongSelf.actionIconNode = actionIconNode
strongSelf.containerNode.addSubnode(actionIconNode)
if let actionButtons = actionButtons {
if strongSelf.actionButtonNodes == nil {
var actionButtonNodes: [HighlightableButtonNode] = []
for action in actionButtons {
let actionButtonNode = HighlightableButtonNode()
actionButtonNode.isUserInteractionEnabled = action.action != nil
actionButtonNode.addTarget(strongSelf, action: #selector(strongSelf.actionButtonPressed(_:)), forControlEvents: .touchUpInside)
strongSelf.containerNode.addSubnode(actionButtonNode)
actionButtonNodes.append(actionButtonNode)
}
strongSelf.actionButtonNodes = actionButtonNodes
}
if let actionIconNode = strongSelf.actionIconNode {
actionIconNode.image = actionIconImage
transition.updateFrame(node: actionIconNode, frame: CGRect(origin: CGPoint(x: revealOffset + params.width - params.rightInset - 12.0 - actionIconImage.size.width, y: floor((nodeLayout.contentSize.height - actionIconImage.size.height) / 2.0)), size: actionIconImage.size))
if let actionButtonNodes = strongSelf.actionButtonNodes {
var offset: CGFloat = 0.0
if actionButtons.count > 1 {
offset += 12.0
}
for (actionButtonNode, actionButton) in zip(actionButtonNodes, actionButtons).reversed() {
guard let actionButtonImage = actionButton.image else {
continue
}
actionButtonNode.setImage(actionButton.image, for: .normal)
transition.updateFrame(node: actionButtonNode, frame: CGRect(origin: CGPoint(x: revealOffset + params.width - params.rightInset - 12.0 - actionButtonImage.size.width - offset, y: floor((nodeLayout.contentSize.height - actionButtonImage.size.height) / 2.0)), size: actionButtonImage.size))
offset += actionButtonImage.size.width + 12.0
}
}
} else if let actionIconNode = strongSelf.actionIconNode {
strongSelf.actionIconNode = nil
actionIconNode.removeFromSupernode()
} else if let actionButtonNodes = strongSelf.actionButtonNodes {
strongSelf.actionButtonNodes = nil
actionButtonNodes.forEach { $0.removeFromSupernode() }
}
let badgeBackgroundWidth: CGFloat
@ -893,6 +946,13 @@ public class ContactsPeerItemNode: ItemListRevealOptionsItemNode {
}
}
@objc private func actionButtonPressed(_ sender: HighlightableButtonNode) {
guard let actionButtonNodes = self.actionButtonNodes, let index = actionButtonNodes.firstIndex(of: sender), let item = self.item, index < item.additionalActions.count else {
return
}
item.additionalActions[index].action?(item.peer)
}
override public func updateRevealOffset(offset: CGFloat, transition: ContainedViewLayoutTransition) {
super.updateRevealOffset(offset: offset, transition: transition)

View File

@ -383,6 +383,29 @@ public func generateGradientTintedImage(image: UIImage?, colors: [UIColor]) -> U
return tintedImage
}
public func generateGradientImage(size: CGSize, colors: [UIColor], locations: [CGFloat]) -> UIImage? {
guard colors.count == locations.count else {
return nil
}
UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
if let context = UIGraphicsGetCurrentContext() {
let gradientColors = colors.map { $0.cgColor } as CFArray
let colorSpace = CGColorSpaceCreateDeviceRGB()
var locations = locations
let gradient = CGGradient(colorsSpace: colorSpace, colors: gradientColors, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: 0.0), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context.restoreGState()
}
let image = UIGraphicsGetImageFromCurrentImageContext()!
UIGraphicsEndImageContext()
return image
}
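A minimal usage sketch of the new helper: a two-stop vertical fade. The colors and locations arrays must have equal counts, otherwise the guard above returns nil.

let fadeImage = generateGradientImage(
    size: CGSize(width: 100.0, height: 100.0),
    colors: [UIColor.black, UIColor.clear],
    locations: [0.0, 1.0]
)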
public func generateScaledImage(image: UIImage?, size: CGSize, opaque: Bool = true, scale: CGFloat? = nil) -> UIImage? {
guard let image = image else {
return nil

View File

@ -1341,7 +1341,7 @@ private func addContactToExisting(context: AccountContext, parentController: Vie
(parentController.navigationController as? NavigationController)?.pushViewController(contactsController)
let _ = (contactsController.result
|> deliverOnMainQueue).start(next: { peer in
if let peer = peer {
if let (peer, _) = peer {
let dataSignal: Signal<(Peer?, DeviceContactStableId?), NoError>
switch peer {
case let .peer(contact, _, _):

View File

@ -1896,7 +1896,7 @@ public func groupInfoController(context: AccountContext, peerId originalPeerId:
if let contactsController = contactsController as? ContactSelectionController {
selectAddMemberDisposable.set((contactsController.result
|> deliverOnMainQueue).start(next: { [weak contactsController] memberPeer in
guard let memberPeer = memberPeer else {
guard let (memberPeer, _) = memberPeer else {
return
}

View File

@ -68,14 +68,30 @@ private func stringForCallType(message: Message, strings: PresentationStrings) -
switch media {
case let action as TelegramMediaAction:
switch action.action {
case let .phoneCall(_, discardReason, _, _):
case let .phoneCall(_, discardReason, _, isVideo):
let incoming = message.flags.contains(.Incoming)
if let discardReason = discardReason {
switch discardReason {
case .busy, .disconnect:
string = strings.Notification_CallCanceled
if isVideo {
string = strings.Notification_VideoCallCanceled
} else {
string = strings.Notification_CallCanceled
}
case .missed:
string = incoming ? strings.Notification_CallMissed : strings.Notification_CallCanceled
if incoming {
if isVideo {
string = strings.Notification_VideoCallMissed
} else {
string = strings.Notification_CallMissed
}
} else {
if isVideo {
string = strings.Notification_VideoCallCanceled
} else {
string = strings.Notification_CallCanceled
}
}
case .hangup:
break
}
@ -83,9 +99,17 @@ private func stringForCallType(message: Message, strings: PresentationStrings) -
if string.isEmpty {
if incoming {
string = strings.Notification_CallIncoming
if isVideo {
string = strings.Notification_VideoCallIncoming
} else {
string = strings.Notification_CallIncoming
}
} else {
string = strings.Notification_CallOutgoing
if isVideo {
string = strings.Notification_VideoCallOutgoing
} else {
string = strings.Notification_CallOutgoing
}
}
}
default:

View File

@ -8,6 +8,10 @@ public func textAlertController(context: AccountContext, title: String?, text: S
return textAlertController(alertContext: AlertControllerContext(theme: AlertControllerTheme(presentationData: context.sharedContext.currentPresentationData.with { $0 }), themeSignal: context.sharedContext.presentationData |> map { presentationData in AlertControllerTheme(presentationData: presentationData) }), title: title, text: text, actions: actions, actionLayout: actionLayout, allowInputInset: allowInputInset, dismissOnOutsideTap: dismissOnOutsideTap)
}
public func textAlertController(sharedContext: SharedAccountContext, title: String?, text: String, actions: [TextAlertAction], actionLayout: TextAlertContentActionLayout = .horizontal, allowInputInset: Bool = true, dismissOnOutsideTap: Bool = true) -> AlertController {
return textAlertController(alertContext: AlertControllerContext(theme: AlertControllerTheme(presentationData: sharedContext.currentPresentationData.with { $0 }), themeSignal: sharedContext.presentationData |> map { presentationData in AlertControllerTheme(presentationData: presentationData) }), title: title, text: text, actions: actions, actionLayout: actionLayout, allowInputInset: allowInputInset, dismissOnOutsideTap: dismissOnOutsideTap)
}
public func richTextAlertController(context: AccountContext, title: NSAttributedString?, text: NSAttributedString, actions: [TextAlertAction], actionLayout: TextAlertContentActionLayout = .horizontal, allowInputInset: Bool = true, dismissAutomatically: Bool = true) -> AlertController {
return richTextAlertController(alertContext: AlertControllerContext(theme: AlertControllerTheme(presentationData: context.sharedContext.currentPresentationData.with { $0 }), themeSignal: context.sharedContext.presentationData |> map { presentationData in AlertControllerTheme(presentationData: presentationData) }), title: title, text: text, actions: actions, actionLayout: actionLayout, allowInputInset: allowInputInset, dismissAutomatically: dismissAutomatically)
}

View File

@ -8,6 +8,7 @@ public enum ManagedAudioSessionType: Equatable {
case playWithPossiblePortOverride
case record(speaker: Bool)
case voiceCall
case videoCall
var isPlay: Bool {
switch self {
@ -23,7 +24,7 @@ private func nativeCategoryForType(_ type: ManagedAudioSessionType, headphones:
switch type {
case .play:
return .playback
case .record, .voiceCall:
case .record, .voiceCall, .videoCall:
return .playAndRecord
case .playWithPossiblePortOverride:
if headphones {
@ -244,6 +245,7 @@ public final class ManagedAudioSession {
if let availableInputs = audioSession.availableInputs {
var hasHeadphones = false
var hasBluetoothHeadphones = false
var headphonesAreActive = false
loop: for currentOutput in audioSession.currentRoute.outputs {
@ -251,6 +253,7 @@ public final class ManagedAudioSession {
case .headphones, .bluetoothA2DP, .bluetoothHFP:
headphonesAreActive = true
hasHeadphones = true
hasBluetoothHeadphones = [.bluetoothA2DP, .bluetoothHFP].contains(currentOutput.portType)
activeOutput = .headphones
break loop
default:
@ -296,7 +299,7 @@ public final class ManagedAudioSession {
availableOutputs.insert(.speaker, at: 0)
}
if hasHeadphones {
if hasHeadphones && !hasBluetoothHeadphones {
availableOutputs.insert(.headphones, at: 0)
}
availableOutputs.insert(.builtin, at: 0)
@ -672,15 +675,24 @@ public final class ManagedAudioSession {
options.insert(.allowBluetooth)
}
}
case .record, .voiceCall:
case .record, .voiceCall, .videoCall:
options.insert(.allowBluetooth)
}
print("ManagedAudioSession setting active true")
let mode: AVAudioSession.Mode
switch type {
case .voiceCall:
mode = .voiceChat
case .videoCall:
mode = .videoChat
default:
mode = .default
}
if #available(iOSApplicationExtension 11.0, iOS 11.0, *) {
try AVAudioSession.sharedInstance().setCategory(nativeCategory, mode: type == .voiceCall ? .voiceChat : .default, policy: .default, options: options)
try AVAudioSession.sharedInstance().setCategory(nativeCategory, mode: mode, policy: .default, options: options)
} else {
AVAudioSession.sharedInstance().perform(NSSelectorFromString("setCategory:error:"), with: nativeCategory)
try AVAudioSession.sharedInstance().setMode(type == .voiceCall ? .voiceChat : .default)
try AVAudioSession.sharedInstance().setMode(mode)
}
} catch let error {
print("ManagedAudioSession setup error \(error)")

View File

@ -23,6 +23,7 @@ swift_library(
"//submodules/PresentationDataUtils:PresentationDataUtils",
"//submodules/TelegramCallsUI/CallsEmoji:CallsEmoji",
"//submodules/SemanticStatusNode:SemanticStatusNode",
"//submodules/TooltipUI:TooltipUI",
],
visibility = [
"//visibility:public",

View File

@ -19,11 +19,12 @@ protocol CallControllerNodeProtocol: class {
var toggleMute: (() -> Void)? { get set }
var setCurrentAudioOutput: ((AudioSessionOutput) -> Void)? { get set }
var beginAudioOuputSelection: (() -> Void)? { get set }
var beginAudioOuputSelection: ((Bool) -> Void)? { get set }
var acceptCall: (() -> Void)? { get set }
var endCall: (() -> Void)? { get set }
var back: (() -> Void)? { get set }
var presentCallRating: ((CallId) -> Void)? { get set }
var present: ((ViewController) -> Void)? { get set }
var callEnded: ((Bool) -> Void)? { get set }
var dismissedInteractively: (() -> Void)? { get set }
@ -148,7 +149,7 @@ public final class CallController: ViewController {
self?.call.setCurrentAudioOutput(output)
}
self.controllerNode.beginAudioOuputSelection = { [weak self] in
self.controllerNode.beginAudioOuputSelection = { [weak self] hasMute in
guard let strongSelf = self, let (availableOutputs, currentOutput) = strongSelf.audioOutputState else {
return
}
@ -173,13 +174,20 @@ public final class CallController: ViewController {
title = UIDevice.current.model
case .speaker:
title = strongSelf.presentationData.strings.Call_AudioRouteSpeaker
icon = UIImage(bundleImageName: "Call/CallRouteSpeaker")
icon = generateScaledImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), size: CGSize(width: 48.0, height: 48.0), opaque: false)
case .headphones:
title = strongSelf.presentationData.strings.Call_AudioRouteHeadphones
case let .port(port):
title = port.name
if port.type == .bluetooth {
icon = UIImage(bundleImageName: "Call/CallRouteBluetooth")
var image = UIImage(bundleImageName: "Call/CallBluetoothButton")
let portName = port.name.lowercased()
if portName.contains("airpods pro") {
image = UIImage(bundleImageName: "Call/CallAirpodsProButton")
} else if portName.contains("airpods") {
image = UIImage(bundleImageName: "Call/CallAirpodsButton")
}
icon = generateScaledImage(image: image, size: CGSize(width: 48.0, height: 48.0), opaque: false)
}
}
items.append(CallRouteActionSheetItem(title: title, icon: icon, selected: output == currentOutput, action: { [weak actionSheet] in
@ -188,8 +196,15 @@ public final class CallController: ViewController {
}))
}
if hasMute {
items.append(CallRouteActionSheetItem(title: strongSelf.presentationData.strings.Call_AudioRouteMute, icon: generateScaledImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), size: CGSize(width: 48.0, height: 48.0), opaque: false), selected: strongSelf.isMuted, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
self?.call.toggleIsMuted()
}))
}
actionSheet.setItemGroups([ActionSheetItemGroup(items: items), ActionSheetItemGroup(items: [
ActionSheetButtonItem(title: strongSelf.presentationData.strings.Common_Cancel, color: .accent, font: .bold, action: { [weak actionSheet] in
ActionSheetButtonItem(title: strongSelf.presentationData.strings.Call_AudioRouteHide, color: .accent, font: .bold, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
})
])
@ -231,6 +246,12 @@ public final class CallController: ViewController {
}
}
self.controllerNode.present = { [weak self] controller in
if let strongSelf = self {
strongSelf.present(controller, in: .window(.root))
}
}
self.controllerNode.callEnded = { [weak self] didPresentRating in
if let strongSelf = self, !didPresentRating {
let _ = (combineLatest(strongSelf.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.callListSettings]), ApplicationSpecificNotice.getCallsTabTip(accountManager: strongSelf.sharedContext.accountManager))

View File

@ -18,6 +18,14 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
case blurred(isFilled: Bool)
case color(Color)
var isFilled: Bool {
if case let .blurred(isFilled) = self {
return isFilled
} else {
return false
}
}
}
enum Image {
@ -26,6 +34,8 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
case flipCamera
case bluetooth
case speaker
case airpods
case airpodsPro
case accept
case end
}
@ -150,7 +160,7 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
self.effectView.isHidden = true
}
self.alpha = content.isEnabled ? 1.0 : 0.7
transition.updateAlpha(node: self, alpha: content.isEnabled ? 1.0 : 0.4)
self.isUserInteractionEnabled = content.isEnabled
let contentBackgroundImage: UIImage? = nil
@ -204,6 +214,10 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: imageColor)
case .speaker:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: imageColor)
case .airpods:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAirpodsButton"), color: imageColor)
case .airpodsPro:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAirpodsProButton"), color: imageColor)
case .accept:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: imageColor)
case .end:
@ -227,6 +241,11 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
}
})
// if transition.isAnimated, let previousContent = previousContent, content.image == .camera, !previousContent.appearance.isFilled && content.appearance.isFilled {
// self.contentBackgroundNode.image = contentBackgroundImage
// self.contentBackgroundNode.layer.animateSpring(from: 0.01 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 1.25, damping: 105.0)
// }
if transition.isAnimated, let contentBackgroundImage = contentBackgroundImage, let previousContent = self.contentBackgroundNode.image {
self.contentBackgroundNode.image = contentBackgroundImage
self.contentBackgroundNode.layer.animate(from: previousContent.cgImage!, to: contentBackgroundImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)

View File

@ -6,12 +6,18 @@ import SwiftSignalKit
import MediaPlayer
import TelegramPresentationData
enum CallControllerButtonsSpeakerMode {
enum CallControllerButtonsSpeakerMode: Equatable {
enum BluetoothType: Equatable {
case generic
case airpods
case airpodsPro
}
case none
case builtin
case speaker
case headphones
case bluetooth
case bluetooth(BluetoothType)
}
enum CallControllerButtonsMode: Equatable {
@ -23,9 +29,9 @@ enum CallControllerButtonsMode: Equatable {
var isInitializingCamera: Bool
}
case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case active(speakerMode: CallControllerButtonsSpeakerMode, hasAudioRouteMenu: Bool, videoState: VideoState)
case incoming(speakerMode: CallControllerButtonsSpeakerMode, hasAudioRouteMenu: Bool, videoState: VideoState)
case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, hasAudioRouteMenu: Bool, videoState: VideoState)
}
private enum ButtonDescription: Equatable {
@ -43,6 +49,8 @@ private enum ButtonDescription: Equatable {
case builtin
case speaker
case bluetooth
case airpods
case airpodsPro
}
enum EndType {
@ -54,7 +62,7 @@ private enum ButtonDescription: Equatable {
case accept
case end(EndType)
case enableCamera(Bool, Bool, Bool)
case switchCamera
case switchCamera(Bool)
case soundOutput(SoundOutput)
case mute(Bool)
@ -159,10 +167,12 @@ final class CallControllerButtonsNode: ASDisplayNode {
let speakerMode: CallControllerButtonsSpeakerMode
var videoState: CallControllerButtonsMode.VideoState
let hasAudioRouteMenu: Bool
switch mode {
case .incoming(let speakerModeValue, let videoStateValue), .outgoingRinging(let speakerModeValue, let videoStateValue), .active(let speakerModeValue, let videoStateValue):
case .incoming(let speakerModeValue, let hasAudioRouteMenuValue, let videoStateValue), .outgoingRinging(let speakerModeValue, let hasAudioRouteMenuValue, let videoStateValue), .active(let speakerModeValue, let hasAudioRouteMenuValue, let videoStateValue):
speakerMode = speakerModeValue
videoState = videoStateValue
hasAudioRouteMenu = hasAudioRouteMenuValue
}
enum MappedState {
@ -177,7 +187,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
mappedState = .incomingRinging
case .outgoingRinging:
mappedState = .outgoingRinging
case let .active(_, videoStateValue):
case let .active(_, _, videoStateValue):
mappedState = .active
videoState = videoStateValue
}
@ -190,14 +200,21 @@ final class CallControllerButtonsNode: ASDisplayNode {
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case let .bluetooth(type):
switch type {
case .generic:
soundOutput = .bluetooth
case .airpods:
soundOutput = .airpods
case .airpodsPro:
soundOutput = .airpodsPro
}
}
if videoState.isAvailable {
@ -213,12 +230,17 @@ final class CallControllerButtonsNode: ASDisplayNode {
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
topButtons.append(.mute(self.isMuted))
if videoState.hasVideo {
topButtons.append(.switchCamera)
} else {
topButtons.append(.enableCamera(isCameraActive, false, isCameraInitializing))
if !videoState.hasVideo {
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
} else {
if hasAudioRouteMenu {
topButtons.append(.soundOutput(soundOutput))
} else {
topButtons.append(.mute(self.isMuted))
}
topButtons.append(.switchCamera(isCameraActive && !isCameraInitializing))
}
} else {
topButtons.append(.mute(self.isMuted))
@ -272,19 +294,30 @@ final class CallControllerButtonsNode: ASDisplayNode {
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .builtin
case .bluetooth:
soundOutput = .bluetooth
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .builtin
case let .bluetooth(type):
switch type {
case .generic:
soundOutput = .bluetooth
case .airpods:
soundOutput = .airpods
case .airpodsPro:
soundOutput = .airpodsPro
}
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
if hasAudioRouteMenu {
topButtons.append(.soundOutput(soundOutput))
} else {
topButtons.append(.mute(isMuted))
}
topButtons.append(.switchCamera(isCameraActive && !isCameraInitializing))
topButtons.append(.end(.end))
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
@ -317,14 +350,21 @@ final class CallControllerButtonsNode: ASDisplayNode {
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case let .bluetooth(type):
switch type {
case .generic:
soundOutput = .bluetooth
case .airpods:
soundOutput = .airpods
case .airpodsPro:
soundOutput = .airpodsPro
}
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
@ -404,15 +444,17 @@ final class CallControllerButtonsNode: ASDisplayNode {
hasProgress: isInitializing
)
buttonText = strings.Call_Camera
case .switchCamera:
case let .switchCamera(isEnabled):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: .flipCamera
image: .flipCamera,
isEnabled: isEnabled
)
buttonText = strings.Call_Flip
case let .soundOutput(value):
let image: CallControllerButtonItemNode.Content.Image
var isFilled = false
var title: String = strings.Call_Speaker
switch value {
case .builtin:
image = .speaker
@ -421,12 +463,19 @@ final class CallControllerButtonsNode: ASDisplayNode {
isFilled = true
case .bluetooth:
image = .bluetooth
title = strings.Call_Audio
case .airpods:
image = .airpods
title = strings.Call_Audio
case .airpodsPro:
image = .airpodsPro
title = strings.Call_Audio
}
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isFilled),
image: image
)
buttonText = strings.Call_Speaker
buttonText = title
case let .mute(isMuted):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isMuted),

View File

@ -83,7 +83,7 @@ final class CallControllerKeyPreviewNode: ASDisplayNode {
}
func animateOut(to rect: CGRect, toNode: ASDisplayNode, completion: @escaping () -> Void) {
self.keyTextNode.layer.animatePosition(from: self.keyTextNode.layer.position, to: CGPoint(x: rect.midX, y: rect.midY), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
self.keyTextNode.layer.animatePosition(from: self.keyTextNode.layer.position, to: CGPoint(x: rect.midX + 2.0, y: rect.midY), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
completion()
})
self.keyTextNode.layer.animateScale(from: 1.0, to: rect.size.width / self.keyTextNode.frame.size.width, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)

View File

@ -13,6 +13,9 @@ import AccountContext
import LocalizedPeerData
import PhotoResources
import CallsEmoji
import TooltipUI
import AlertUI
import PresentationDataUtils
private func interpolateFrame(from fromValue: CGRect, to toValue: CGRect, t: CGFloat) -> CGRect {
return CGRect(x: floorToScreenPixels(toValue.origin.x * t + fromValue.origin.x * (1.0 - t)), y: floorToScreenPixels(toValue.origin.y * t + fromValue.origin.y * (1.0 - t)), width: floorToScreenPixels(toValue.size.width * t + fromValue.size.width * (1.0 - t)), height: floorToScreenPixels(toValue.size.height * t + fromValue.size.height * (1.0 - t)))
@ -52,10 +55,15 @@ private final class CallVideoNode: ASDisplayNode {
super.init()
if #available(iOS 13.0, *) {
self.layer.cornerCurve = .continuous
self.videoTransformContainer.layer.cornerCurve = .continuous
}
self.videoTransformContainer.view.addSubview(self.videoView.view)
self.addSubnode(self.videoTransformContainer)
self.videoView.setOnFirstFrameReceived { [weak self] _ in
self.videoView.setOnFirstFrameReceived { [weak self] aspectRatio in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
@ -219,7 +227,7 @@ private final class CallVideoNode: ASDisplayNode {
transition.updateCornerRadius(layer: self.layer, cornerRadius: self.currentCornerRadius)
}
func updateIsBlurred(isBlurred: Bool) {
func updateIsBlurred(isBlurred: Bool, light: Bool = false, animated: Bool = true) {
if self.isBlurred == isBlurred {
return
}
@ -231,12 +239,16 @@ private final class CallVideoNode: ASDisplayNode {
effectView.clipsToBounds = true
effectView.layer.cornerRadius = self.currentCornerRadius
self.effectView = effectView
effectView.frame = self.videoView.view.frame
self.view.addSubview(effectView)
effectView.frame = self.videoTransformContainer.bounds
self.videoTransformContainer.view.addSubview(effectView)
}
if animated {
UIView.animate(withDuration: 0.3, animations: {
self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark)
})
} else {
self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark)
}
UIView.animate(withDuration: 0.3, animations: {
self.effectView?.effect = UIBlurEffect(style: .dark)
})
} else if let effectView = self.effectView {
self.effectView = nil
UIView.animate(withDuration: 0.3, animations: {
@ -246,6 +258,22 @@ private final class CallVideoNode: ASDisplayNode {
})
}
}
func flip(withBackground: Bool) {
if withBackground {
self.backgroundColor = .black
}
UIView.transition(with: self.videoTransformContainer.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: {
UIView.performWithoutAnimation {
self.updateIsBlurred(isBlurred: true, light: true, animated: false)
}
}) { finished in
self.backgroundColor = nil
Queue.mainQueue().after(0.5) {
self.updateIsBlurred(isBlurred: false)
}
}
}
}
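Inside CallControllerNode, the camera-switch handler can trigger this flip animation on the local preview roughly as follows (the exact call site is not part of this hunk, so treat this as an illustrative sketch):

self.outgoingVideoNodeValue?.flip(withBackground: false)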
final class CallControllerNode: ViewControllerTracingNode, CallControllerNodeProtocol {
@ -272,7 +300,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private let containerNode: ASDisplayNode
private let imageNode: TransformImageNode
private let dimNode: ASDisplayNode
private let dimNode: ASImageNode
private var candidateIncomingVideoNodeValue: CallVideoNode?
private var incomingVideoNodeValue: CallVideoNode?
@ -287,6 +315,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private var isRequestingVideo: Bool = false
private var animateRequestedVideoOnce: Bool = false
private var displayedCameraTooltip: Bool = false
private var expandedVideoNode: CallVideoNode?
private var minimizedVideoNode: CallVideoNode?
private var disableAnimationForExpandedVideoOnce: Bool = false
@ -297,6 +327,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
private let videoPausedNode: ImmediateTextNode
private let toastNode: CallControllerToastContainerNode
private let buttonsNode: CallControllerButtonsNode
private var keyPreviewNode: CallControllerKeyPreviewNode?
@ -324,14 +355,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var toggleMute: (() -> Void)?
var setCurrentAudioOutput: ((AudioSessionOutput) -> Void)?
var beginAudioOuputSelection: (() -> Void)?
var beginAudioOuputSelection: ((Bool) -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var back: (() -> Void)?
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
var present: ((ViewController) -> Void)?
private var toastContent: CallControllerToastContent?
private var buttonsMode: CallControllerButtonsMode?
private var isUIHidden: Bool = false
@ -367,9 +400,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.imageNode = TransformImageNode()
self.imageNode.contentAnimations = [.subsequentUpdates]
self.dimNode = ASDisplayNode()
self.dimNode = ASImageNode()
self.dimNode.contentMode = .scaleToFill
self.dimNode.isUserInteractionEnabled = false
self.dimNode.backgroundColor = UIColor(white: 0.0, alpha: 0.4)
self.dimNode.backgroundColor = UIColor(white: 0.0, alpha: 0.3)
self.backButtonArrowNode = ASImageNode()
self.backButtonArrowNode.displayWithoutProcessing = true
@ -383,6 +417,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.videoPausedNode.alpha = 0.0
self.buttonsNode = CallControllerButtonsNode(strings: self.presentationData.strings)
self.toastNode = CallControllerToastContainerNode(strings: self.presentationData.strings)
self.keyButtonNode = CallControllerKeyButton()
super.init()
@ -415,6 +450,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.containerNode.addSubnode(self.statusNode)
self.containerNode.addSubnode(self.videoPausedNode)
self.containerNode.addSubnode(self.buttonsNode)
self.containerNode.addSubnode(self.toastNode)
self.containerNode.addSubnode(self.keyButtonNode)
self.containerNode.addSubnode(self.backButtonArrowNode)
self.containerNode.addSubnode(self.backButtonNode)
@ -424,7 +460,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
self.buttonsNode.speaker = { [weak self] in
self?.beginAudioOuputSelection?()
self?.beginAudioOuputSelection?(true)
}
self.buttonsNode.acceptOrEnd = { [weak self] in
@ -454,14 +490,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
switch callState.state {
case .active:
if strongSelf.outgoingVideoNodeValue == nil {
switch callState.videoState {
case .inactive:
strongSelf.isRequestingVideo = true
strongSelf.updateButtonsMode()
default:
break
let proceed = {
switch callState.videoState {
case .inactive:
strongSelf.isRequestingVideo = true
strongSelf.updateButtonsMode()
default:
break
}
strongSelf.call.requestVideo()
}
strongSelf.call.requestVideo()
strongSelf.present?(textAlertController(sharedContext: strongSelf.sharedContext, title: nil, text: strongSelf.presentationData.strings.Call_CameraConfirmationText, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Call_CameraConfirmationConfirm, action: {
proceed()
})]))
} else {
strongSelf.call.disableVideo()
}
@ -471,9 +513,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
self.buttonsNode.rotateCamera = { [weak self] in
guard let strongSelf = self else {
guard let strongSelf = self, !strongSelf.areUserActionsDisabledNow() else {
return
}
strongSelf.disableActionsUntilTimestamp = CACurrentMediaTime() + 1.0
if let outgoingVideoNode = strongSelf.outgoingVideoNodeValue, let (layout, _) = strongSelf.validLayout {
outgoingVideoNode.flip(withBackground: outgoingVideoNode.frame.width == layout.size.width)
}
strongSelf.call.switchVideoCamera()
if let _ = strongSelf.outgoingVideoNodeValue {
if let (layout, navigationBarHeight) = strongSelf.validLayout {
@ -495,6 +541,18 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
func displayCameraTooltip() {
guard let location = self.buttonsNode.videoButtonFrame().flatMap({ frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}) else {
return
}
self.present?(TooltipScreen(text: self.presentationData.strings.Call_CameraTooltip, style: .light, icon: nil, location: .point(location.offsetBy(dx: 0.0, dy: -14.0)), displayDuration: .custom(5.0), shouldDismissOnTouch: { _ in
return .dismiss(consume: false)
}))
}
override func didLoad() {
super.didLoad()
@ -526,11 +584,12 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.dimNode.isHidden = true
}
self.toastNode.title = peer.compactDisplayTitle
self.statusNode.title = peer.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder)
if hasOther {
self.statusNode.subtitle = self.presentationData.strings.Call_AnsweringWithAccount(accountPeer.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder)).0
if let callState = callState {
if let callState = self.callState {
self.updateCallState(callState)
}
}
@ -597,7 +656,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
strongSelf.minimizedVideoNode = expandedVideoNode
}
strongSelf.expandedVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
strongSelf.containerNode.insertSubnode(incomingVideoNode, belowSubnode: strongSelf.dimNode)
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
}
@ -616,7 +675,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}, isFlippedUpdated: { _ in
})
strongSelf.candidateIncomingVideoNodeValue = incomingVideoNode
strongSelf.setupAudioOutputs()
@ -678,7 +736,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
} else {
strongSelf.expandedVideoNode = outgoingVideoNode
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
strongSelf.containerNode.insertSubnode(outgoingVideoNode, belowSubnode: strongSelf.dimNode)
}
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
}
@ -772,7 +830,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
}
switch callState.state {
case .waiting, .connecting:
statusValue = .text(string: self.presentationData.strings.Call_StatusConnecting, displayLogo: false)
@ -864,7 +922,27 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
var toastContent: CallControllerToastContent = []
if case .inactive = callState.remoteVideoState {
toastContent.insert(.camera)
}
if case .muted = callState.remoteAudioState {
toastContent.insert(.microphone)
}
if case .low = callState.remoteBatteryLevel {
toastContent.insert(.battery)
}
self.toastContent = toastContent
self.updateButtonsMode()
self.updateDimVisibility()
if self.incomingVideoViewRequested && !self.outgoingVideoViewRequested && !self.displayedCameraTooltip {
self.displayedCameraTooltip = true
Queue.mainQueue().after(2.0) {
self.displayCameraTooltip()
}
}
if case let .terminated(id, _, reportRating) = callState.state, let callId = id {
let presentRating = reportRating || self.forceReportRating
@ -875,6 +953,33 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
private func updateDimVisibility(transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)) {
guard let callState = self.callState else {
return
}
var visible = true
if case .active = callState.state, self.incomingVideoNodeValue != nil || self.outgoingVideoNodeValue != nil {
visible = false
}
let currentVisible = self.dimNode.image == nil
if visible != currentVisible {
let color = visible ? UIColor(rgb: 0x000000, alpha: 0.3) : UIColor.clear
let image: UIImage? = visible ? nil : generateGradientImage(size: CGSize(width: 1.0, height: 640.0), colors: [UIColor.black.withAlphaComponent(0.3), UIColor.clear, UIColor.clear, UIColor.black.withAlphaComponent(0.3)], locations: [0.0, 0.22, 0.7, 1.0])
if transition.isAnimated {
UIView.transition(with: self.dimNode.view, duration: 0.3, options: .transitionCrossDissolve, animations: {
self.dimNode.backgroundColor = color
self.dimNode.image = image
}, completion: nil)
} else {
self.dimNode.backgroundColor = color
self.dimNode.image = image
}
self.statusNode.isHidden = !visible
}
}
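When a video surface is on screen, the dim layer above switches from a flat 30% black fill to a gradient image that darkens only the top and bottom edges, keeping the status text and buttons legible over live video. The project builds that image with its own generateGradientImage helper; purely as an illustration (not the code used in this commit), the same edge darkening could be expressed with a CAGradientLayer:

import UIKit

// Illustrative only: the top/bottom darkening expressed as a CAGradientLayer.
// The actual implementation generates an image via generateGradientImage instead.
func makeEdgeDimLayer(frame: CGRect) -> CAGradientLayer {
    let layer = CAGradientLayer()
    layer.frame = frame
    layer.colors = [
        UIColor.black.withAlphaComponent(0.3).cgColor,
        UIColor.clear.cgColor,
        UIColor.clear.cgColor,
        UIColor.black.withAlphaComponent(0.3).cgColor
    ]
    layer.locations = [0.0, 0.22, 0.7, 1.0]   // same stops as the generated image above
    return layer
}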
private var buttonsTerminationMode: CallControllerButtonsMode?
private func updateButtonsMode(transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)) {
@ -883,7 +988,9 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
var mode: CallControllerButtonsSpeakerMode = .none
var hasAudioRouteMenu: Bool = false
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
hasAudioRouteMenu = availableOutputs.count > 2
switch currentOutput {
case .builtin:
mode = .builtin
@ -891,8 +998,15 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
mode = .speaker
case .headphones:
mode = .headphones
case .port:
mode = .bluetooth
case let .port(port):
var type: CallControllerButtonsSpeakerMode.BluetoothType = .generic
let portName = port.name.lowercased()
if portName.contains("airpods pro") {
type = .airpodsPro
} else if portName.contains("airpods") {
type = .airpods
}
mode = .bluetooth(type)
}
if availableOutputs.count <= 1 {
mode = .none
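The speaker mode now distinguishes Bluetooth device classes by inspecting the audio route's port name, matching the more specific "airpods pro" substring before the generic "airpods" one. A minimal, self-contained sketch of that heuristic follows; the BluetoothType enum is a hypothetical stand-in for the real CallControllerButtonsSpeakerMode.BluetoothType:

// Free-standing sketch of the port-name heuristic used above (assumed helper,
// not present in the source).
enum BluetoothType { case generic, airpods, airpodsPro }

func bluetoothType(forPortName name: String) -> BluetoothType {
    let portName = name.lowercased()
    if portName.contains("airpods pro") {
        return .airpodsPro          // check the more specific name first
    } else if portName.contains("airpods") {
        return .airpods
    }
    return .generic
}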
@ -912,22 +1026,22 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
switch callState.state {
case .ringing:
self.buttonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState)
self.buttonsMode = .incoming(speakerMode: mode, hasAudioRouteMenu: hasAudioRouteMenu, videoState: mappedVideoState)
self.buttonsTerminationMode = buttonsMode
case .waiting, .requesting:
self.buttonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState)
self.buttonsMode = .outgoingRinging(speakerMode: mode, hasAudioRouteMenu: hasAudioRouteMenu, videoState: mappedVideoState)
self.buttonsTerminationMode = buttonsMode
case .active, .connecting, .reconnecting:
self.buttonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
self.buttonsMode = .active(speakerMode: mode, hasAudioRouteMenu: hasAudioRouteMenu, videoState: mappedVideoState)
self.buttonsTerminationMode = buttonsMode
case .terminating, .terminated:
if let buttonsTerminationMode = self.buttonsTerminationMode {
self.buttonsMode = buttonsTerminationMode
} else {
self.buttonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
self.buttonsMode = .active(speakerMode: mode, hasAudioRouteMenu: hasAudioRouteMenu, videoState: mappedVideoState)
}
}
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: transition)
}
@ -978,6 +1092,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
uiDisplayTransition *= 1.0 - self.pictureInPictureTransitionFraction
let buttonsHeight: CGFloat = self.buttonsNode.bounds.height
let toastHeight: CGFloat = self.toastNode.bounds.height
var fullInsets = layout.insets(options: .statusBar)
@ -987,7 +1102,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
cleanInsets.right = 20.0
fullInsets.top += 44.0 + 8.0
fullInsets.bottom = buttonsHeight + 27.0
fullInsets.bottom = buttonsHeight + toastHeight + 27.0
fullInsets.left = 20.0
fullInsets.right = 20.0
@ -1070,6 +1185,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
let defaultButtonsOriginY = layout.size.height - buttonsHeight
let buttonsOriginY = interpolate(from: layout.size.height + 10.0, to: defaultButtonsOriginY, value: uiDisplayTransition)
let toastHeight = self.toastNode.updateLayout(strings: self.presentationData.strings, content: self.toastContent, constrainedWidth: layout.size.width, bottomInset: layout.intrinsicInsets.bottom + buttonsHeight, transition: transition)
var overlayAlpha: CGFloat = uiDisplayTransition
switch self.callState?.state {
@ -1137,7 +1254,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
transition.updateFrame(node: self.toastNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY - toastHeight), size: CGSize(width: layout.size.width, height: toastHeight)))
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
@ -1269,11 +1387,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self?.keyButtonNode.isHidden = false
keyPreviewNode?.removeFromSupernode()
})
} else if self.hasVideoNodes {
if let (layout, navigationHeight) = self.validLayout {
self.pictureInPictureTransitionFraction = 1.0
self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: .animated(duration: 0.4, curve: .spring))
}
} else {
self.back?()
}
}
private var hasVideoNodes: Bool {
return self.expandedVideoNode != nil || self.minimizedVideoNode != nil
}
private var debugTapCounter: (Double, Int) = (0.0, 0)
private func areUserActionsDisabledNow() -> Bool {
@ -1493,7 +1620,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if self.pictureInPictureTransitionFraction.isZero, let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode, minimizedVideoNode.frame.contains(location), expandedVideoNode.frame != minimizedVideoNode.frame {
self.minimizedVideoInitialPosition = minimizedVideoNode.position
} else if let _ = self.expandedVideoNode, let _ = self.minimizedVideoNode {
} else if let _ = self.minimizedVideoNode {
self.minimizedVideoInitialPosition = nil
if !self.pictureInPictureTransitionFraction.isZero {
self.pictureInPictureGestureState = .dragging(initialPosition: self.containerTransformationNode.position, draggingPosition: self.containerTransformationNode.position)

View File

@ -3,35 +3,311 @@ import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import TelegramPresentationData
private let labelFont = Font.regular(17.0)
final class CallControllerToastNode: ASDisplayNode {
struct Content: Equatable {
enum Image {
case cameraOff
private enum ToastDescription: Equatable {
enum Key: Hashable {
case camera
case microphone
case mute
case battery
}
case camera
case microphone
case mute
case battery
var key: Key {
switch self {
case .camera:
return .camera
case .microphone:
return .microphone
case .mute:
return .mute
case .battery:
return .battery
}
}
}
struct CallControllerToastContent: OptionSet {
public var rawValue: Int32
public init(rawValue: Int32) {
self.rawValue = rawValue
}
public static let camera = CallControllerToastContent(rawValue: 1 << 0)
public static let microphone = CallControllerToastContent(rawValue: 1 << 1)
public static let mute = CallControllerToastContent(rawValue: 1 << 2)
public static let battery = CallControllerToastContent(rawValue: 1 << 3)
}
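CallControllerToastContent is an OptionSet, so several toast conditions (remote camera off, remote microphone muted, your own mute, low battery) can be raised at the same time and tested independently. A small usage sketch, assuming the type exactly as declared above:

// Usage sketch for the OptionSet above; the flags mirror the declaration.
var content: CallControllerToastContent = []
content.insert(.camera)          // remote video became inactive
content.insert(.battery)         // remote side reported a low battery level

if content.contains(.camera) {
    // show the "camera is off" toast
}
content.remove(.battery)         // battery recovered; drop that toast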
final class CallControllerToastContainerNode: ASDisplayNode {
private var toastNodes: [ToastDescription.Key: CallControllerToastItemNode] = [:]
private let strings: PresentationStrings
private var validLayout: (CGFloat, CGFloat)?
private var content: CallControllerToastContent?
private var appliedContent: CallControllerToastContent?
var title: String = ""
init(strings: PresentationStrings) {
self.strings = strings
super.init()
}
private func updateToastsLayout(strings: PresentationStrings, content: CallControllerToastContent, width: CGFloat, bottomInset: CGFloat, animated: Bool) -> CGFloat {
let transition: ContainedViewLayoutTransition
if animated {
transition = .animated(duration: 0.3, curve: .spring)
} else {
transition = .immediate
}
let previousContent = self.appliedContent
self.appliedContent = content
let spacing: CGFloat = 18.0
let bottomSpacing: CGFloat = 22.0
var height: CGFloat = 0.0
var toasts: [ToastDescription] = []
if content.contains(.camera) {
toasts.append(.camera)
}
if content.contains(.microphone) {
toasts.append(.microphone)
}
if content.contains(.mute) {
toasts.append(.mute)
}
if content.contains(.battery) {
toasts.append(.battery)
}
var transitions: [ToastDescription.Key: (ContainedViewLayoutTransition, CGFloat, Bool)] = [:]
var validKeys: [ToastDescription.Key] = []
for toast in toasts {
validKeys.append(toast.key)
var toastTransition = transition
var animateIn = false
let toastNode: CallControllerToastItemNode
if let current = self.toastNodes[toast.key] {
toastNode = current
} else {
toastNode = CallControllerToastItemNode()
self.toastNodes[toast.key] = toastNode
self.addSubnode(toastNode)
toastTransition = .immediate
animateIn = transition.isAnimated
}
let toastContent: CallControllerToastItemNode.Content
switch toast {
case .camera:
toastContent = CallControllerToastItemNode.Content(
key: .camera,
image: .camera,
text: strings.Call_CameraOff(self.title).0
)
case .microphone:
toastContent = CallControllerToastItemNode.Content(
key: .microphone,
image: .microphone,
text: strings.Call_MicrophoneOff(self.title).0
)
case .mute:
toastContent = CallControllerToastItemNode.Content(
key: .mute,
image: .microphone,
text: strings.Call_YourMicrophoneOff
)
case .battery:
toastContent = CallControllerToastItemNode.Content(
key: .battery,
image: .battery,
text: strings.Call_BatteryLow(self.title).0
)
}
let toastHeight = toastNode.update(width: width, content: toastContent, transition: toastTransition)
transitions[toast.key] = (toastTransition, toastHeight, animateIn)
}
var removedKeys: [ToastDescription.Key] = []
for (key, toast) in self.toastNodes {
if !validKeys.contains(key) {
removedKeys.append(key)
if animated {
toast.animateOut(transition: transition) { [weak toast] in
toast?.removeFromSupernode()
}
} else {
toast.removeFromSupernode()
}
}
}
for key in removedKeys {
self.toastNodes.removeValue(forKey: key)
}
guard let subnodes = self.subnodes else {
return 0.0
}
for case let toastNode as CallControllerToastItemNode in subnodes.reversed() {
if let content = toastNode.currentContent, let (transition, toastHeight, animateIn) = transitions[content.key] {
transition.updateFrame(node: toastNode, frame: CGRect(x: 0.0, y: height, width: width, height: toastHeight))
height += toastHeight + spacing
if animateIn {
toastNode.animateIn()
}
}
}
if height > 0.0 {
height -= spacing
}
height += bottomSpacing
return height
}
func updateLayout(strings: PresentationStrings, content: CallControllerToastContent?, constrainedWidth: CGFloat, bottomInset: CGFloat, transition: ContainedViewLayoutTransition) -> CGFloat {
self.validLayout = (constrainedWidth, bottomInset)
self.content = content
if let content = self.content {
return self.updateToastsLayout(strings: strings, content: content, width: constrainedWidth, bottomInset: bottomInset, animated: transition.isAnimated)
} else {
return 0.0
}
}
}
private class CallControllerToastItemNode: ASDisplayNode {
struct Content: Equatable {
enum Image {
case camera
case microphone
case battery
}
var key: ToastDescription.Key
var image: Image
var text: String
init(image: Image, text: String) {
init(key: ToastDescription.Key, image: Image, text: String) {
self.key = key
self.image = image
self.text = text
}
}
let clipNode: ASDisplayNode
let effectView: UIVisualEffectView
let iconNode: ASImageNode
let textNode: ImmediateTextNode
private(set) var currentContent: Content?
private(set) var currentWidth: CGFloat?
private(set) var currentHeight: CGFloat?
override init() {
self.clipNode = ASDisplayNode()
self.clipNode.clipsToBounds = true
self.clipNode.layer.cornerRadius = 14.0
if #available(iOS 13.0, *) {
self.clipNode.layer.cornerCurve = .continuous
}
self.effectView = UIVisualEffectView()
self.effectView.effect = UIBlurEffect(style: .light)
self.effectView.layer.cornerRadius = 16.0
self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
self.iconNode = ASImageNode()
self.iconNode.displaysAsynchronously = false
self.iconNode.displayWithoutProcessing = true
self.iconNode.contentMode = .center
self.textNode = ImmediateTextNode()
self.textNode.maximumNumberOfLines = 2
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
super.init()
self.view.addSubview(self.effectView)
self.addSubnode(self.clipNode)
self.clipNode.view.addSubview(self.effectView)
self.clipNode.addSubnode(self.iconNode)
self.clipNode.addSubnode(self.textNode)
}
func update(width: CGFloat, content: Content, transition: ContainedViewLayoutTransition) -> CGFloat {
let inset: CGFloat = 32.0
if self.currentContent != content || self.currentWidth != width {
let previousContent = self.currentContent
self.currentContent = content
self.currentWidth = width
var image: UIImage?
switch content.image {
case .camera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallToastCamera"), color: .white)
case .microphone:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallToastMicrophone"), color: .white)
case .battery:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallToastBattery"), color: .white)
}
if transition.isAnimated, let image = image, let previousContent = self.iconNode.image {
self.iconNode.image = image
self.iconNode.layer.animate(from: previousContent.cgImage!, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
} else {
self.iconNode.image = image
}
if previousContent?.text != content.text {
self.textNode.attributedText = NSAttributedString(string: content.text, font: Font.regular(17.0), textColor: .white)
let iconSize = CGSize(width: 44.0, height: 28.0)
let iconSpacing: CGFloat = 2.0
let textSize = self.textNode.updateLayout(CGSize(width: width - inset * 2.0 - iconSize.width - iconSpacing, height: 100.0))
let backgroundSize = CGSize(width: iconSize.width + iconSpacing + textSize.width + 6.0 * 2.0, height: max(28.0, textSize.height + 4.0 * 2.0))
let backgroundFrame = CGRect(origin: CGPoint(x: floor((width - backgroundSize.width) / 2.0), y: 0.0), size: backgroundSize)
transition.updateFrame(node: self.clipNode, frame: backgroundFrame)
transition.updateFrame(view: self.effectView, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
self.iconNode.frame = CGRect(origin: CGPoint(), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: iconSize.width + iconSpacing, y: 4.0), size: textSize)
self.currentHeight = backgroundSize.height
}
}
return self.currentHeight ?? 28.0
}
func animateIn() {
self.layer.animateSpring(from: 0.01 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.45, damping: 105.0, completion: { _ in
})
}
func animateOut(transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
transition.updateTransformScale(node: self, scale: 0.1)
transition.updateAlpha(node: self, alpha: 0.0, completion: { _ in
completion()
})
}
}

View File

@ -132,7 +132,7 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
private static func providerConfiguration() -> CXProviderConfiguration {
let providerConfiguration = CXProviderConfiguration(localizedName: "Telegram")
providerConfiguration.supportsVideo = false
providerConfiguration.supportsVideo = true
providerConfiguration.maximumCallsPerCallGroup = 1
providerConfiguration.maximumCallGroups = 1
providerConfiguration.supportedHandleTypes = [.phoneNumber, .generic]
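With supportsVideo enabled on the provider configuration, calls reported through CallKit can be flagged as video calls so the system UI presents them accordingly. A hypothetical sketch of reporting such a call (not taken from this commit):

import CallKit

// Hypothetical sketch: report an incoming call as a video call once the
// provider configuration advertises supportsVideo = true.
func reportIncomingVideoCall(provider: CXProvider, uuid: UUID, handle: String) {
    let update = CXCallUpdate()
    update.remoteHandle = CXHandle(type: .generic, value: handle)
    update.hasVideo = true
    provider.reportNewIncomingCall(with: uuid, update: update) { error in
        if let error = error {
            print("Failed to report incoming call: \(error)")
        }
    }
}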

View File

@ -57,7 +57,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
var toggleMute: (() -> Void)?
var setCurrentAudioOutput: ((AudioSessionOutput) -> Void)?
var beginAudioOuputSelection: (() -> Void)?
var beginAudioOuputSelection: ((Bool) -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
@ -65,6 +65,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
var present: ((ViewController) -> Void)?
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext
@ -139,7 +140,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
}
self.buttonsNode.speaker = { [weak self] in
self?.beginAudioOuputSelection?()
self?.beginAudioOuputSelection?(false)
}
self.buttonsNode.end = { [weak self] in

View File

@ -194,6 +194,7 @@ public final class PresentationCallImpl: PresentationCall {
private var previousVideoState: PresentationCallState.VideoState?
private var previousRemoteVideoState: PresentationCallState.RemoteVideoState?
private var previousRemoteAudioState: PresentationCallState.RemoteAudioState?
private var previousRemoteBatteryLevel: PresentationCallState.RemoteBatteryLevel?
private var sessionStateDisposable: Disposable?
@ -294,9 +295,9 @@ public final class PresentationCallImpl: PresentationCall {
self.enableHighBitrateVideoCalls = enableHighBitrateVideoCalls
if self.isVideo {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .active, remoteVideoState: .inactive, remoteAudioState: .active))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .active, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal))
} else {
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: self.isVideoPossible ? .inactive : .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: self.isVideoPossible ? .inactive : .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal))
}
self.serializedData = serializedData
@ -447,7 +448,7 @@ public final class PresentationCallImpl: PresentationCall {
switch previous.state {
case .active:
wasActive = true
case .terminated:
case .terminated, .dropping:
wasTerminated = true
default:
break
@ -462,6 +463,7 @@ public final class PresentationCallImpl: PresentationCall {
let mappedVideoState: PresentationCallState.VideoState
let mappedRemoteVideoState: PresentationCallState.RemoteVideoState
let mappedRemoteAudioState: PresentationCallState.RemoteAudioState
let mappedRemoteBatteryLevel: PresentationCallState.RemoteBatteryLevel
if let callContextState = callContextState {
switch callContextState.videoState {
case .notAvailable:
@ -487,10 +489,16 @@ public final class PresentationCallImpl: PresentationCall {
case .muted:
mappedRemoteAudioState = .muted
}
switch callContextState.remoteBatteryLevel {
case .normal:
mappedRemoteBatteryLevel = .normal
case .low:
mappedRemoteBatteryLevel = .low
}
self.previousVideoState = mappedVideoState
self.previousRemoteVideoState = mappedRemoteVideoState
self.previousRemoteAudioState = mappedRemoteAudioState
self.previousRemoteBatteryLevel = mappedRemoteBatteryLevel
} else {
if let previousVideoState = self.previousVideoState {
mappedVideoState = previousVideoState
@ -509,11 +517,16 @@ public final class PresentationCallImpl: PresentationCall {
} else {
mappedRemoteAudioState = .active
}
if let previousRemoteBatteryLevel = self.previousRemoteBatteryLevel {
mappedRemoteBatteryLevel = previousRemoteBatteryLevel
} else {
mappedRemoteBatteryLevel = .normal
}
}
switch sessionState.state {
case .ringing:
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
if previous == nil || previousControl == nil {
if !self.reportedIncomingCall {
self.reportedIncomingCall = true
@ -540,19 +553,19 @@ public final class PresentationCallImpl: PresentationCall {
}
case .accepting:
self.callWasActive = true
presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .dropping:
presentationState = PresentationCallState(state: .terminating, videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .terminating, videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .terminated(id, reason, options):
presentationState = PresentationCallState(state: .terminated(id, reason, self.callWasActive && (options.contains(.reportRating) || self.shouldPresentCallRating)), videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .terminated(id, reason, self.callWasActive && (options.contains(.reportRating) || self.shouldPresentCallRating)), videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .requesting(ringing):
presentationState = PresentationCallState(state: .requesting(ringing), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .requesting(ringing), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .active(_, _, keyVisualHash, _, _, _, _):
self.callWasActive = true
if let callContextState = callContextState {
switch callContextState.state {
case .initializing:
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .failed:
presentationState = nil
self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
@ -564,7 +577,7 @@ public final class PresentationCallImpl: PresentationCall {
timestamp = CFAbsoluteTimeGetCurrent()
self.activeTimestamp = timestamp
}
presentationState = PresentationCallState(state: .active(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .active(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .reconnecting:
let timestamp: Double
if let activeTimestamp = self.activeTimestamp {
@ -573,10 +586,10 @@ public final class PresentationCallImpl: PresentationCall {
timestamp = CFAbsoluteTimeGetCurrent()
self.activeTimestamp = timestamp
}
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
}
} else {
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
}
}
@ -635,15 +648,22 @@ public final class PresentationCallImpl: PresentationCall {
self.ongoingContext?.stop(debugLogValue: debugLogValue)
}
}
if case .terminated = sessionState.state, !wasTerminated {
var terminating = false
if case .terminated = sessionState.state {
terminating = true
} else if case .dropping = sessionState.state {
terminating = true
}
if terminating, !wasTerminated {
if !self.didSetCanBeRemoved {
self.didSetCanBeRemoved = true
self.canBeRemovedPromise.set(.single(true) |> delay(2.4, queue: Queue.mainQueue()))
self.canBeRemovedPromise.set(.single(true) |> delay(2.0, queue: Queue.mainQueue()))
}
self.hungUpPromise.set(true)
if sessionState.isOutgoing {
if !self.droppedCall && self.dropCallKitCallTimer == nil {
let dropCallKitCallTimer = SwiftSignalKit.Timer(timeout: 2.4, repeat: false, completion: { [weak self] in
let dropCallKitCallTimer = SwiftSignalKit.Timer(timeout: 2.0, repeat: false, completion: { [weak self] in
if let strongSelf = self {
strongSelf.dropCallKitCallTimer = nil
if !strongSelf.droppedCall {

View File

@ -1332,11 +1332,13 @@ public final class AccountViewTracker {
if lhsTimestamp != rhsTimestamp {
return false
}
var lhsVideo = false
var lhsMissed = false
var lhsOther = false
inner: for media in lhs.media {
if let action = media as? TelegramMediaAction {
if case let .phoneCall(_, discardReason, _, _) = action.action {
if case let .phoneCall(_, discardReason, _, video) = action.action {
lhsVideo = video
if lhs.flags.contains(.Incoming), let discardReason = discardReason, case .missed = discardReason {
lhsMissed = true
} else {
@ -1346,11 +1348,13 @@ public final class AccountViewTracker {
}
}
}
var rhsVideo = false
var rhsMissed = false
var rhsOther = false
inner: for media in rhs.media {
if let action = media as? TelegramMediaAction {
if case let .phoneCall(_, discardReason, _, _) = action.action {
if case let .phoneCall(_, discardReason, _, video) = action.action {
rhsVideo = video
if rhs.flags.contains(.Incoming), let discardReason = discardReason, case .missed = discardReason {
rhsMissed = true
} else {
@ -1360,7 +1364,7 @@ public final class AccountViewTracker {
}
}
}
if lhsMissed != rhsMissed || lhsOther != rhsOther {
if lhsMissed != rhsMissed || lhsOther != rhsOther || lhsVideo != rhsVideo {
return false
}
return true

View File

@ -55,6 +55,9 @@ public enum PresentationResourceKey: Int32 {
case itemListCornersBottom
case itemListCornersBoth
case itemListVoiceCallIcon
case itemListVideoCallIcon
case chatListLockTopUnlockedImage
case chatListLockBottomUnlockedImage
case chatListPending
@ -207,8 +210,12 @@ public enum PresentationResourceKey: Int32 {
case chatBubbleIncomingCallButtonImage
case chatBubbleOutgoingCallButtonImage
case chatBubbleIncomingVideoCallButtonImage
case chatBubbleOutgoingVideoCallButtonImage
case callListOutgoingIcon
case callListOutgoingVideoIcon
case callListInfoButton
case genericSearchBarLoupeImage

View File

@ -10,6 +10,12 @@ public struct PresentationResourcesCallList {
})
}
public static func outgoingVideoIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.callListOutgoingVideoIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call List/OutgoingVideoIcon"), color: theme.list.disclosureArrowColor)
})
}
public static func infoButton(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.callListInfoButton.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call List/InfoButton"), color: theme.list.itemAccentColor)

View File

@ -723,6 +723,18 @@ public struct PresentationResourcesChat {
})
}
public static func chatBubbleIncomingVideoCallButtonImage(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatBubbleIncomingVideoCallButtonImage.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/VideoCallButton"), color: theme.chat.message.incoming.accentControlColor)
})
}
public static func chatBubbleOutgoingVideoCallButtonImage(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatBubbleOutgoingVideoCallButtonImage.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/VideoCallButton"), color: theme.chat.message.outgoing.accentControlColor)
})
}
public static func chatInputSearchPanelUpImage(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatInputSearchPanelUpImage.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Input/Search/UpButton"), color: theme.chat.inputPanel.panelControlAccentColor)

View File

@ -108,6 +108,18 @@ public struct PresentationResourcesItemList {
})
}
public static func voiceCallIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.itemListVoiceCallIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/CallButton"), color: theme.list.itemAccentColor)
})
}
public static func videoCallIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.itemListVideoCallIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/VideoCallButton"), color: theme.list.itemAccentColor)
})
}
public static func addPhoneIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.itemListAddPhoneIcon.rawValue, { theme in
guard let image = generateTintedImage(image: UIImage(bundleImageName: "Item List/AddItemIcon"), color: theme.list.itemAccentColor) else {

View File

@ -1,12 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "ic_outgoingcall.pdf"
"filename" : "ic_outvoice.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_outvideo (2).pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_audioairpods.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_audioairpodspro.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown (before: 618 B).

Binary file not shown (before: 1022 B).

View File

@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallBluetoothIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallBluetoothIcon@3x.png",
"scale" : "3x"
"filename" : "ic_call_audiobt.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_camerahd.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_menu_hdoff.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_menu_hdon.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_batteryislow.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_cameraoff.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_call_microphoneoff.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_videocallchat.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -6699,7 +6699,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.effectiveNavigationController?.pushViewController(contactsController)
self.controllerNavigationDisposable.set((contactsController.result
|> deliverOnMainQueue).start(next: { [weak self] peer in
if let strongSelf = self, let peer = peer {
if let strongSelf = self, let (peer, _) = peer {
let dataSignal: Signal<(Peer?, DeviceContactExtendedData?), NoError>
switch peer {
case let .peer(contact, _, _):

View File

@ -151,9 +151,17 @@ class ChatMessageCallBubbleContentNode: ChatMessageBubbleContentNode {
var buttonImage: UIImage?
if incoming {
buttonImage = PresentationResourcesChat.chatBubbleIncomingCallButtonImage(item.presentationData.theme.theme)
if isVideo {
buttonImage = PresentationResourcesChat.chatBubbleIncomingVideoCallButtonImage(item.presentationData.theme.theme)
} else {
buttonImage = PresentationResourcesChat.chatBubbleIncomingCallButtonImage(item.presentationData.theme.theme)
}
} else {
buttonImage = PresentationResourcesChat.chatBubbleOutgoingCallButtonImage(item.presentationData.theme.theme)
if isVideo {
buttonImage = PresentationResourcesChat.chatBubbleOutgoingVideoCallButtonImage(item.presentationData.theme.theme)
} else {
buttonImage = PresentationResourcesChat.chatBubbleOutgoingCallButtonImage(item.presentationData.theme.theme)
}
}
let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, reactionCount: 0)

View File

@ -116,7 +116,7 @@ public class ComposeController: ViewController {
self?.activateSearch()
}
self.contactsNode.contactListNode.openPeer = { [weak self] peer in
self.contactsNode.contactListNode.openPeer = { [weak self] peer, _ in
if case let .peer(peer, _, _) = peer {
self?.openPeer(peerId: peer.id)
}
@ -157,7 +157,7 @@ public class ComposeController: ViewController {
strongSelf.createActionDisposable.set((controller.result
|> take(1)
|> deliverOnMainQueue).start(next: { [weak controller] peer in
if let strongSelf = self, let contactPeer = peer, case let .peer(peer, _, _) = contactPeer {
if let strongSelf = self, let (contactPeer, _) = peer, case let .peer(peer, _, _) = contactPeer {
controller?.dismissSearch()
controller?.displayNavigationActivity = true
strongSelf.createActionDisposable.set((createSecretChat(account: strongSelf.context.account, peerId: peer.id) |> deliverOnMainQueue).start(next: { peerId in

View File

@ -119,7 +119,7 @@ final class ContactMultiselectionControllerNode: ASDisplayNode {
switch self.contentNode {
case let .contacts(contactsNode):
contactsNode.openPeer = { [weak self] peer in
contactsNode.openPeer = { [weak self] peer, _ in
self?.openPeer?(peer)
}
case let .chats(chatsNode):
@ -186,7 +186,7 @@ final class ContactMultiselectionControllerNode: ASDisplayNode {
globalSearch = false
}
let searchResultsNode = ContactListNode(context: context, presentation: .single(.search(signal: searchText.get(), searchChatList: searchChatList, searchDeviceContacts: false, searchGroups: searchGroups, searchChannels: searchChannels, globalSearch: globalSearch)), filters: filters, selectionState: selectionState, isSearch: true)
searchResultsNode.openPeer = { peer in
searchResultsNode.openPeer = { peer, _ in
self?.tokenListNode.setText("")
self?.openPeer?(peer)
}

View File

@ -34,14 +34,15 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController
private let titleProducer: (PresentationStrings) -> String
private let options: [ContactListAdditionalOption]
private let displayDeviceContacts: Bool
private let displayCallIcons: Bool
private var _ready = Promise<Bool>()
override var ready: Promise<Bool> {
return self._ready
}
private let _result = Promise<ContactListPeer?>()
var result: Signal<ContactListPeer?, NoError> {
private let _result = Promise<(ContactListPeer, ContactListAction)?>()
var result: Signal<(ContactListPeer, ContactListAction)?, NoError> {
return self._result.get()
}
@ -74,6 +75,7 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController
self.titleProducer = params.title
self.options = params.options
self.displayDeviceContacts = params.displayDeviceContacts
self.displayCallIcons = params.displayCallIcons
self.confirmation = params.confirmation
self.presentationData = context.sharedContext.currentPresentationData.with { $0 }
@ -143,7 +145,7 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController
}
override func loadDisplayNode() {
self.displayNode = ContactSelectionControllerNode(context: self.context, options: self.options, displayDeviceContacts: self.displayDeviceContacts)
self.displayNode = ContactSelectionControllerNode(context: self.context, options: self.options, displayDeviceContacts: self.displayDeviceContacts, displayCallIcons: self.displayCallIcons)
self._ready.set(self.contactsNode.contactListNode.ready)
self.contactsNode.navigationBar = self.navigationBar
@ -153,15 +155,15 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController
}
self.contactsNode.requestOpenPeerFromSearch = { [weak self] peer in
self?.openPeer(peer: peer)
self?.openPeer(peer: peer, action: .generic)
}
self.contactsNode.contactListNode.activateSearch = { [weak self] in
self?.activateSearch()
}
self.contactsNode.contactListNode.openPeer = { [weak self] peer in
self?.openPeer(peer: peer)
self.contactsNode.contactListNode.openPeer = { [weak self] peer, action in
self?.openPeer(peer: peer, action: action)
}
self.contactsNode.contactListNode.suppressPermissionWarning = { [weak self] in
@ -256,12 +258,12 @@ class ContactSelectionControllerImpl: ViewController, ContactSelectionController
}
}
private func openPeer(peer: ContactListPeer) {
private func openPeer(peer: ContactListPeer, action: ContactListAction) {
self.contactsNode.contactListNode.listNode.clearHighlightAnimated(true)
self.confirmationDisposable.set((self.confirmation(peer) |> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {
if value {
strongSelf._result.set(.single(peer))
strongSelf._result.set(.single((peer, action)))
if strongSelf.autoDismiss {
strongSelf.dismiss()
}

View File

@ -21,7 +21,8 @@ final class ContactSelectionControllerNode: ASDisplayNode {
}
}
let displayDeviceContacts: Bool
private let displayDeviceContacts: Bool
private let displayCallIcons: Bool
let contactListNode: ContactListNode
private let dimNode: ASDisplayNode
@ -40,12 +41,13 @@ final class ContactSelectionControllerNode: ASDisplayNode {
var presentationData: PresentationData
var presentationDataDisposable: Disposable?
init(context: AccountContext, options: [ContactListAdditionalOption], displayDeviceContacts: Bool) {
init(context: AccountContext, options: [ContactListAdditionalOption], displayDeviceContacts: Bool, displayCallIcons: Bool) {
self.context = context
self.presentationData = context.sharedContext.currentPresentationData.with { $0 }
self.displayDeviceContacts = displayDeviceContacts
self.displayCallIcons = displayCallIcons
self.contactListNode = ContactListNode(context: context, presentation: .single(.natural(options: options, includeChatList: false)))
self.contactListNode = ContactListNode(context: context, presentation: .single(.natural(options: options, includeChatList: false)), displayCallIcons: displayCallIcons)
self.dimNode = ASDisplayNode()

View File

@ -4443,7 +4443,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
if let contactsController = contactsController as? ContactSelectionController {
selectAddMemberDisposable.set((contactsController.result
|> deliverOnMainQueue).start(next: { [weak contactsController] memberPeer in
guard let memberPeer = memberPeer else {
guard let (memberPeer, _) = memberPeer else {
return
}

View File

@ -317,7 +317,7 @@ final class PeerSelectionControllerNode: ASDisplayNode {
contactListNode.activateSearch = { [weak self] in
self?.requestActivateSearch?()
}
contactListNode.openPeer = { [weak self] peer in
contactListNode.openPeer = { [weak self] peer, _ in
if case let .peer(peer, _, _) = peer {
self?.requestOpenPeer?(peer.id)
}

View File

@ -124,10 +124,16 @@ public struct OngoingCallContextState: Equatable {
case muted
}
public enum RemoteBatteryLevel: Equatable {
case normal
case low
}
public let state: State
public let videoState: VideoState
public let remoteVideoState: RemoteVideoState
public let remoteAudioState: RemoteAudioState
public let remoteBatteryLevel: RemoteBatteryLevel
}
private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, OngoingCallThreadLocalContextQueueWebrtc /*, OngoingCallThreadLocalContextQueueWebrtcCustom*/ {
@ -586,7 +592,7 @@ public final class OngoingCallContext {
}, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio), enableHighBitrateVideoCalls: enableHighBitrateVideoCalls)
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, _ in
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
queue.async {
guard let strongSelf = self else {
return
@ -623,11 +629,20 @@ public final class OngoingCallContext {
@unknown default:
mappedRemoteAudioState = .active
}
let mappedRemoteBatteryLevel: OngoingCallContextState.RemoteBatteryLevel
switch remoteBatteryLevel {
case .normal:
mappedRemoteBatteryLevel = .normal
case .low:
mappedRemoteBatteryLevel = .low
@unknown default:
mappedRemoteBatteryLevel = .normal
}
if case .active = mappedVideoState, !strongSelf.didReportCallAsVideo {
strongSelf.didReportCallAsVideo = true
callSessionManager?.updateCallType(internalId: internalId, type: .video)
}
strongSelf.contextState.set(.single(OngoingCallContextState(state: mappedState, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState)))
strongSelf.contextState.set(.single(OngoingCallContextState(state: mappedState, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)))
}
}
strongSelf.receptionPromise.set(.single(4))
@ -655,7 +670,7 @@ public final class OngoingCallContext {
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state in
self?.contextState.set(.single(OngoingCallContextState(state: OngoingCallContextState.State(state), videoState: .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active)))
self?.contextState.set(.single(OngoingCallContextState(state: OngoingCallContextState.State(state), videoState: .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal)))
}
context.signalBarsChanged = { signalBars in
self?.receptionPromise.set(.single(signalBars))

View File

@ -74,6 +74,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSaving) {
- (NSData * _Nonnull)getDerivedState;
- (void)setIsMuted:(bool)isMuted;
- (void)setIsLowBatteryLevel:(bool)isLowBatteryLevel;
- (void)setNetworkType:(OngoingCallNetworkType)networkType;
@end

View File

@ -419,6 +419,12 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)setIsLowBatteryLevel:(bool)isLowBatteryLevel {
if (_tgVoip) {
_tgVoip->setIsLowBatteryLevel(isLowBatteryLevel);
}
}
- (void)setNetworkType:(OngoingCallNetworkType)networkType {
if (_networkType != networkType) {
_networkType = networkType;

View File

@ -48,6 +48,11 @@ typedef NS_ENUM(int32_t, OngoingCallRemoteAudioStateWebrtc) {
OngoingCallRemoteAudioStateActive,
};
typedef NS_ENUM(int32_t, OngoingCallRemoteBatteryLevelWebrtc) {
OngoingCallRemoteBatteryLevelNormal,
OngoingCallRemoteBatteryLevelLow
};
typedef NS_ENUM(int32_t, OngoingCallVideoOrientationWebrtc) {
OngoingCallVideoOrientation0,
OngoingCallVideoOrientation90,
@ -115,7 +120,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
+ (int32_t)maxLayer;
+ (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)includeReference;
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteAudioStateWebrtc, float);
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteAudioStateWebrtc, OngoingCallRemoteBatteryLevelWebrtc, float);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls;

View File

@ -15,7 +15,7 @@
#import "platform/darwin/VideoMetalViewMac.h"
#define GLVideoView VideoMetalView
#define UIViewContentModeScaleAspectFill kCAGravityResizeAspectFill
#define UIViewContentModeScaleAspect kCAGravityResizeAspect
#define UIViewContentModeScaleAspectFit kCAGravityResizeAspect
#else
#import "platform/darwin/VideoMetalView.h"
@ -212,6 +212,7 @@
OngoingCallStateWebrtc _state;
OngoingCallVideoStateWebrtc _videoState;
bool _connectedOnce;
OngoingCallRemoteBatteryLevelWebrtc _remoteBatteryLevel;
OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
OngoingCallRemoteAudioStateWebrtc _remoteAudioState;
OngoingCallVideoOrientationWebrtc _remoteVideoOrientation;
@ -460,7 +461,26 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
strongSelf->_remoteVideoState = remoteVideoState;
strongSelf->_remoteAudioState = remoteAudioState;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remotePreferredAspectRatio);
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}
}];
},
.remoteBatteryLevelIsLowUpdated = [weakSelf, queue](bool isLow) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
OngoingCallRemoteBatteryLevelWebrtc remoteBatteryLevel;
if (isLow) {
remoteBatteryLevel = OngoingCallRemoteBatteryLevelLow;
} else {
remoteBatteryLevel = OngoingCallRemoteBatteryLevelNormal;
}
if (strongSelf->_remoteBatteryLevel != remoteBatteryLevel) {
strongSelf->_remoteBatteryLevel = remoteBatteryLevel;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}
@ -472,7 +492,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
if (strongSelf) {
strongSelf->_remotePreferredAspectRatio = value;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remotePreferredAspectRatio);
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}];
@ -583,7 +603,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_state = callState;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remotePreferredAspectRatio);
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}
@ -674,7 +694,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_videoState = OngoingCallVideoStateActive;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remotePreferredAspectRatio);
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}
@ -686,7 +706,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_videoState = OngoingCallVideoStateInactive;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remotePreferredAspectRatio);
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}

@ -1 +1 @@
Subproject commit 1825e17e07014a1dce1778c63aec4fb35d1ce3a5
Subproject commit b11a508237ee8db555a1ddb98b58a7bb54f8656e

View File

@ -25,6 +25,7 @@ public enum TooltipActiveTextAction {
}
private final class TooltipScreenNode: ViewControllerTracingNode {
private let tooltipStyle: TooltipScreen.Style
private let icon: TooltipScreen.Icon?
private let location: TooltipScreen.Location
private let displayDuration: TooltipScreen.DisplayDuration
@ -33,10 +34,12 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
private let scrollingContainer: ASDisplayNode
private let containerNode: ASDisplayNode
private let backgroundContainerNode: ASDisplayNode
private let backgroundNode: ASImageNode
private var effectView: UIView?
private let arrowNode: ASImageNode
private let arrowContainer: ASDisplayNode
private var arrowEffectView: UIView?
private let animatedStickerNode: AnimatedStickerNode
private let textNode: ImmediateTextNode
@ -44,7 +47,8 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
private var validLayout: ContainerViewLayout?
init(text: String, textEntities: [MessageTextEntity], icon: TooltipScreen.Icon?, location: TooltipScreen.Location, displayDuration: TooltipScreen.DisplayDuration, shouldDismissOnTouch: @escaping (CGPoint) -> TooltipScreen.DismissOnTouch, requestDismiss: @escaping () -> Void, openActiveTextItem: @escaping (TooltipActiveTextItem, TooltipActiveTextAction) -> Void) {
init(text: String, textEntities: [MessageTextEntity], style: TooltipScreen.Style, icon: TooltipScreen.Icon?, location: TooltipScreen.Location, displayDuration: TooltipScreen.DisplayDuration, shouldDismissOnTouch: @escaping (CGPoint) -> TooltipScreen.DismissOnTouch, requestDismiss: @escaping () -> Void, openActiveTextItem: @escaping (TooltipActiveTextItem, TooltipActiveTextAction) -> Void) {
self.tooltipStyle = style
self.icon = icon
self.location = location
self.displayDuration = displayDuration
@ -52,6 +56,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
self.requestDismiss = requestDismiss
self.containerNode = ASDisplayNode()
self.backgroundContainerNode = ASDisplayNode()
let fillColor = UIColor(white: 0.0, alpha: 0.8)
@ -59,14 +64,43 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
self.backgroundNode = ASImageNode()
self.backgroundNode.image = generateAdjustedStretchableFilledCircleImage(diameter: 15.0, color: fillColor)
if case .top = location {
self.effectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
self.containerNode.clipsToBounds = true
self.containerNode.cornerRadius = 9.0
func svgPath(_ path: StaticString, scale: CGPoint = CGPoint(x: 1.0, y: 1.0), offset: CGPoint = CGPoint()) throws -> UIBezierPath {
var index: UnsafePointer<UInt8> = path.utf8Start
let end = path.utf8Start.advanced(by: path.utf8CodeUnitCount)
let path = UIBezierPath()
while index < end {
let c = index.pointee
index = index.successor()
if c == 77 { // M
let x = try readCGFloat(&index, end: end, separator: 44) * scale.x + offset.x
let y = try readCGFloat(&index, end: end, separator: 32) * scale.y + offset.y
path.move(to: CGPoint(x: x, y: y))
} else if c == 76 { // L
let x = try readCGFloat(&index, end: end, separator: 44) * scale.x + offset.x
let y = try readCGFloat(&index, end: end, separator: 32) * scale.y + offset.y
path.addLine(to: CGPoint(x: x, y: y))
} else if c == 67 { // C
let x1 = try readCGFloat(&index, end: end, separator: 44) * scale.x + offset.x
let y1 = try readCGFloat(&index, end: end, separator: 32) * scale.y + offset.y
let x2 = try readCGFloat(&index, end: end, separator: 44) * scale.x + offset.x
let y2 = try readCGFloat(&index, end: end, separator: 32) * scale.y + offset.y
let x = try readCGFloat(&index, end: end, separator: 44) * scale.x + offset.x
let y = try readCGFloat(&index, end: end, separator: 32) * scale.y + offset.y
path.addCurve(to: CGPoint(x: x, y: y), controlPoint1: CGPoint(x: x1, y: y1), controlPoint2: CGPoint(x: x2, y: y2))
} else if c == 32 { // space
continue
}
}
path.close()
return path
}
self.arrowNode = ASImageNode()
let arrowSize = CGSize(width: 29.0, height: 10.0)
self.arrowNode = ASImageNode()
self.arrowNode.image = generateImage(arrowSize, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(fillColor.cgColor)
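The svgPath helper above parses a small subset of absolute SVG path commands (M, L, C), expecting a comma between x and y and a space after each coordinate pair. A usage sketch within the scope where the helper is defined; the triangle path, the 29x10 size, and someContainerView are illustrative:

// Sketch: derive a CAShapeLayer mask from a simple M/L path string.
// Note the trailing space after the last coordinate pair, which the parser reads as a separator.
let arrowMask = CAShapeLayer()
if let trianglePath = try? svgPath("M0,0 L29,0 L14.5,10 ") {
    arrowMask.path = trianglePath.cgPath
}
arrowMask.frame = CGRect(origin: CGPoint(), size: CGSize(width: 29.0, height: 10.0))
someContainerView.layer.mask = arrowMask // someContainerView is a placeholder view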
@ -77,11 +111,42 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
self.arrowContainer = ASDisplayNode()
let fontSize: CGFloat
if style == .light {
self.effectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
self.backgroundContainerNode.clipsToBounds = true
self.backgroundContainerNode.cornerRadius = 14.0
if #available(iOS 13.0, *) {
self.backgroundContainerNode.layer.cornerCurve = .continuous
}
fontSize = 17.0
self.arrowEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
self.arrowContainer.view.addSubview(self.arrowEffectView!)
let maskLayer = CAShapeLayer()
if let path = try? svgPath("M85.882251,0 C79.5170552,0 73.4125613,2.52817247 68.9116882,7.02834833 L51.4264069,24.5109211 C46.7401154,29.1964866 39.1421356,29.1964866 34.4558441,24.5109211 L16.9705627,7.02834833 C12.4696897,2.52817247 6.36519576,0 0,0 L85.882251,0 ", scale: CGPoint(x: 0.333333, y: 0.333333), offset: CGPoint()) {
maskLayer.path = path.cgPath
}
maskLayer.frame = CGRect(origin: CGPoint(), size: arrowSize)
self.arrowContainer.layer.mask = maskLayer
} else if case .top = location {
self.effectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
self.containerNode.clipsToBounds = true
self.containerNode.cornerRadius = 9.0
if #available(iOS 13.0, *) {
self.containerNode.layer.cornerCurve = .continuous
}
fontSize = 14.0
} else {
fontSize = 14.0
}
self.textNode = ImmediateTextNode()
self.textNode.displaysAsynchronously = false
self.textNode.maximumNumberOfLines = 0
self.textNode.attributedText = stringWithAppliedEntities(text, entities: textEntities, baseColor: .white, linkColor: .white, baseFont: Font.regular(14.0), linkFont: Font.regular(14.0), boldFont: Font.semibold(14.0), italicFont: Font.italic(14.0), boldItalicFont: Font.semiboldItalic(14.0), fixedFont: Font.monospace(14.0), blockQuoteFont: Font.regular(14.0), underlineLinks: true, external: false)
self.textNode.attributedText = stringWithAppliedEntities(text, entities: textEntities, baseColor: .white, linkColor: .white, baseFont: Font.regular(fontSize), linkFont: Font.regular(fontSize), boldFont: Font.semibold(14.0), italicFont: Font.italic(fontSize), boldItalicFont: Font.semiboldItalic(fontSize), fixedFont: Font.monospace(fontSize), blockQuoteFont: Font.regular(fontSize), underlineLinks: true, external: false)
self.animatedStickerNode = AnimatedStickerNode()
switch icon {
@ -101,12 +166,17 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
super.init()
self.containerNode.addSubnode(self.backgroundContainerNode)
self.arrowContainer.addSubnode(self.arrowNode)
self.backgroundNode.addSubnode(self.arrowContainer)
if let effectView = self.effectView {
self.containerNode.view.addSubview(effectView)
self.backgroundContainerNode.view.addSubview(effectView)
if let _ = self.arrowEffectView {
self.containerNode.addSubnode(self.arrowContainer)
self.arrowNode.removeFromSupernode()
}
} else {
self.containerNode.addSubnode(self.backgroundNode)
self.backgroundContainerNode.addSubnode(self.backgroundNode)
}
self.containerNode.addSubnode(self.textNode)
self.containerNode.addSubnode(self.animatedStickerNode)
@ -207,8 +277,14 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
var backgroundFrame: CGRect
let backgroundHeight = max(animationSize.height, textSize.height) + contentVerticalInset * 2.0
let backgroundHeight: CGFloat
switch self.tooltipStyle {
case .default:
backgroundHeight = max(animationSize.height, textSize.height) + contentVerticalInset * 2.0
case .light:
backgroundHeight = max(28.0, max(animationSize.height, textSize.height) + 4.0 * 2.0)
}
var invertArrow = false
switch self.location {
case let .point(rect):
@ -231,6 +307,7 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
}
transition.updateFrame(node: self.containerNode, frame: backgroundFrame)
transition.updateFrame(node: self.backgroundContainerNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
if let effectView = self.effectView {
transition.updateFrame(view: effectView, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
@ -252,8 +329,10 @@ private final class TooltipScreenNode: ViewControllerTracingNode {
ContainedViewLayoutTransition.immediate.updateTransformScale(node: self.arrowContainer, scale: CGPoint(x: 1.0, y: invertArrow ? -1.0 : 1.0))
self.arrowNode.frame = CGRect(origin: CGPoint(), size: arrowFrame.size)
self.arrowEffectView?.frame = CGRect(origin: CGPoint(), size: arrowFrame.size)
} else {
self.arrowNode.isHidden = true
self.arrowEffectView?.isHidden = true
}
transition.updateFrame(node: self.textNode, frame: CGRect(origin: CGPoint(x: contentInset + animationSize.width + animationSpacing, y: floor((backgroundHeight - textSize.height) / 2.0)), size: textSize))
@ -373,8 +452,14 @@ public final class TooltipScreen: ViewController {
case custom(Double)
}
public enum Style {
case `default`
case light
}
public let text: String
public let textEntities: [MessageTextEntity]
private let style: TooltipScreen.Style
private let icon: TooltipScreen.Icon?
private let location: TooltipScreen.Location
private let displayDuration: DisplayDuration
@ -393,9 +478,10 @@ public final class TooltipScreen: ViewController {
private var dismissTimer: Foundation.Timer?
public init(text: String, textEntities: [MessageTextEntity] = [], icon: TooltipScreen.Icon?, location: TooltipScreen.Location, displayDuration: DisplayDuration = .default, shouldDismissOnTouch: @escaping (CGPoint) -> TooltipScreen.DismissOnTouch, openActiveTextItem: @escaping (TooltipActiveTextItem, TooltipActiveTextAction) -> Void = { _, _ in }) {
public init(text: String, textEntities: [MessageTextEntity] = [], style: TooltipScreen.Style = .default, icon: TooltipScreen.Icon?, location: TooltipScreen.Location, displayDuration: DisplayDuration = .default, shouldDismissOnTouch: @escaping (CGPoint) -> TooltipScreen.DismissOnTouch, openActiveTextItem: @escaping (TooltipActiveTextItem, TooltipActiveTextAction) -> Void = { _, _ in }) {
self.text = text
self.textEntities = textEntities
self.style = style
self.icon = icon
self.location = location
self.displayDuration = displayDuration
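Callers opt into the new blurred appearance by passing style: .light to this initializer. A minimal construction sketch; the rect, the text, and the .ignore dismissal case are illustrative assumptions rather than values taken from this diff:

// Sketch: a light-style tooltip anchored to an on-screen rect.
// `.ignore` is assumed to be an existing TooltipScreen.DismissOnTouch case.
let sourceRect = CGRect(x: 100.0, y: 300.0, width: 44.0, height: 44.0)
let tooltipScreen = TooltipScreen(
    text: "This is a light-style tooltip",
    style: .light,
    icon: nil,
    location: .point(sourceRect),
    shouldDismissOnTouch: { _ in
        return .ignore
    }
)
// The screen would then be presented through the app's usual in-window presentation path.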
@ -455,7 +541,7 @@ public final class TooltipScreen: ViewController {
}
override public func loadDisplayNode() {
self.displayNode = TooltipScreenNode(text: self.text, textEntities: self.textEntities, icon: self.icon, location: self.location, displayDuration: self.displayDuration, shouldDismissOnTouch: self.shouldDismissOnTouch, requestDismiss: { [weak self] in
self.displayNode = TooltipScreenNode(text: self.text, textEntities: self.textEntities, style: self.style, icon: self.icon, location: self.location, displayDuration: self.displayDuration, shouldDismissOnTouch: self.shouldDismissOnTouch, requestDismiss: { [weak self] in
guard let strongSelf = self else {
return
}