Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 2b9d62a4cf: Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
@@ -414,6 +414,7 @@ official_apple_pay_merchants = [
"merchant.psbank.test.telegramios",
"merchant.psbank.prod.telegramios",
#"merchant.org.telegram.billinenet.test",
#"merchant.org.telegram.billinenet.prod",
#"merchant.org.telegram.portmone.test",
]
@@ -8269,8 +8269,8 @@ Sorry for the inconvenience.";

"Undo.DeletedTopic" = "Topic Deleted";

"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than **%@** thread to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than **%@** threads to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";

"EmojiSearch.SearchTopicIconsPlaceholder" = "Search Topic Icons";
"EmojiSearch.SearchTopicIconsEmptyResult" = "No emoji found";
@@ -8301,8 +8301,8 @@ Sorry for the inconvenience.";

"Notification.ForumTopicHidden" = "Topic hidden";
"Notification.ForumTopicUnhidden" = "Topic unhidden";
"Notification.ForumTopicHiddenAuthor" = "%1$@ hid topic";
"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid topic";
"Notification.ForumTopicHiddenAuthor" = "%1$@ hid the topic";
"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid the topic";
"Notification.OverviewTopicHidden" = "%1$@ hid %2$@ %3$@";
"Notification.OverviewTopicUnhidden" = "%1$@ unhid %2$@ %3$@";
@@ -865,6 +865,8 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController

if let layout = self.validLayout {
self.tabContainerNode.update(size: CGSize(width: layout.size.width, height: 46.0), sideInset: layout.safeInsets.left, filters: self.tabContainerData?.0 ?? [], selectedFilter: self.chatListDisplayNode.effectiveContainerNode.currentItemFilter, isReordering: self.chatListDisplayNode.isReorderingFilters || (self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing && !self.chatListDisplayNode.didBeginSelectingChatsWhileEditing), isEditing: self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing, canReorderAllChats: self.isPremium, filtersLimit: self.tabContainerData?.2, transitionFraction: self.chatListDisplayNode.effectiveContainerNode.transitionFraction, presentationData: self.presentationData, transition: .immediate)

self.requestUpdateHeaderContent(transition: .immediate)
}

if self.isNodeLoaded {
@@ -721,7 +721,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
})
case .index:
var headerType: ChatListSearchItemHeaderType = .messages(location: nil)
if case .forum = location, let peer = peer.peer {
if case let .forum(peerId) = location, let peer = peer.peer, peer.id == peerId {
headerType = .messages(location: peer.compactDisplayTitle)
}
header = ChatListSearchItemHeader(type: headerType, theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)
@@ -738,15 +738,26 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
} else {
let index: EngineChatList.Item.Index
var chatThreadInfo: ChatListItemContent.ThreadInfo?
chatThreadInfo = nil
var displayAsMessage = false
switch location {
case .chatList:
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
case .forum:
if let threadId = message.threadId, let threadInfo = threadInfo {
chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
case let .forum(peerId):
let _ = peerId
let _ = threadInfo

displayAsMessage = true

if message.id.peerId == peerId {
if let threadId = message.threadId, let threadInfo = threadInfo {
chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
} else {
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
}
} else {
index = .chatList( EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
}
}
return ChatListItem(presentationData: presentationData, context: context, chatListLocation: location, filterData: nil, index: index, content: .peer(ChatListItemContent.PeerData(
@@ -762,7 +773,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
inputActivities: nil,
promoInfo: nil,
ignoreUnreadBadge: true,
displayAsMessage: false,
displayAsMessage: displayAsMessage,
hasFailedMessages: false,
forumTopicData: nil,
topForumTopicItems: [],
@@ -1217,7 +1228,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
}
let previousRecentlySearchedPeersState = Atomic<SearchedPeersState?>(value: nil)

let foundItems = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
let foundItems: Signal<([ChatListSearchEntry], Bool)?, NoError> = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
|> mapToSignal { [weak self] query, options, downloadItems -> Signal<([ChatListSearchEntry], Bool)?, NoError> in
if query == nil && options == nil && [.chats, .topics].contains(key) {
let _ = currentRemotePeers.swap(nil)
@ -1464,55 +1475,113 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
|
||||
updateSearchContexts { _ in
|
||||
return ([:], true)
|
||||
}
|
||||
let foundRemoteMessages: Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError>
|
||||
|
||||
struct FoundRemoteMessages {
|
||||
var messages: [EngineMessage]
|
||||
var readCounters: [EnginePeer.Id: EnginePeerReadCounters]
|
||||
var threadsData: [EngineMessage.Id: MessageHistoryThreadData]
|
||||
var totalCount: Int32
|
||||
|
||||
init(messages: [EngineMessage], readCounters: [EnginePeer.Id: EnginePeerReadCounters], threadsData: [EngineMessage.Id: MessageHistoryThreadData], totalCount: Int32) {
|
||||
self.messages = messages
|
||||
self.readCounters = readCounters
|
||||
self.threadsData = threadsData
|
||||
self.totalCount = totalCount
|
||||
}
|
||||
}
|
||||
|
||||
let foundRemoteMessages: Signal<([FoundRemoteMessages], Bool), NoError>
|
||||
if peersFilter.contains(.doNotSearchMessages) {
|
||||
foundRemoteMessages = .single((([], [:], [:], 0), false))
|
||||
foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], false))
|
||||
} else {
|
||||
if !finalQuery.isEmpty {
|
||||
addAppLogEvent(postbox: context.account.postbox, type: "search_global_query")
|
||||
}
|
||||
|
||||
let searchSignals: [Signal<(SearchMessagesResult, SearchMessagesState), NoError>] = searchLocations.map { searchLocation in
|
||||
return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: 50)
|
||||
let limit: Int32
|
||||
#if DEBUG
|
||||
limit = 50
|
||||
#else
|
||||
limit = 50
|
||||
#endif
|
||||
return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: limit)
|
||||
}
|
||||
|
||||
let searchSignal = combineLatest(searchSignals)
|
||||
|> map { results -> ChatListSearchMessagesResult in
|
||||
let (result, updatedState) = results[0]
|
||||
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
|
||||
|> map { results -> [ChatListSearchMessagesResult] in
|
||||
var mappedResults: [ChatListSearchMessagesResult] = []
|
||||
for resultData in results {
|
||||
let (result, updatedState) = resultData
|
||||
|
||||
mappedResults.append(ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState))
|
||||
}
|
||||
return mappedResults
|
||||
}
|
||||
|
||||
let loadMore = searchContexts.get()
|
||||
|> mapToSignal { searchContexts -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
|
||||
if let searchContext = searchContexts[0], searchContext.result.hasMore {
|
||||
if let _ = searchContext.loadMoreIndex {
|
||||
return context.engine.messages.searchMessages(location: searchLocations[0], query: finalQuery, state: searchContext.result.state, limit: 80)
|
||||
|> map { result, updatedState -> ChatListSearchMessagesResult in
|
||||
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
|
||||
}
|
||||
|> mapToSignal { foundMessages -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
|
||||
updateSearchContexts { previous in
|
||||
let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
|
||||
return ([0: updated], true)
|
||||
|> mapToSignal { searchContexts -> Signal<([FoundRemoteMessages], Bool), NoError> in
|
||||
for i in 0 ..< 2 {
|
||||
if let searchContext = searchContexts[i], searchContext.result.hasMore {
|
||||
var restResults: [Int: FoundRemoteMessages] = [:]
|
||||
for j in 0 ..< 2 {
|
||||
if j != i {
|
||||
if let otherContext = searchContexts[j] {
|
||||
restResults[j] = FoundRemoteMessages(messages: otherContext.result.messages, readCounters: otherContext.result.readStates, threadsData: otherContext.result.threadInfo, totalCount: otherContext.result.totalCount)
|
||||
}
|
||||
}
|
||||
return .complete()
|
||||
}
|
||||
} else {
|
||||
return .single(((searchContext.result.messages, searchContext.result.readStates, searchContext.result.threadInfo, searchContext.result.totalCount), false))
|
||||
if let _ = searchContext.loadMoreIndex {
|
||||
return context.engine.messages.searchMessages(location: searchLocations[i], query: finalQuery, state: searchContext.result.state, limit: 80)
|
||||
|> map { result, updatedState -> ChatListSearchMessagesResult in
|
||||
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
|
||||
}
|
||||
|> mapToSignal { foundMessages -> Signal<([FoundRemoteMessages], Bool), NoError> in
|
||||
updateSearchContexts { previous in
|
||||
let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
|
||||
var previous = previous
|
||||
previous[i] = updated
|
||||
return (previous, true)
|
||||
}
|
||||
return .complete()
|
||||
}
|
||||
} else {
|
||||
var currentResults: [FoundRemoteMessages] = []
|
||||
for i in 0 ..< 2 {
|
||||
if let currentContext = searchContexts[i] {
|
||||
currentResults.append(FoundRemoteMessages(messages: currentContext.result.messages, readCounters: currentContext.result.readStates, threadsData: currentContext.result.threadInfo, totalCount: currentContext.result.totalCount))
|
||||
if currentContext.result.hasMore {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return .single((currentResults, false))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return .complete()
|
||||
}
|
||||
|
||||
return .complete()
|
||||
}
|
||||
|
||||
foundRemoteMessages = .single((([], [:], [:], 0), true))
|
||||
foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], true))
|
||||
|> then(
|
||||
searchSignal
|
||||
|> map { foundMessages -> (([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool) in
|
||||
|> map { foundMessages -> ([FoundRemoteMessages], Bool) in
|
||||
updateSearchContexts { _ in
|
||||
return ([0: ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)], true)
|
||||
var resultContexts: [Int: ChatListSearchMessagesContext] = [:]
|
||||
for i in 0 ..< foundMessages.count {
|
||||
resultContexts[i] = ChatListSearchMessagesContext(result: foundMessages[i], loadMoreIndex: nil)
|
||||
}
|
||||
return (resultContexts, true)
|
||||
}
|
||||
return ((foundMessages.messages, foundMessages.readStates, foundMessages.threadInfo, foundMessages.totalCount), false)
|
||||
var result: [FoundRemoteMessages] = []
|
||||
for i in 0 ..< foundMessages.count {
|
||||
result.append(FoundRemoteMessages(messages: foundMessages[i].messages, readCounters: foundMessages[i].readStates, threadsData: foundMessages[i].threadInfo, totalCount: foundMessages[i].totalCount))
|
||||
if foundMessages[i].hasMore {
|
||||
break
|
||||
}
|
||||
}
|
||||
return (result, false)
|
||||
}
|
||||
|> delay(0.2, queue: Queue.concurrentDefaultQueue())
|
||||
|> then(loadMore)
|
||||
@ -1766,25 +1835,33 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
|
||||
var firstHeaderId: Int64?
|
||||
if !foundRemotePeers.2 {
|
||||
index = 0
|
||||
for message in foundRemoteMessages.0.0 {
|
||||
if searchState.deletedMessageIds.contains(message.id) {
|
||||
continue
|
||||
} else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
|
||||
continue
|
||||
}
|
||||
let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
|
||||
if firstHeaderId == nil {
|
||||
firstHeaderId = headerId
|
||||
}
|
||||
var peer = EngineRenderedPeer(message: message)
|
||||
if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
|
||||
if let channelPeer = message.peers[migrationReference.peerId] {
|
||||
peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
|
||||
var existingMessageIds = Set<MessageId>()
|
||||
for foundRemoteMessageSet in foundRemoteMessages.0 {
|
||||
for message in foundRemoteMessageSet.messages {
|
||||
if existingMessageIds.contains(message.id) {
|
||||
continue
|
||||
}
|
||||
existingMessageIds.insert(message.id)
|
||||
|
||||
if searchState.deletedMessageIds.contains(message.id) {
|
||||
continue
|
||||
} else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
|
||||
continue
|
||||
}
|
||||
let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
|
||||
if firstHeaderId == nil {
|
||||
firstHeaderId = headerId
|
||||
}
|
||||
var peer = EngineRenderedPeer(message: message)
|
||||
if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
|
||||
if let channelPeer = message.peers[migrationReference.peerId] {
|
||||
peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
|
||||
}
|
||||
}
|
||||
|
||||
entries.append(.message(message, peer, foundRemoteMessageSet.readCounters[message.id.peerId], foundRemoteMessageSet.threadsData[message.id]?.info, presentationData, foundRemoteMessageSet.totalCount, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
|
||||
index += 1
|
||||
}
|
||||
|
||||
entries.append(.message(message, peer, foundRemoteMessages.0.1[message.id.peerId], foundRemoteMessages.0.2[message.id]?.info, presentationData, foundRemoteMessages.0.3, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
|
||||
index += 1
|
||||
}
|
||||
}
|
||||
|
||||
@ -1807,16 +1884,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
|
||||
|
||||
let loadMore = {
|
||||
updateSearchContexts { previousMap in
|
||||
guard let previous = previousMap[0] else {
|
||||
return ([:], false)
|
||||
var updatedMap = previousMap
|
||||
var isSearching = false
|
||||
for i in 0 ..< 2 {
|
||||
if let previous = updatedMap[i] {
|
||||
if previous.loadMoreIndex != nil {
|
||||
continue
|
||||
}
|
||||
guard let last = previous.result.messages.last else {
|
||||
continue
|
||||
}
|
||||
updatedMap[i] = ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)
|
||||
isSearching = true
|
||||
|
||||
if previous.result.hasMore {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if previous.loadMoreIndex != nil {
|
||||
return ([0: previous], false)
|
||||
}
|
||||
guard let last = previous.result.messages.last else {
|
||||
return ([0: previous], false)
|
||||
}
|
||||
return ([0: ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)], true)
|
||||
return (updatedMap, isSearching)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1919,6 +2005,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
self.listNode.clearHighlightAnimated(true)
})
})
chatListInteraction.isSearchMode = true

let listInteraction = ListMessageItemInteraction(openMessage: { [weak self] message, mode -> Bool in
guard let strongSelf = self else {
@ -2023,25 +2110,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
|
||||
})
|
||||
|
||||
self.searchDisposable.set((foundItems
|
||||
|> deliverOnMainQueue).start(next: { [weak self] entriesAndFlags in
|
||||
|> deliverOnMainQueue).start(next: { [weak self] foundItems in
|
||||
if let strongSelf = self {
|
||||
let previousSelectedMessageIds = previousSelectedMessages.swap(strongSelf.selectedMessages)
|
||||
|
||||
let isSearching = entriesAndFlags?.1 ?? false
|
||||
var entriesAndFlags = foundItems?.0
|
||||
|
||||
let isSearching = foundItems?.1 ?? false
|
||||
strongSelf._isSearching.set(isSearching)
|
||||
|
||||
if strongSelf.tagMask == .photoOrVideo {
|
||||
var entries: [ChatListSearchEntry]? = entriesAndFlags?.0 ?? []
|
||||
var entries: [ChatListSearchEntry]? = entriesAndFlags ?? []
|
||||
if isSearching && (entries?.isEmpty ?? true) {
|
||||
entries = nil
|
||||
}
|
||||
strongSelf.mediaNode.updateHistory(entries: entries, totalCount: 0, updateType: .Initial)
|
||||
}
|
||||
|
||||
var entriesAndFlags = entriesAndFlags
|
||||
|
||||
var peers: [EnginePeer] = []
|
||||
if let entries = entriesAndFlags?.0 {
|
||||
if let entries = entriesAndFlags {
|
||||
var filteredEntries: [ChatListSearchEntry] = []
|
||||
for entry in entries {
|
||||
if case let .localPeer(peer, _, _, _, _, _, _, _, _) = entry {
|
||||
@ -2053,16 +2140,16 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
|
||||
}
|
||||
|
||||
if strongSelf.tagMask != nil || strongSelf.searchOptionsValue?.date != nil || strongSelf.searchOptionsValue?.peer != nil {
|
||||
entriesAndFlags?.0 = filteredEntries
|
||||
entriesAndFlags = filteredEntries
|
||||
}
|
||||
}
|
||||
|
||||
let previousEntries = previousSearchItems.swap(entriesAndFlags?.0)
|
||||
let newEntries = entriesAndFlags?.0 ?? []
|
||||
let previousEntries = previousSearchItems.swap(entriesAndFlags)
|
||||
let newEntries = entriesAndFlags ?? []
|
||||
|
||||
let animated = (previousSelectedMessageIds == nil) != (strongSelf.selectedMessages == nil)
|
||||
let firstTime = previousEntries == nil
|
||||
var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags?.0 != nil, isEmpty: !isSearching && (entriesAndFlags?.0.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
|
||||
var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags != nil, isEmpty: !isSearching && (entriesAndFlags?.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
|
||||
interaction.peerContextAction?(message, node, rect, gesture, location)
|
||||
}, toggleExpandLocalResults: {
|
||||
guard let strongSelf = self else {
|
||||
@@ -3115,8 +3202,9 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
}, present: { _ in }, openForumThread: { _, _ in })
var isInlineMode = false
if case .topics = key {
isInlineMode = true
isInlineMode = false
}
interaction.isSearchMode = true
interaction.isInlineMode = isInlineMode

let items = (0 ..< 2).compactMap { _ -> ListViewItem? in
@@ -1590,6 +1590,15 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}

let useChatListLayout: Bool
if case .chatList = item.chatListLocation {
useChatListLayout = true
} else if displayAsMessage {
useChatListLayout = true
} else {
useChatListLayout = false
}

let theme = item.presentationData.theme.chatList

var updatedTheme: PresentationTheme?
@@ -1653,7 +1662,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
let avatarLeftInset: CGFloat
if item.interaction.isInlineMode {
avatarLeftInset = 12.0
} else if case .forum = item.index {
} else if !useChatListLayout {
avatarLeftInset = 50.0
} else {
avatarLeftInset = 18.0 + avatarDiameter
@@ -2501,7 +2510,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var mainContentBoundsOffset: CGFloat
var mainContentAlpha: CGFloat = 1.0

if case .chatList = item.chatListLocation {
if useChatListLayout {
mainContentFrame = CGRect(origin: CGPoint(x: leftInset - 2.0, y: 0.0), size: CGSize(width: layout.contentSize.width, height: layout.contentSize.height))
mainContentBoundsOffset = mainContentFrame.origin.x
@@ -2694,7 +2703,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}

if let threadInfo = threadInfo {
if let threadInfo = threadInfo, !displayAsMessage {
let avatarIconView: ComponentHostView<Empty>
if let current = strongSelf.avatarIconView {
avatarIconView = current
@@ -2742,7 +2751,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
avatarIconView.removeFromSuperview()
}

if case .forum = item.index {
if !useChatListLayout {
strongSelf.avatarContainerNode.isHidden = true
} else {
strongSelf.avatarContainerNode.isHidden = false
@@ -95,6 +95,8 @@ public final class ChatListNodeInteraction {
public var searchTextHighightState: String?
var highlightedChatLocation: ChatListHighlightedLocation?

var isSearchMode: Bool = false

var isInlineMode: Bool = false
var inlineNavigationLocation: ChatListHighlightedLocation?
@ -1186,7 +1186,7 @@ public final class ListMessageFileItemNode: ListMessageNode {
|
||||
let iconFrame = CGRect(origin: CGPoint(x: params.leftInset + leftOffset + 12.0, y: 8.0), size: iconSize)
|
||||
transition.updateFrame(node: strongSelf.extensionIconNode, frame: iconFrame)
|
||||
strongSelf.extensionIconNode.image = extensionIconImage
|
||||
transition.updateFrame(node: strongSelf.extensionIconText, frame: CGRect(origin: CGPoint(x: iconFrame.minX + floor((iconFrame.width - extensionTextLayout.size.width) / 2.0), y: iconFrame.minY + 7.0 + floor((iconFrame.height - extensionTextLayout.size.height) / 2.0)), size: extensionTextLayout.size))
|
||||
transition.updateFrame(node: strongSelf.extensionIconText, frame: CGRect(origin: CGPoint(x: iconFrame.minX + floorToScreenPixels((iconFrame.width - extensionTextLayout.size.width) / 2.0), y: iconFrame.minY + 7.0 + floorToScreenPixels((iconFrame.height - extensionTextLayout.size.height) / 2.0)), size: extensionTextLayout.size))
|
||||
|
||||
transition.updateFrame(node: strongSelf.iconStatusNode, frame: iconFrame)
|
||||
|
||||
@ -1235,7 +1235,7 @@ public final class ListMessageFileItemNode: ListMessageNode {
|
||||
}
|
||||
|
||||
if let downloadStatusIconNode = strongSelf.downloadStatusIconNode {
|
||||
transition.updateFrame(node: downloadStatusIconNode, frame: CGRect(origin: CGPoint(x: leftOffset + leftInset - 3.0, y: strongSelf.descriptionNode.frame.minY + floor((strongSelf.descriptionNode.frame.height - 18.0) / 2.0)), size: CGSize(width: 18.0, height: 18.0)))
|
||||
transition.updateFrame(node: downloadStatusIconNode, frame: CGRect(origin: CGPoint(x: leftOffset + leftInset - 3.0, y: strongSelf.descriptionNode.frame.minY + floorToScreenPixels((strongSelf.descriptionNode.frame.height - 18.0) / 2.0) + UIScreenPixel), size: CGSize(width: 18.0, height: 18.0)))
|
||||
}
|
||||
|
||||
if let updatedFetchControls = updatedFetchControls {
|
||||
@ -1280,10 +1280,10 @@ public final class ListMessageFileItemNode: ListMessageNode {
|
||||
let lineDiameter: CGFloat = 8.0
|
||||
|
||||
let titleFrame = strongSelf.titleNode.frame
|
||||
shapes.append(.roundedRectLine(startPoint: CGPoint(x: titleFrame.minX, y: titleFrame.minY + floor((titleFrame.height - lineDiameter) / 2.0)), width: titleLineWidth, diameter: lineDiameter))
|
||||
shapes.append(.roundedRectLine(startPoint: CGPoint(x: titleFrame.minX, y: titleFrame.minY + floorToScreenPixels((titleFrame.height - lineDiameter) / 2.0)), width: titleLineWidth, diameter: lineDiameter))
|
||||
|
||||
let descriptionFrame = strongSelf.descriptionNode.frame
|
||||
shapes.append(.roundedRectLine(startPoint: CGPoint(x: descriptionFrame.minX, y: descriptionFrame.minY + floor((descriptionFrame.height - lineDiameter) / 2.0)), width: descriptionLineWidth, diameter: lineDiameter))
|
||||
shapes.append(.roundedRectLine(startPoint: CGPoint(x: descriptionFrame.minX, y: descriptionFrame.minY + floorToScreenPixels((descriptionFrame.height - lineDiameter) / 2.0)), width: descriptionLineWidth, diameter: lineDiameter))
|
||||
|
||||
if let media = selectedMedia as? TelegramMediaFile, media.isInstantVideo {
|
||||
shapes.append(.circle(iconFrame))
|
||||
@ -1453,7 +1453,7 @@ public final class ListMessageFileItemNode: ListMessageNode {
|
||||
|
||||
switch maybeFetchStatus {
|
||||
case .Fetching(_, let progress), .Paused(let progress):
|
||||
let progressFrame = CGRect(x: self.currentLeftOffset + leftInset + 65.0, y: size.height - 3.0, width: floor((size.width - 65.0 - leftInset - rightInset)), height: 3.0)
|
||||
let progressFrame = CGRect(x: self.currentLeftOffset + leftInset + 65.0, y: size.height - 3.0, width: floorToScreenPixels((size.width - 65.0 - leftInset - rightInset)), height: 3.0)
|
||||
let linearProgressNode: LinearProgressNode
|
||||
if let current = self.linearProgressNode {
|
||||
linearProgressNode = current
|
||||
@ -1543,10 +1543,10 @@ public final class ListMessageFileItemNode: ListMessageNode {
|
||||
alphaTransition.updateAlpha(node: self.descriptionNode, alpha: 1.0)
|
||||
}
|
||||
|
||||
let descriptionFont = Font.with(size: floor(item.presentationData.fontSize.baseDisplaySize * 13.0 / 17.0), design: .regular, weight: .regular, traits: [.monospacedNumbers])
|
||||
let descriptionFont = Font.with(size: floorToScreenPixels(item.presentationData.fontSize.baseDisplaySize * 13.0 / 17.0), design: .regular, weight: .regular, traits: [.monospacedNumbers])
|
||||
self.descriptionProgressNode.attributedText = NSAttributedString(string: downloadingString ?? "", font: descriptionFont, textColor: item.presentationData.theme.theme.list.itemSecondaryTextColor)
|
||||
let descriptionSize = self.descriptionProgressNode.updateLayout(CGSize(width: size.width - 14.0, height: size.height))
|
||||
transition.updateFrame(node: self.descriptionProgressNode, frame: CGRect(origin: self.descriptionNode.frame.origin, size: descriptionSize))
|
||||
transition.updateFrame(node: self.descriptionProgressNode, frame: CGRect(origin: CGPoint(x: self.descriptionNode.frame.minX, y: self.descriptionNode.frame.minY + floorToScreenPixels((self.descriptionNode.bounds.height - descriptionSize.height) / 2.0)), size: descriptionSize))
|
||||
}
|
||||
|
||||
public func activateMedia() {
|
||||
|
@ -110,6 +110,9 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
private let joinButtonTitleNode: ImmediateTextNode
|
||||
private let joinButtonBackgroundNode: ASImageNode
|
||||
|
||||
private var previewImageNode: ASImageNode?
|
||||
private var previewImage: UIImage?
|
||||
|
||||
private var audioLevelView: VoiceBlobView?
|
||||
|
||||
private let micButton: HighlightTrackingButtonNode
|
||||
@ -139,6 +142,7 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
private let membersDisposable = MetaDisposable()
|
||||
private let isMutedDisposable = MetaDisposable()
|
||||
private let audioLevelDisposable = MetaDisposable()
|
||||
private var imageDisposable: Disposable?
|
||||
|
||||
private var callState: PresentationGroupCallState?
|
||||
|
||||
@ -233,6 +237,8 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
self.isMutedDisposable.dispose()
|
||||
self.audioLevelGeneratorTimer?.invalidate()
|
||||
self.updateTimer?.invalidate()
|
||||
self.imageDisposable?.dispose()
|
||||
self.audioLevelDisposable.dispose()
|
||||
}
|
||||
|
||||
public override func didLoad() {
|
||||
@ -366,6 +372,11 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
self.avatarsContent = self.avatarsContext.update(peers: [], animated: false)
|
||||
} else {
|
||||
self.avatarsContent = self.avatarsContext.update(peers: data.topParticipants.map { EnginePeer($0.peer) }, animated: false)
|
||||
|
||||
if let imageDisposable = self.imageDisposable {
|
||||
self.imageDisposable = nil
|
||||
imageDisposable.dispose()
|
||||
}
|
||||
}
|
||||
|
||||
self.textNode.attributedText = NSAttributedString(string: membersText, font: Font.regular(13.0), textColor: self.theme.chat.inputPanel.secondaryTextColor)
|
||||
@ -484,6 +495,67 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
updateAudioLevels = true
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
if data.info.isStream {
|
||||
if self.imageDisposable == nil {
|
||||
let engine = self.context.engine
|
||||
let info = data.info
|
||||
self.imageDisposable = (engine.calls.getAudioBroadcastDataSource(callId: info.id, accessHash: info.accessHash)
|
||||
|> mapToSignal { source -> Signal<Data?, NoError> in
|
||||
guard let source else {
|
||||
return .single(nil)
|
||||
}
|
||||
|
||||
let time = engine.calls.requestStreamState(dataSource: source, callId: info.id, accessHash: info.accessHash)
|
||||
|> map { state -> Int64? in
|
||||
guard let state else {
|
||||
return nil
|
||||
}
|
||||
return state.channels.first?.latestTimestamp
|
||||
}
|
||||
|
||||
return time
|
||||
|> mapToSignal { latestTimestamp -> Signal<Data?, NoError> in
|
||||
guard let latestTimestamp else {
|
||||
return .single(nil)
|
||||
}
|
||||
|
||||
let durationMilliseconds: Int64 = 32000
|
||||
let bufferOffset: Int64 = 1 * durationMilliseconds
|
||||
let timestampId = (latestTimestamp / durationMilliseconds) * durationMilliseconds - bufferOffset
|
||||
|
||||
return engine.calls.getVideoBroadcastPart(dataSource: source, callId: info.id, accessHash: info.accessHash, timestampIdMilliseconds: timestampId, durationMilliseconds: durationMilliseconds, channelId: 2, quality: 0)
|
||||
|> mapToSignal { result -> Signal<Data?, NoError> in
|
||||
switch result.status {
|
||||
case let .data(data):
|
||||
return .single(data)
|
||||
case .notReady, .resyncNeeded, .rejoinNeeded:
|
||||
return .single(nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|> deliverOnMainQueue).start(next: { [weak self] data in
|
||||
guard let self, let data else {
|
||||
return
|
||||
}
|
||||
|
||||
var image: UIImage?
|
||||
for i in 0 ..< 100 {
|
||||
image = UIImage(data: data.subdata(in: i ..< data.count))
|
||||
if image != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
self.previewImage = image
|
||||
if let (size, leftInset, rightInset) = self.validLayout {
|
||||
self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
if let (size, leftInset, rightInset) = self.validLayout {
|
||||
self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
|
||||
}
|
||||
@ -609,6 +681,26 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
|
||||
staticTransition.updateFrame(node: self.joinButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: joinButtonFrame.size))
|
||||
staticTransition.updateFrame(node: self.joinButtonTitleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((joinButtonFrame.width - joinButtonTitleSize.width) / 2.0), y: floorToScreenPixels((joinButtonFrame.height - joinButtonTitleSize.height) / 2.0)), size: joinButtonTitleSize))
|
||||
|
||||
if let previewImage = self.previewImage {
|
||||
let previewImageNode: ASImageNode
|
||||
if let current = self.previewImageNode {
|
||||
previewImageNode = current
|
||||
} else {
|
||||
previewImageNode = ASImageNode()
|
||||
previewImageNode.clipsToBounds = true
|
||||
previewImageNode.cornerRadius = 8.0
|
||||
previewImageNode.contentMode = .scaleAspectFill
|
||||
self.previewImageNode = previewImageNode
|
||||
self.addSubnode(previewImageNode)
|
||||
}
|
||||
previewImageNode.image = previewImage
|
||||
let previewSize = CGSize(width: 40.0, height: 40.0)
|
||||
previewImageNode.frame = CGRect(origin: CGPoint(x: joinButtonFrame.minX - previewSize.width - 8.0, y: joinButtonFrame.minY + floor((joinButtonFrame.height - previewSize.height) / 2.0)), size: previewSize)
|
||||
} else if let previewImageNode = self.previewImageNode {
|
||||
self.previewImageNode = nil
|
||||
previewImageNode.removeFromSupernode()
|
||||
}
|
||||
|
||||
let micButtonSize = CGSize(width: 36.0, height: 36.0)
|
||||
let micButtonFrame = CGRect(origin: CGPoint(x: size.width - rightInset - 7.0 - micButtonSize.width, y: floor((panelHeight - micButtonSize.height) / 2.0)), size: micButtonSize)
|
||||
staticTransition.updateFrame(node: self.micButton, frame: micButtonFrame)
|
||||
|
@ -15,180 +15,6 @@ import AccountContext
|
||||
import DeviceProximity
|
||||
import PhoneNumberFormat
|
||||
|
||||
final class PresentationCallToneRenderer {
|
||||
let queue: Queue
|
||||
|
||||
let tone: PresentationCallTone
|
||||
|
||||
private let toneRenderer: MediaPlayerAudioRenderer
|
||||
private var toneRendererAudioSession: MediaPlayerAudioSessionCustomControl?
|
||||
private var toneRendererAudioSessionActivated = false
|
||||
private let audioLevelPipe = ValuePipe<Float>()
|
||||
|
||||
init(tone: PresentationCallTone, completed: (() -> Void)? = nil) {
|
||||
let queue = Queue.mainQueue()
|
||||
self.queue = queue
|
||||
|
||||
self.tone = tone
|
||||
|
||||
var controlImpl: ((MediaPlayerAudioSessionCustomControl) -> Disposable)?
|
||||
|
||||
self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in
|
||||
return controlImpl?(control) ?? EmptyDisposable
|
||||
}), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})
|
||||
|
||||
controlImpl = { [weak self] control in
|
||||
queue.async {
|
||||
if let strongSelf = self {
|
||||
strongSelf.toneRendererAudioSession = control
|
||||
if strongSelf.toneRendererAudioSessionActivated {
|
||||
control.activate()
|
||||
}
|
||||
}
|
||||
}
|
||||
return ActionDisposable {
|
||||
}
|
||||
}
|
||||
|
||||
let toneDataOffset = Atomic<Int>(value: 0)
|
||||
|
||||
let toneData = Atomic<Data?>(value: nil)
|
||||
let reportedCompletion = Atomic<Bool>(value: false)
|
||||
|
||||
self.toneRenderer.beginRequestingFrames(queue: DispatchQueue.global(), takeFrame: {
|
||||
var data = toneData.with { $0 }
|
||||
if data == nil {
|
||||
data = presentationCallToneData(tone)
|
||||
if data != nil {
|
||||
let _ = toneData.swap(data)
|
||||
}
|
||||
}
|
||||
|
||||
guard let toneData = data else {
|
||||
if !reportedCompletion.swap(true) {
|
||||
completed?()
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
|
||||
let toneDataMaxOffset: Int?
|
||||
if let loopCount = tone.loopCount {
|
||||
toneDataMaxOffset = (data?.count ?? 0) * loopCount
|
||||
} else {
|
||||
toneDataMaxOffset = nil
|
||||
}
|
||||
|
||||
let frameSize = 44100
|
||||
|
||||
var takeOffset: Int?
|
||||
let _ = toneDataOffset.modify { current in
|
||||
takeOffset = current
|
||||
return current + frameSize
|
||||
}
|
||||
|
||||
if let takeOffset = takeOffset {
|
||||
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset >= toneDataMaxOffset {
|
||||
if !reportedCompletion.swap(true) {
|
||||
Queue.mainQueue().after(1.0, {
|
||||
completed?()
|
||||
})
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
|
||||
var blockBuffer: CMBlockBuffer?
|
||||
|
||||
let bytes = malloc(frameSize)!
|
||||
toneData.withUnsafeBytes { dataBuffer -> Void in
|
||||
guard let dataBytes = dataBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
|
||||
return
|
||||
}
|
||||
var takenCount = 0
|
||||
while takenCount < frameSize {
|
||||
let dataOffset = (takeOffset + takenCount) % toneData.count
|
||||
let dataCount = min(frameSize - takenCount, toneData.count - dataOffset)
|
||||
//print("take from \(dataOffset) count: \(dataCount)")
|
||||
memcpy(bytes.advanced(by: takenCount), dataBytes.advanced(by: dataOffset), dataCount)
|
||||
takenCount += dataCount
|
||||
|
||||
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + takenCount >= toneDataMaxOffset {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if takenCount < frameSize {
|
||||
//print("fill with zeros from \(takenCount) count: \(frameSize - takenCount)")
|
||||
memset(bytes.advanced(by: takenCount), 0, frameSize - takenCount)
|
||||
}
|
||||
}
|
||||
|
||||
/*if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + frameSize > toneDataMaxOffset {
|
||||
let validCount = max(0, toneDataMaxOffset - takeOffset)
|
||||
memset(bytes.advanced(by: validCount), 0, frameSize - validCount)
|
||||
print("clear from \(validCount) count: \(frameSize - validCount)")
|
||||
}*/
|
||||
|
||||
let status = CMBlockBufferCreateWithMemoryBlock(allocator: nil, memoryBlock: bytes, blockLength: frameSize, blockAllocator: nil, customBlockSource: nil, offsetToData: 0, dataLength: frameSize, flags: 0, blockBufferOut: &blockBuffer)
|
||||
if status != noErr {
|
||||
if !reportedCompletion.swap(true) {
|
||||
completed?()
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
|
||||
let sampleCount = frameSize / 2
|
||||
|
||||
let pts = CMTime(value: Int64(takeOffset / 2), timescale: 44100)
|
||||
var timingInfo = CMSampleTimingInfo(duration: CMTime(value: Int64(sampleCount), timescale: 44100), presentationTimeStamp: pts, decodeTimeStamp: pts)
|
||||
var sampleBuffer: CMSampleBuffer?
|
||||
var sampleSize = frameSize
|
||||
guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
|
||||
if !reportedCompletion.swap(true) {
|
||||
completed?()
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
|
||||
if let sampleBuffer = sampleBuffer {
|
||||
return .frame(MediaTrackFrame(type: .audio, sampleBuffer: sampleBuffer, resetDecoder: false, decoded: true))
|
||||
} else {
|
||||
if !reportedCompletion.swap(true) {
|
||||
completed?()
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
} else {
|
||||
if !reportedCompletion.swap(true) {
|
||||
completed?()
|
||||
}
|
||||
return .finished
|
||||
}
|
||||
})
|
||||
self.toneRenderer.start()
|
||||
self.toneRenderer.setRate(1.0)
|
||||
}
|
||||
|
||||
deinit {
|
||||
assert(self.queue.isCurrent())
|
||||
self.toneRenderer.stop()
|
||||
}
|
||||
|
||||
func setAudioSessionActive(_ value: Bool) {
|
||||
if self.toneRendererAudioSessionActivated != value {
|
||||
self.toneRendererAudioSessionActivated = value
|
||||
if let control = self.toneRendererAudioSession {
|
||||
if value {
|
||||
self.toneRenderer.setRate(1.0)
|
||||
control.activate()
|
||||
} else {
|
||||
self.toneRenderer.setRate(0.0)
|
||||
control.deactivate()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public final class PresentationCallImpl: PresentationCall {
|
||||
public let context: AccountContext
|
||||
private let audioSession: ManagedAudioSession
|
||||
@ -221,6 +47,7 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
private var callContextState: OngoingCallContextState?
|
||||
private var ongoingContext: OngoingCallContext?
|
||||
private var ongoingContextStateDisposable: Disposable?
|
||||
private var sharedAudioDevice: OngoingCallContext.AudioDevice?
|
||||
private var requestedVideoAspect: Float?
|
||||
private var reception: Int32?
|
||||
private var receptionDisposable: Disposable?
|
||||
@ -282,7 +109,7 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
private var audioSessionActiveDisposable: Disposable?
|
||||
private var isAudioSessionActive = false
|
||||
|
||||
private var toneRenderer: PresentationCallToneRenderer?
|
||||
private var currentTone: PresentationCallTone?
|
||||
|
||||
private var droppedCall = false
|
||||
private var dropCallKitCallTimer: SwiftSignalKit.Timer?
|
||||
@ -463,6 +290,12 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
}
|
||||
})
|
||||
|
||||
if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
|
||||
self.sharedAudioDevice = nil
|
||||
} else {
|
||||
self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
|
||||
}
|
||||
|
||||
self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] value in
|
||||
if let strongSelf = self {
|
||||
@ -702,7 +535,7 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
|
||||
let updatedConnections = connections
|
||||
|
||||
let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec)
|
||||
let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec, audioDevice: self.sharedAudioDevice)
|
||||
self.ongoingContext = ongoingContext
|
||||
ongoingContext.setIsMuted(self.isMutedValue)
|
||||
if let requestedVideoAspect = self.requestedVideoAspect {
|
||||
@ -864,26 +697,19 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
break
|
||||
}
|
||||
}
|
||||
if tone != self.toneRenderer?.tone {
|
||||
if let tone = tone {
|
||||
if "".isEmpty {
|
||||
let _ = tone
|
||||
} else {
|
||||
let toneRenderer = PresentationCallToneRenderer(tone: tone)
|
||||
self.toneRenderer = toneRenderer
|
||||
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
|
||||
}
|
||||
} else {
|
||||
self.toneRenderer = nil
|
||||
}
|
||||
if tone != self.currentTone {
|
||||
self.currentTone = tone
|
||||
self.sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
|
||||
return OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private func updateIsAudioSessionActive(_ value: Bool) {
|
||||
if self.isAudioSessionActive != value {
|
||||
self.isAudioSessionActive = value
|
||||
self.toneRenderer?.setAudioSessionActive(value)
|
||||
}
|
||||
self.sharedAudioDevice?.setIsAudioSessionActive(value)
|
||||
}
|
||||
|
||||
public func answer() {
|
||||
|
@@ -4,12 +4,12 @@ import AVFoundation
private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
let outputSettings: [String: Any] = [
AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
AVSampleRateKey: 44100.0 as NSNumber,
AVSampleRateKey: 48000.0 as NSNumber,
AVLinearPCMBitDepthKey: 16 as NSNumber,
AVLinearPCMIsNonInterleaved: false as NSNumber,
AVLinearPCMIsFloatKey: false as NSNumber,
AVLinearPCMIsBigEndianKey: false as NSNumber,
AVNumberOfChannelsKey: 2 as NSNumber
AVNumberOfChannelsKey: 1 as NSNumber
]

let nsName: NSString = name as NSString
@@ -63,9 +63,9 @@ private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Dat
}

if !addSilenceDuration.isZero {
let sampleRate = 44100
let sampleRate = 48000
let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
let numberOfChannels = 2
let numberOfChannels = 1
let numberOfBytes = numberOfSamples * 2 * numberOfChannels

data.append(Data(count: numberOfBytes))
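Note on the silence-padding arithmetic above: with the updated output settings (48 kHz, mono, 16-bit PCM), the appended silence is sized as samples × 2 bytes × channels. A minimal sketch, using an illustrative 0.5-second duration that is not taken from the commit:

let addSilenceDuration = 0.5                 // illustrative value only
let sampleRate = 48000                       // matches the new AVSampleRateKey
let numberOfChannels = 1                     // mono after this change
let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)   // 24000 samples
let numberOfBytes = numberOfSamples * 2 * numberOfChannels           // 48000 bytes of 16-bit PCM silence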
@@ -433,6 +433,15 @@ private extension CurrentImpl {
break
}
}

func setTone(tone: OngoingGroupCallContext.Tone?) {
switch self {
case let .call(callContext):
callContext.setTone(tone: tone)
case .mediaStream:
break
}
}
}

public func groupCallLogsPath(account: Account) -> String {
@@ -823,7 +832,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

private var didStartConnectingOnce: Bool = false
private var didConnectOnce: Bool = false
private var toneRenderer: PresentationCallToneRenderer?

private var videoCapturer: OngoingCallVideoCapturer?
private var useFrontCamera: Bool = true
@@ -1841,7 +1849,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if isConnecting {
strongSelf.beginTone(tone: .groupConnecting)
} else {
strongSelf.toneRenderer = nil
strongSelf.beginTone(tone: nil)
}
}
@@ -2470,15 +2478,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
self.toneRenderer?.setAudioSessionActive(value)
}
}

private func beginTone(tone: PresentationCallTone) {
if "".isEmpty {
return
}
if self.isStream {
private func beginTone(tone: PresentationCallTone?) {
if self.isStream, let tone {
switch tone {
case .groupJoined, .groupLeft:
return
@@ -2486,21 +2490,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
}
}
var completed: (() -> Void)?
let toneRenderer = PresentationCallToneRenderer(tone: tone, completed: {
completed?()
})
completed = { [weak self, weak toneRenderer] in
Queue.mainQueue().async {
guard let strongSelf = self, let toneRenderer = toneRenderer, toneRenderer === strongSelf.toneRenderer else {
return
}
strongSelf.toneRenderer = nil
}
if let tone, let toneData = presentationCallToneData(tone) {
self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
samples: toneData,
sampleRate: 48000,
loopCount: tone.loopCount ?? 100000
))
} else {
self.genericCallContext?.setTone(tone: nil)
}

self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
}

public func playTone(_ tone: PresentationGroupCallTone) {
@@ -200,6 +200,12 @@ public final class EngineMessage: Equatable {
if lhs.associatedThreadInfo != rhs.associatedThreadInfo {
return false
}
if lhs.attributes.count != rhs.attributes.count {
return false
}
if lhs.stableVersion != rhs.stableVersion {
return false
}
return true
}
}
@@ -117,7 +117,10 @@ private func mergedState(transaction: Transaction, seedConfiguration: SeedConfig

var peerIdsSet: Set<PeerId> = Set()
var readStates: [PeerId: CombinedPeerReadState] = [:]
var threadInfo:[MessageId : MessageHistoryThreadData] = [:]
var threadInfo: [MessageId : MessageHistoryThreadData] = [:]
if let state = state {
threadInfo = state.threadInfo
}

var renderedMessages: [Message] = []
for message in messages {
@@ -400,9 +400,22 @@ public final class OngoingGroupCallContext {
public var incomingVideoStats: [String: IncomingVideoStats]
}

public final class Tone {
public let samples: Data
public let sampleRate: Int
public let loopCount: Int

public init(samples: Data, sampleRate: Int, loopCount: Int) {
self.samples = samples
self.sampleRate = sampleRate
self.loopCount = loopCount
}
}

private final class Impl {
let queue: Queue
let context: GroupCallThreadLocalContext
let audioDevice: SharedCallAudioDevice?

let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))
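The new Tone type above is consumed by PresentationGroupCallImpl.beginTone(tone:) elsewhere in this commit; abridged from that call site (names as in the diff), the wiring looks like this:

if let tone, let toneData = presentationCallToneData(tone) {
    genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
        samples: toneData,          // 16-bit PCM produced by presentationCallToneData
        sampleRate: 48000,
        loopCount: tone.loopCount ?? 100000
    ))
} else {
    genericCallContext?.setTone(tone: nil)
}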
@ -421,6 +434,13 @@ public final class OngoingGroupCallContext {
|
||||
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
|
||||
self.queue = queue
|
||||
|
||||
self.audioDevice = nil
|
||||
/*#if DEBUG
|
||||
self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
|
||||
#else
|
||||
self.audioDevice = nil
|
||||
#endif*/
|
||||
|
||||
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
|
||||
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
|
||||
|
||||
@ -526,7 +546,8 @@ public final class OngoingGroupCallContext {
|
||||
enableNoiseSuppression: enableNoiseSuppression,
|
||||
disableAudioInput: disableAudioInput,
|
||||
preferX264: preferX264,
|
||||
logPath: logPath
|
||||
logPath: logPath,
|
||||
audioDevice: self.audioDevice
|
||||
)
|
||||
|
||||
let queue = self.queue
|
||||
@ -580,6 +601,7 @@ public final class OngoingGroupCallContext {
|
||||
return
|
||||
}
|
||||
#if os(iOS)
|
||||
self.audioDevice?.setManualAudioSessionIsActive(isActive)
|
||||
self.context.setManualAudioSessionIsActive(isActive)
|
||||
#endif
|
||||
}))
|
||||
@ -884,6 +906,17 @@ public final class OngoingGroupCallContext {
|
||||
completion(Stats(incomingVideoStats: incomingVideoStats))
|
||||
})
|
||||
}
|
||||
|
||||
func setTone(tone: Tone?) {
|
||||
let mappedTone = tone.flatMap { tone in
|
||||
CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
|
||||
}
|
||||
if let audioDevice = self.audioDevice {
|
||||
audioDevice.setTone(mappedTone)
|
||||
} else {
|
||||
self.context.setTone(mappedTone)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private let queue = Queue()
|
||||
@@ -1075,4 +1108,10 @@ public final class OngoingGroupCallContext {
impl.getStats(completion: completion)
}
}

public func setTone(tone: Tone?) {
self.impl.with { impl in
impl.setTone(tone: tone)
}
}
}
@@ -706,6 +706,40 @@ public final class OngoingCallContext {
}
}

public final class Tone {
public let samples: Data
public let sampleRate: Int
public let loopCount: Int

public init(samples: Data, sampleRate: Int, loopCount: Int) {
self.samples = samples
self.sampleRate = sampleRate
self.loopCount = loopCount
}
}

public final class AudioDevice {
let impl: SharedCallAudioDevice

public static func create() -> AudioDevice? {
return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
}

private init(impl: SharedCallAudioDevice) {
self.impl = impl
}

public func setIsAudioSessionActive(_ isActive: Bool) {
self.impl.setManualAudioSessionIsActive(isActive)
}

public func setTone(tone: Tone?) {
self.impl.setTone(tone.flatMap { tone in
CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
})
}
}

public static func setupAudioSession() {
OngoingCallThreadLocalContextWebrtc.setupAudioSession()
}
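For one-to-one calls, PresentationCallImpl (also changed in this commit) creates the shared AudioDevice up front, hands it to OngoingCallContext, and routes tones and audio-session state through it; condensed from the diff:

let sharedAudioDevice = OngoingCallContext.AudioDevice.create()
// later passed into OngoingCallContext(..., audioDevice: sharedAudioDevice)
sharedAudioDevice?.setIsAudioSessionActive(isActive)   // forwarded from updateIsAudioSessionActive(_:)
sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
    OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
})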
@ -751,7 +785,7 @@ public final class OngoingCallContext {
|
||||
|
||||
private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?
|
||||
|
||||
private let audioDevice: SharedCallAudioDevice?
|
||||
private let audioDevice: AudioDevice?
|
||||
|
||||
public static func versions(includeExperimental: Bool, includeReference: Bool) -> [(version: String, supportsVideo: Bool)] {
|
||||
#if os(iOS) && DEBUG && false
|
||||
@ -771,7 +805,7 @@ public final class OngoingCallContext {
|
||||
}
|
||||
}
|
||||
|
||||
public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?) {
|
||||
public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?, audioDevice: AudioDevice?) {
|
||||
let _ = setupLogs
|
||||
OngoingCallThreadLocalContext.applyServerConfig(serializedData)
|
||||
|
||||
@ -782,12 +816,6 @@ public final class OngoingCallContext {
|
||||
self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
|
||||
let logPath = self.logPath
|
||||
|
||||
let audioDevice: SharedCallAudioDevice?
|
||||
if !"".isEmpty {
|
||||
audioDevice = SharedCallAudioDevice()
|
||||
} else {
|
||||
audioDevice = nil
|
||||
}
|
||||
self.audioDevice = audioDevice
|
||||
|
||||
let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
|
||||
@ -910,7 +938,7 @@ public final class OngoingCallContext {
|
||||
callSessionManager.sendSignalingData(internalId: internalId, data: data)
|
||||
}
|
||||
}
|
||||
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice)
|
||||
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice?.impl)
|
||||
|
||||
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
|
||||
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
|
||||
|
@ -11,14 +11,26 @@
|
||||
#define UIView NSView
|
||||
#endif
|
||||
|
||||
@interface CallAudioTone : NSObject
|
||||
|
||||
@property (nonatomic, strong, readonly) NSData * _Nonnull samples;
|
||||
@property (nonatomic, readonly) NSInteger sampleRate;
|
||||
@property (nonatomic, readonly) NSInteger loopCount;
|
||||
|
||||
- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount;
|
||||
|
||||
@end
|
||||
|
||||
@interface SharedCallAudioDevice : NSObject
|
||||
|
||||
- (instancetype _Nonnull)init;
|
||||
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;
|
||||
|
||||
+ (void)setupAudioSession;
|
||||
|
||||
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
|
||||
|
||||
- (void)setTone:(CallAudioTone * _Nullable)tone;
|
||||
|
||||
@end
|
||||
|
||||
@interface OngoingCallConnectionDescriptionWebrtc : NSObject
|
||||
@ -385,12 +397,15 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
|
||||
enableNoiseSuppression:(bool)enableNoiseSuppression
|
||||
disableAudioInput:(bool)disableAudioInput
|
||||
preferX264:(bool)preferX264
|
||||
logPath:(NSString * _Nonnull)logPath;
|
||||
logPath:(NSString * _Nonnull)logPath
|
||||
audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice;
|
||||
|
||||
- (void)stop;
|
||||
|
||||
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
|
||||
|
||||
- (void)setTone:(CallAudioTone * _Nullable)tone;
|
||||
|
||||
- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
|
||||
|
||||
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
|
||||
|
@ -43,6 +43,28 @@
|
||||
#import "platform/darwin/TGRTCCVPixelBuffer.h"
|
||||
#include "rtc_base/logging.h"
|
||||
|
||||
@implementation CallAudioTone
|
||||
|
||||
- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount {
|
||||
self = [super init];
|
||||
if (self != nil) {
|
||||
_samples = samples;
|
||||
_sampleRate = sampleRate;
|
||||
_loopCount = loopCount;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (std::shared_ptr<tgcalls::CallAudioTone>)asTone {
|
||||
std::vector<int16_t> data;
|
||||
data.resize(_samples.length / 2);
|
||||
memcpy(data.data(), _samples.bytes, _samples.length);
|
||||
|
||||
return std::make_shared<tgcalls::CallAudioTone>(std::move(data), (int)_sampleRate, (int)_loopCount);
|
||||
}
|
||||
|
||||
@end
|
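The conversion above copies the NSData bytes verbatim into a std::vector<int16_t>, so the samples handed to CallAudioTone (and to the Swift Tone wrapper) are expected to be raw host-endian 16-bit PCM. A hedged Swift sketch of producing such a buffer; the sine wave, mono layout and default parameters are assumptions for illustration only:

import Foundation

// Builds `duration` seconds of a mono sine tone as host-endian Int16 PCM,
// matching how the bytes are reinterpreted by asTone above.
func makeSineToneSamples(frequency: Double = 440.0, sampleRate: Int = 48000, duration: Double = 1.0, amplitude: Double = 0.5) -> Data {
    let count = Int(Double(sampleRate) * duration)
    var samples = [Int16](repeating: 0, count: count)
    for i in 0..<count {
        let value = amplitude * sin(2.0 * Double.pi * frequency * Double(i) / Double(sampleRate))
        samples[i] = Int16(value * Double(Int16.max))
    }
    return samples.withUnsafeBufferPointer { Data(buffer: $0) }
}

How the device module interprets the channel layout is not visible in this diff, so treat the mono layout as an assumption.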

namespace tgcalls {

class SharedAudioDeviceModule {

@ -50,51 +72,67 @@ public:
virtual ~SharedAudioDeviceModule() = default;

public:
virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() = 0;
virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() = 0;
virtual void start() = 0;
};

}

class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
public:
SharedAudioDeviceModuleImpl() {
if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
} else {
tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
});
}
SharedAudioDeviceModuleImpl(bool disableAudioInput) {
RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
}

virtual ~SharedAudioDeviceModuleImpl() override {
if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
if (_audioDeviceModule->Playing()) {
_audioDeviceModule->StopPlayout();
_audioDeviceModule->StopRecording();
}
_audioDeviceModule = nullptr;
} else {
tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
if (_audioDeviceModule->Playing()) {
_audioDeviceModule->StopPlayout();
_audioDeviceModule->StopRecording();
}
_audioDeviceModule = nullptr;
});
}
}

public:
virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() override {
virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() override {
return _audioDeviceModule;
}

virtual void start() override {
RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());

_audioDeviceModule->Init();
if (!_audioDeviceModule->Playing()) {
_audioDeviceModule->InitPlayout();
//_audioDeviceModule->InitRecording();
_audioDeviceModule->InternalStartPlayout();
//_audioDeviceModule->InternalStartRecording();
}
}

private:
rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _audioDeviceModule;
};

@implementation SharedCallAudioDevice {
std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
}

- (instancetype _Nonnull)init {
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
self = [super init];
if (self != nil) {
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), []() mutable {
return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl());
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl(disableRecording));
}));
}
return self;

@ -104,6 +142,12 @@ private:
_audioDeviceModule.reset();
}

- (void)setTone:(CallAudioTone * _Nullable)tone {
_audioDeviceModule->perform([tone](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
audioDeviceModule->audioDeviceModule()->setTone([tone asTone]);
});
}

- (std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>>)getAudioDeviceModule {
return _audioDeviceModule;
}

@ -128,6 +172,12 @@ private:
[[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
}
[RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;

if (isAudioSessionActive) {
_audioDeviceModule->perform([](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
audioDeviceModule->start();
});
}
}

@end

@ -800,6 +850,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
bool _useManualAudioSessionControl;
SharedCallAudioDevice *_audioDevice;

rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
rtc::Thread *_currentAudioDeviceModuleThread;

OngoingCallNetworkTypeWebrtc _networkType;
NSTimeInterval _callReceiveTimeout;
NSTimeInterval _callRingTimeout;

@ -1213,11 +1266,20 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}];
},
.createAudioDeviceModule = [audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
.createAudioDeviceModule = [weakSelf, queue, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
if (audioDeviceModule) {
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
return rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
strongSelf->_currentAudioDeviceModule = resultModule;
}
}];
return resultModule;
}
}
});

@ -1232,6 +1294,14 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
}

if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}

if (_tgVoip != NULL) {
[self stop:nil];
}

@ -1537,6 +1607,11 @@ private:

int _nextSinkId;
NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;

rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
rtc::Thread *_currentAudioDeviceModuleThread;

SharedCallAudioDevice * _audioDevice;
}

@end

@ -1558,7 +1633,8 @@ private:
enableNoiseSuppression:(bool)enableNoiseSuppression
disableAudioInput:(bool)disableAudioInput
preferX264:(bool)preferX264
logPath:(NSString * _Nonnull)logPath {
logPath:(NSString * _Nonnull)logPath
audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
self = [super init];
if (self != nil) {
_queue = queue;

@ -1570,6 +1646,12 @@ private:
_networkStateUpdated = [networkStateUpdated copy];
_videoCapturer = videoCapturer;

_audioDevice = audioDevice;
std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> audioDeviceModule;
if (_audioDevice) {
audioDeviceModule = [_audioDevice getAudioDeviceModule];
}

tgcalls::VideoContentType _videoContentType;
switch (videoContentType) {
case OngoingGroupCallVideoContentTypeGeneric: {

@ -1777,19 +1859,64 @@ private:

return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
},
.minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
.minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
.createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
if (audioDeviceModule) {
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
[queue dispatch:^{
__strong GroupCallThreadLocalContext *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
strongSelf->_currentAudioDeviceModule = resultModule;
}
}];
return resultModule;
}
}
}));
}
return self;
}

- (void)dealloc {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}
}

- (void)stop {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}

if (_instance) {
_instance->stop();
_instance.reset();
}
}

- (void)setTone:(CallAudioTone * _Nullable)tone {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
if (currentAudioDeviceModule) {
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule, tone]() {
currentAudioDeviceModule->setTone([tone asTone]);
});
}
}
}

- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
if (isAudioSessionActive) {
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];

@ -1 +1 @@
Subproject commit 97d616abe1dae6214b11eae19b3ec25cb88d98ce
Subproject commit e7032ab6f7b305cbd1914e2d422646c2fd132b49