mirror of https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-06-16 05:55:20 +00:00

Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

This commit is contained in:
commit 02a56a0d03
@@ -414,6 +414,7 @@ official_apple_pay_merchants = [
"merchant.psbank.test.telegramios",
"merchant.psbank.prod.telegramios",
#"merchant.org.telegram.billinenet.test",
#"merchant.org.telegram.billinenet.prod",
#"merchant.org.telegram.portmone.test",
]
@@ -8261,14 +8261,16 @@ Sorry for the inconvenience.";
"Channel.AdminLog.TopicRenamedWithRemovedIcon" = "%1$@ renamed topic %2$@ to %3$@ and removed icon";
"Channel.AdminLog.TopicChangedIcon" = "%1$@ changed topic %2$@ icon to %3$@";
"Channel.AdminLog.TopicRemovedIcon" = "%1$@ removed topic %2$@ icon";
"Channel.AdminLog.TopicUnhidden" = "%1$@ unhid topic %2$@";
"Channel.AdminLog.TopicHidden" = "%1$@ hid topic %2$@";

"Attachment.Pasteboard" = "Clipboard";
"Attachment.DiscardPasteboardAlertText" = "Discard pasted items?";

"Undo.DeletedTopic" = "Topic Deleted";

"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than **%@** thread to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than **%@** threads to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";
"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";

"EmojiSearch.SearchTopicIconsPlaceholder" = "Search Topic Icons";
"EmojiSearch.SearchTopicIconsEmptyResult" = "No emoji found";

@@ -8299,8 +8301,8 @@ Sorry for the inconvenience.";

"Notification.ForumTopicHidden" = "Topic hidden";
"Notification.ForumTopicUnhidden" = "Topic unhidden";
"Notification.ForumTopicHiddenAuthor" = "%1$@ hid topic";
"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid topic";
"Notification.ForumTopicHiddenAuthor" = "%1$@ hid the topic";
"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid the topic";
"Notification.OverviewTopicHidden" = "%1$@ hid %2$@ %3$@";
"Notification.OverviewTopicUnhidden" = "%1$@ unhid %2$@ %3$@";

@@ -8405,6 +8407,8 @@ Sorry for the inconvenience.";

"GlobalAutodeleteSettings.AttemptDisabledGenericSelection" = "You can't enable auto-delete in this chat.";

"EmojiSearch.SearchEmojiPlaceholder" = "Search Emoji";
"StickersSearch.SearchStickersPlaceholder" = "Search Stickers";
"GifSearch.SearchGifPlaceholder" = "Search GIFs";

"MessageTimer.LargeShortSeconds_1" = "%@s";
"MessageTimer.LargeShortSeconds_2" = "%@s";
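Editor's note, not part of the commit: the "_1"/"_any" suffixes above are plural variants of one logical key. A rough, hypothetical illustration of how such a table could be consumed (the real app resolves plural categories per language via CLDR rules; key names here are only examples):

import Foundation

// Hypothetical helper: pick the "_1" variant for a count of one and the
// "_any" variant otherwise, then substitute the count for "%@".
func pluralString(_ table: [String: String], key: String, count: Int) -> String {
    let suffix = count == 1 ? "_1" : "_any"
    let format = table["\(key)\(suffix)"] ?? ""
    return format.replacingOccurrences(of: "%@", with: "\(count)")
}

let table = [
    "ChatList.MaxThreadPinsFinalText_1": "Sorry, you can't pin more than %@ topic to the top.",
    "ChatList.MaxThreadPinsFinalText_any": "Sorry, you can't pin more than %@ topics to the top.",
]
print(pluralString(table, key: "ChatList.MaxThreadPinsFinalText", count: 5))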
@@ -865,6 +865,8 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController

if let layout = self.validLayout {
self.tabContainerNode.update(size: CGSize(width: layout.size.width, height: 46.0), sideInset: layout.safeInsets.left, filters: self.tabContainerData?.0 ?? [], selectedFilter: self.chatListDisplayNode.effectiveContainerNode.currentItemFilter, isReordering: self.chatListDisplayNode.isReorderingFilters || (self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing && !self.chatListDisplayNode.didBeginSelectingChatsWhileEditing), isEditing: self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing, canReorderAllChats: self.isPremium, filtersLimit: self.tabContainerData?.2, transitionFraction: self.chatListDisplayNode.effectiveContainerNode.transitionFraction, presentationData: self.presentationData, transition: .immediate)

self.requestUpdateHeaderContent(transition: .immediate)
}

if self.isNodeLoaded {
@@ -721,7 +721,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
})
case .index:
var headerType: ChatListSearchItemHeaderType = .messages(location: nil)
if case .forum = location, let peer = peer.peer {
if case let .forum(peerId) = location, let peer = peer.peer, peer.id == peerId {
headerType = .messages(location: peer.compactDisplayTitle)
}
header = ChatListSearchItemHeader(type: headerType, theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)

@@ -738,15 +738,26 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
} else {
let index: EngineChatList.Item.Index
var chatThreadInfo: ChatListItemContent.ThreadInfo?
chatThreadInfo = nil
var displayAsMessage = false
switch location {
case .chatList:
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
case .forum:
if let threadId = message.threadId, let threadInfo = threadInfo {
chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
case let .forum(peerId):
let _ = peerId
let _ = threadInfo

displayAsMessage = true

if message.id.peerId == peerId {
if let threadId = message.threadId, let threadInfo = threadInfo {
chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
} else {
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
}
} else {
index = .chatList( EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
}
}
return ChatListItem(presentationData: presentationData, context: context, chatListLocation: location, filterData: nil, index: index, content: .peer(ChatListItemContent.PeerData(

@@ -762,7 +773,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
inputActivities: nil,
promoInfo: nil,
ignoreUnreadBadge: true,
displayAsMessage: false,
displayAsMessage: displayAsMessage,
hasFailedMessages: false,
forumTopicData: nil,
topForumTopicItems: [],

@@ -1217,7 +1228,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
}
let previousRecentlySearchedPeersState = Atomic<SearchedPeersState?>(value: nil)

let foundItems = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
let foundItems: Signal<([ChatListSearchEntry], Bool)?, NoError> = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
|> mapToSignal { [weak self] query, options, downloadItems -> Signal<([ChatListSearchEntry], Bool)?, NoError> in
if query == nil && options == nil && [.chats, .topics].contains(key) {
let _ = currentRemotePeers.swap(nil)
@@ -1464,55 +1475,113 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
updateSearchContexts { _ in
return ([:], true)
}
let foundRemoteMessages: Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError>

struct FoundRemoteMessages {
var messages: [EngineMessage]
var readCounters: [EnginePeer.Id: EnginePeerReadCounters]
var threadsData: [EngineMessage.Id: MessageHistoryThreadData]
var totalCount: Int32

init(messages: [EngineMessage], readCounters: [EnginePeer.Id: EnginePeerReadCounters], threadsData: [EngineMessage.Id: MessageHistoryThreadData], totalCount: Int32) {
self.messages = messages
self.readCounters = readCounters
self.threadsData = threadsData
self.totalCount = totalCount
}
}

let foundRemoteMessages: Signal<([FoundRemoteMessages], Bool), NoError>
if peersFilter.contains(.doNotSearchMessages) {
foundRemoteMessages = .single((([], [:], [:], 0), false))
foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], false))
} else {
if !finalQuery.isEmpty {
addAppLogEvent(postbox: context.account.postbox, type: "search_global_query")
}

let searchSignals: [Signal<(SearchMessagesResult, SearchMessagesState), NoError>] = searchLocations.map { searchLocation in
return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: 50)
let limit: Int32
#if DEBUG
limit = 50
#else
limit = 50
#endif
return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: limit)
}

let searchSignal = combineLatest(searchSignals)
|> map { results -> ChatListSearchMessagesResult in
let (result, updatedState) = results[0]
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
|> map { results -> [ChatListSearchMessagesResult] in
var mappedResults: [ChatListSearchMessagesResult] = []
for resultData in results {
let (result, updatedState) = resultData

mappedResults.append(ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState))
}
return mappedResults
}

let loadMore = searchContexts.get()
|> mapToSignal { searchContexts -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
if let searchContext = searchContexts[0], searchContext.result.hasMore {
if let _ = searchContext.loadMoreIndex {
return context.engine.messages.searchMessages(location: searchLocations[0], query: finalQuery, state: searchContext.result.state, limit: 80)
|> map { result, updatedState -> ChatListSearchMessagesResult in
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
}
|> mapToSignal { foundMessages -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
updateSearchContexts { previous in
let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
return ([0: updated], true)
|> mapToSignal { searchContexts -> Signal<([FoundRemoteMessages], Bool), NoError> in
for i in 0 ..< 2 {
if let searchContext = searchContexts[i], searchContext.result.hasMore {
var restResults: [Int: FoundRemoteMessages] = [:]
for j in 0 ..< 2 {
if j != i {
if let otherContext = searchContexts[j] {
restResults[j] = FoundRemoteMessages(messages: otherContext.result.messages, readCounters: otherContext.result.readStates, threadsData: otherContext.result.threadInfo, totalCount: otherContext.result.totalCount)
}
}
return .complete()
}
} else {
return .single(((searchContext.result.messages, searchContext.result.readStates, searchContext.result.threadInfo, searchContext.result.totalCount), false))
if let _ = searchContext.loadMoreIndex {
return context.engine.messages.searchMessages(location: searchLocations[i], query: finalQuery, state: searchContext.result.state, limit: 80)
|> map { result, updatedState -> ChatListSearchMessagesResult in
return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
}
|> mapToSignal { foundMessages -> Signal<([FoundRemoteMessages], Bool), NoError> in
updateSearchContexts { previous in
let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
var previous = previous
previous[i] = updated
return (previous, true)
}
return .complete()
}
} else {
var currentResults: [FoundRemoteMessages] = []
for i in 0 ..< 2 {
if let currentContext = searchContexts[i] {
currentResults.append(FoundRemoteMessages(messages: currentContext.result.messages, readCounters: currentContext.result.readStates, threadsData: currentContext.result.threadInfo, totalCount: currentContext.result.totalCount))
if currentContext.result.hasMore {
break
}
}
}
return .single((currentResults, false))
}
}
} else {
return .complete()
}

return .complete()
}

foundRemoteMessages = .single((([], [:], [:], 0), true))
foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], true))
|> then(
searchSignal
|> map { foundMessages -> (([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool) in
|> map { foundMessages -> ([FoundRemoteMessages], Bool) in
updateSearchContexts { _ in
return ([0: ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)], true)
var resultContexts: [Int: ChatListSearchMessagesContext] = [:]
for i in 0 ..< foundMessages.count {
resultContexts[i] = ChatListSearchMessagesContext(result: foundMessages[i], loadMoreIndex: nil)
}
return (resultContexts, true)
}
return ((foundMessages.messages, foundMessages.readStates, foundMessages.threadInfo, foundMessages.totalCount), false)
var result: [FoundRemoteMessages] = []
for i in 0 ..< foundMessages.count {
result.append(FoundRemoteMessages(messages: foundMessages[i].messages, readCounters: foundMessages[i].readStates, threadsData: foundMessages[i].threadInfo, totalCount: foundMessages[i].totalCount))
if foundMessages[i].hasMore {
break
}
}
return (result, false)
}
|> delay(0.2, queue: Queue.concurrentDefaultQueue())
|> then(loadMore)
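Editor's note, not part of the commit: the hunk above replaces a positional result tuple with the named FoundRemoteMessages struct, so one result set per search location can be carried as an array, and collection stops at the first set that still has more pages. A condensed, self-contained sketch of that pattern with simplified placeholder types:

// Placeholder type standing in for the per-location search result.
struct SearchResultSet {
    var messages: [String]
    var hasMore: Bool
}

// Collect result sets in location order, but stop after the first set that
// still has more pages, so paging proceeds one location at a time.
func collect(_ resultSets: [SearchResultSet]) -> [SearchResultSet] {
    var collected: [SearchResultSet] = []
    for resultSet in resultSets {
        collected.append(resultSet)
        if resultSet.hasMore {
            break
        }
    }
    return collected
}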
@@ -1766,25 +1835,33 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
var firstHeaderId: Int64?
if !foundRemotePeers.2 {
index = 0
for message in foundRemoteMessages.0.0 {
if searchState.deletedMessageIds.contains(message.id) {
continue
} else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
continue
}
let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
if firstHeaderId == nil {
firstHeaderId = headerId
}
var peer = EngineRenderedPeer(message: message)
if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
if let channelPeer = message.peers[migrationReference.peerId] {
peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
var existingMessageIds = Set<MessageId>()
for foundRemoteMessageSet in foundRemoteMessages.0 {
for message in foundRemoteMessageSet.messages {
if existingMessageIds.contains(message.id) {
continue
}
existingMessageIds.insert(message.id)

if searchState.deletedMessageIds.contains(message.id) {
continue
} else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
continue
}
let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
if firstHeaderId == nil {
firstHeaderId = headerId
}
var peer = EngineRenderedPeer(message: message)
if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
if let channelPeer = message.peers[migrationReference.peerId] {
peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
}
}

entries.append(.message(message, peer, foundRemoteMessageSet.readCounters[message.id.peerId], foundRemoteMessageSet.threadsData[message.id]?.info, presentationData, foundRemoteMessageSet.totalCount, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
index += 1
}

entries.append(.message(message, peer, foundRemoteMessages.0.1[message.id.peerId], foundRemoteMessages.0.2[message.id]?.info, presentationData, foundRemoteMessages.0.3, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
index += 1
}
}
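Editor's note, not part of the commit: the dedup loop above ensures a message found by more than one search location (e.g. both a forum search and a global search) becomes only one list entry. A minimal sketch of that idea with a simplified id type:

// Placeholder standing in for EngineMessage.Id.
struct MessageId: Hashable {
    var peerId: Int64
    var id: Int32
}

// Keep only the first occurrence of each id, preserving order across sets.
func uniqueMessages(_ resultSets: [[MessageId]]) -> [MessageId] {
    var existingMessageIds = Set<MessageId>()
    var unique: [MessageId] = []
    for resultSet in resultSets {
        for id in resultSet {
            if existingMessageIds.insert(id).inserted {
                unique.append(id)
            }
        }
    }
    return unique
}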
@@ -1807,16 +1884,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {

let loadMore = {
updateSearchContexts { previousMap in
guard let previous = previousMap[0] else {
return ([:], false)
var updatedMap = previousMap
var isSearching = false
for i in 0 ..< 2 {
if let previous = updatedMap[i] {
if previous.loadMoreIndex != nil {
continue
}
guard let last = previous.result.messages.last else {
continue
}
updatedMap[i] = ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)
isSearching = true

if previous.result.hasMore {
break
}
}
}
if previous.loadMoreIndex != nil {
return ([0: previous], false)
}
guard let last = previous.result.messages.last else {
return ([0: previous], false)
}
return ([0: ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)], true)
return (updatedMap, isSearching)
}
}
@@ -1919,6 +2005,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
self.listNode.clearHighlightAnimated(true)
})
})
chatListInteraction.isSearchMode = true

let listInteraction = ListMessageItemInteraction(openMessage: { [weak self] message, mode -> Bool in
guard let strongSelf = self else {

@@ -2023,25 +2110,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
})

self.searchDisposable.set((foundItems
|> deliverOnMainQueue).start(next: { [weak self] entriesAndFlags in
|> deliverOnMainQueue).start(next: { [weak self] foundItems in
if let strongSelf = self {
let previousSelectedMessageIds = previousSelectedMessages.swap(strongSelf.selectedMessages)

let isSearching = entriesAndFlags?.1 ?? false
var entriesAndFlags = foundItems?.0

let isSearching = foundItems?.1 ?? false
strongSelf._isSearching.set(isSearching)

if strongSelf.tagMask == .photoOrVideo {
var entries: [ChatListSearchEntry]? = entriesAndFlags?.0 ?? []
var entries: [ChatListSearchEntry]? = entriesAndFlags ?? []
if isSearching && (entries?.isEmpty ?? true) {
entries = nil
}
strongSelf.mediaNode.updateHistory(entries: entries, totalCount: 0, updateType: .Initial)
}

var entriesAndFlags = entriesAndFlags

var peers: [EnginePeer] = []
if let entries = entriesAndFlags?.0 {
if let entries = entriesAndFlags {
var filteredEntries: [ChatListSearchEntry] = []
for entry in entries {
if case let .localPeer(peer, _, _, _, _, _, _, _, _) = entry {

@@ -2053,16 +2140,16 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
}

if strongSelf.tagMask != nil || strongSelf.searchOptionsValue?.date != nil || strongSelf.searchOptionsValue?.peer != nil {
entriesAndFlags?.0 = filteredEntries
entriesAndFlags = filteredEntries
}
}

let previousEntries = previousSearchItems.swap(entriesAndFlags?.0)
let newEntries = entriesAndFlags?.0 ?? []
let previousEntries = previousSearchItems.swap(entriesAndFlags)
let newEntries = entriesAndFlags ?? []

let animated = (previousSelectedMessageIds == nil) != (strongSelf.selectedMessages == nil)
let firstTime = previousEntries == nil
var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags?.0 != nil, isEmpty: !isSearching && (entriesAndFlags?.0.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags != nil, isEmpty: !isSearching && (entriesAndFlags?.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
interaction.peerContextAction?(message, node, rect, gesture, location)
}, toggleExpandLocalResults: {
guard let strongSelf = self else {

@@ -3115,8 +3202,9 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
}, present: { _ in }, openForumThread: { _, _ in })
var isInlineMode = false
if case .topics = key {
isInlineMode = true
isInlineMode = false
}
interaction.isSearchMode = true
interaction.isInlineMode = isInlineMode

let items = (0 ..< 2).compactMap { _ -> ListViewItem? in
@@ -1295,8 +1295,8 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
return
}
let cachedPeerData = peerView.cachedData
if let cachedPeerData = cachedPeerData as? CachedUserData {
if let photo = cachedPeerData.photo, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
if let photo = maybePhoto, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)

@@ -1590,6 +1590,15 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}

let useChatListLayout: Bool
if case .chatList = item.chatListLocation {
useChatListLayout = true
} else if displayAsMessage {
useChatListLayout = true
} else {
useChatListLayout = false
}

let theme = item.presentationData.theme.chatList

var updatedTheme: PresentationTheme?

@@ -1653,7 +1662,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
let avatarLeftInset: CGFloat
if item.interaction.isInlineMode {
avatarLeftInset = 12.0
} else if case .forum = item.index {
} else if !useChatListLayout {
avatarLeftInset = 50.0
} else {
avatarLeftInset = 18.0 + avatarDiameter

@@ -2501,7 +2510,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var mainContentBoundsOffset: CGFloat
var mainContentAlpha: CGFloat = 1.0

if case .chatList = item.chatListLocation {
if useChatListLayout {
mainContentFrame = CGRect(origin: CGPoint(x: leftInset - 2.0, y: 0.0), size: CGSize(width: layout.contentSize.width, height: layout.contentSize.height))
mainContentBoundsOffset = mainContentFrame.origin.x

@@ -2694,7 +2703,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}

if let threadInfo = threadInfo {
if let threadInfo = threadInfo, !displayAsMessage {
let avatarIconView: ComponentHostView<Empty>
if let current = strongSelf.avatarIconView {
avatarIconView = current

@@ -2742,7 +2751,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
avatarIconView.removeFromSuperview()
}

if case .forum = item.index {
if !useChatListLayout {
strongSelf.avatarContainerNode.isHidden = true
} else {
strongSelf.avatarContainerNode.isHidden = false
@@ -95,6 +95,8 @@ public final class ChatListNodeInteraction {
public var searchTextHighightState: String?
var highlightedChatLocation: ChatListHighlightedLocation?

var isSearchMode: Bool = false

var isInlineMode: Bool = false
var inlineNavigationLocation: ChatListHighlightedLocation?
@@ -1411,10 +1411,14 @@ open class TextNode: ASDisplayNode {
context.setAllowsFontSubpixelQuantization(true)
context.setShouldSubpixelQuantizeFonts(true)

var blendMode: CGBlendMode = .normal

var clearRects: [CGRect] = []
if let layout = parameters as? TextNodeLayout {
if !isRasterizing || layout.backgroundColor != nil {
context.setBlendMode(.copy)
blendMode = .copy

context.setFillColor((layout.backgroundColor ?? UIColor.clear).cgColor)
context.fill(bounds)
}

@@ -1426,6 +1430,8 @@ open class TextNode: ASDisplayNode {

if let (textStrokeColor, textStrokeWidth) = layout.textStroke {
context.setBlendMode(.normal)
blendMode = .normal

context.setLineCap(.round)
context.setLineJoin(.round)
context.setStrokeColor(textStrokeColor.cgColor)

@@ -1487,7 +1493,28 @@ open class TextNode: ASDisplayNode {
if attributes["Attribute__EmbeddedItem"] != nil {
continue
}

var fixCoupleEmoji = false
if glyphCount == 2, let font = attributes["NSFont"] as? UIFont, font.fontName.contains("ColorEmoji"), let string = layout.attributedString {
let range = CTRunGetStringRange(run)
let substring = string.attributedSubstring(from: NSMakeRange(range.location, range.length)).string

let heart = Unicode.Scalar(0x2764)!
let man = Unicode.Scalar(0x1F468)!
let woman = Unicode.Scalar(0x1F469)!

if substring.unicodeScalars.contains(heart) && (substring.unicodeScalars.contains(man) || substring.unicodeScalars.contains(woman)) {
fixCoupleEmoji = true
}
}

if fixCoupleEmoji {
context.setBlendMode(.normal)
}
CTRunDraw(run, context, CFRangeMake(0, glyphCount))
if fixCoupleEmoji {
context.setBlendMode(blendMode)
}
}
}
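Editor's note, not part of the commit: the hunk above forces .normal blending around CTRunDraw for two-glyph color-emoji runs that form "couple" ZWJ sequences, then restores the previous blend mode. The detection predicate alone can be sketched in isolation like this:

// Minimal sketch of the scalar check: a run containing a heart plus a man
// or woman scalar is treated as a couple emoji ZWJ sequence.
let heart = Unicode.Scalar(0x2764)!   // the heavy black heart scalar
let man = Unicode.Scalar(0x1F468)!
let woman = Unicode.Scalar(0x1F469)!

func isCoupleEmoji(_ substring: String) -> Bool {
    let scalars = substring.unicodeScalars
    return scalars.contains(heart) && (scalars.contains(man) || scalars.contains(woman))
}

print(isCoupleEmoji("👩‍❤️‍👨")) // true
print(isCoupleEmoji("😀"))       // false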
@@ -517,7 +517,7 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
guard let status = self.status else {
return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)))
}
return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
return CMTimeRange(start: CMTime(seconds: status.timestamp, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
}

public func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
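Editor's note, not part of the commit: the fix above makes the reported time range start at the current playback timestamp rather than zero, so the picture-in-picture scrubber reflects the real position. A self-contained sketch, where timestamp and duration stand in for the player status fields:

import CoreMedia

// Build the PiP time range from the current playback position.
func timeRange(timestamp: Double, duration: Double) -> CMTimeRange {
    return CMTimeRange(
        start: CMTime(seconds: timestamp, preferredTimescale: CMTimeScale(30.0)),
        duration: CMTime(seconds: duration, preferredTimescale: CMTimeScale(30.0))
    )
}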
@@ -104,7 +104,9 @@ final class AppIconsDemoComponent: Component {
position = CGPoint(x: availableSize.width * 0.5, y: availableSize.height * 0.5)
}

view.center = position.offsetBy(dx: availableSize.width / 2.0, dy: 0.0)
if !self.animating {
view.center = position.offsetBy(dx: availableSize.width / 2.0, dy: 0.0)
}

i += 1
}

@@ -126,7 +128,10 @@ final class AppIconsDemoComponent: Component {
return availableSize
}

private var animating = false
func animateIn(availableSize: CGSize) {
self.animating = true

var i = 0
for view in self.imageViews {
let from: CGPoint

@@ -146,9 +151,17 @@ final class AppIconsDemoComponent: Component {
delay = 0.0
}

let initialPosition = view.layer.position
view.layer.position = initialPosition.offsetBy(dx: from.x, dy: from.y)

Queue.mainQueue().after(delay) {
view.layer.position = initialPosition
view.layer.animateScale(from: 3.0, to: 1.0, duration: 0.5, delay: 0.0, timingFunction: kCAMediaTimingFunctionSpring)
view.layer.animatePosition(from: from, to: CGPoint(), duration: 0.5, delay: 0.0, timingFunction: kCAMediaTimingFunctionSpring, additive: true)

if i == 2 {
self.animating = false
}
}

i += 1
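Editor's note, not part of the commit: the animating flag above keeps the layout pass from overwriting positions that the intro animation is still driving. A minimal sketch of that guard pattern, with a hypothetical DemoView type:

import CoreGraphics

// While an animation owns `center`, layout must not reset it.
final class DemoView {
    var animating = false
    var center = CGPoint.zero

    func updateLayout(target: CGPoint) {
        if !self.animating {
            self.center = target
        }
    }
}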
@@ -110,6 +110,9 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
private let joinButtonTitleNode: ImmediateTextNode
private let joinButtonBackgroundNode: ASImageNode

private var previewImageNode: ASImageNode?
private var previewImage: UIImage?

private var audioLevelView: VoiceBlobView?

private let micButton: HighlightTrackingButtonNode

@@ -139,6 +142,7 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
private let membersDisposable = MetaDisposable()
private let isMutedDisposable = MetaDisposable()
private let audioLevelDisposable = MetaDisposable()
private var imageDisposable: Disposable?

private var callState: PresentationGroupCallState?

@@ -233,6 +237,8 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
self.isMutedDisposable.dispose()
self.audioLevelGeneratorTimer?.invalidate()
self.updateTimer?.invalidate()
self.imageDisposable?.dispose()
self.audioLevelDisposable.dispose()
}

public override func didLoad() {

@@ -366,6 +372,11 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
self.avatarsContent = self.avatarsContext.update(peers: [], animated: false)
} else {
self.avatarsContent = self.avatarsContext.update(peers: data.topParticipants.map { EnginePeer($0.peer) }, animated: false)

if let imageDisposable = self.imageDisposable {
self.imageDisposable = nil
imageDisposable.dispose()
}
}

self.textNode.attributedText = NSAttributedString(string: membersText, font: Font.regular(13.0), textColor: self.theme.chat.inputPanel.secondaryTextColor)

@@ -484,6 +495,67 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
updateAudioLevels = true
}

#if DEBUG
if data.info.isStream {
if self.imageDisposable == nil {
let engine = self.context.engine
let info = data.info
self.imageDisposable = (engine.calls.getAudioBroadcastDataSource(callId: info.id, accessHash: info.accessHash)
|> mapToSignal { source -> Signal<Data?, NoError> in
guard let source else {
return .single(nil)
}

let time = engine.calls.requestStreamState(dataSource: source, callId: info.id, accessHash: info.accessHash)
|> map { state -> Int64? in
guard let state else {
return nil
}
return state.channels.first?.latestTimestamp
}

return time
|> mapToSignal { latestTimestamp -> Signal<Data?, NoError> in
guard let latestTimestamp else {
return .single(nil)
}

let durationMilliseconds: Int64 = 32000
let bufferOffset: Int64 = 1 * durationMilliseconds
let timestampId = (latestTimestamp / durationMilliseconds) * durationMilliseconds - bufferOffset

return engine.calls.getVideoBroadcastPart(dataSource: source, callId: info.id, accessHash: info.accessHash, timestampIdMilliseconds: timestampId, durationMilliseconds: durationMilliseconds, channelId: 2, quality: 0)
|> mapToSignal { result -> Signal<Data?, NoError> in
switch result.status {
case let .data(data):
return .single(data)
case .notReady, .resyncNeeded, .rejoinNeeded:
return .single(nil)
}
}
}
}
|> deliverOnMainQueue).start(next: { [weak self] data in
guard let self, let data else {
return
}

var image: UIImage?
for i in 0 ..< 100 {
image = UIImage(data: data.subdata(in: i ..< data.count))
if image != nil {
break
}
}
self.previewImage = image
if let (size, leftInset, rightInset) = self.validLayout {
self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
}
})
}
}
#endif

if let (size, leftInset, rightInset) = self.validLayout {
self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
}

@@ -609,6 +681,26 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
staticTransition.updateFrame(node: self.joinButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: joinButtonFrame.size))
staticTransition.updateFrame(node: self.joinButtonTitleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((joinButtonFrame.width - joinButtonTitleSize.width) / 2.0), y: floorToScreenPixels((joinButtonFrame.height - joinButtonTitleSize.height) / 2.0)), size: joinButtonTitleSize))

if let previewImage = self.previewImage {
let previewImageNode: ASImageNode
if let current = self.previewImageNode {
previewImageNode = current
} else {
previewImageNode = ASImageNode()
previewImageNode.clipsToBounds = true
previewImageNode.cornerRadius = 8.0
previewImageNode.contentMode = .scaleAspectFill
self.previewImageNode = previewImageNode
self.addSubnode(previewImageNode)
}
previewImageNode.image = previewImage
let previewSize = CGSize(width: 40.0, height: 40.0)
previewImageNode.frame = CGRect(origin: CGPoint(x: joinButtonFrame.minX - previewSize.width - 8.0, y: joinButtonFrame.minY + floor((joinButtonFrame.height - previewSize.height) / 2.0)), size: previewSize)
} else if let previewImageNode = self.previewImageNode {
self.previewImageNode = nil
previewImageNode.removeFromSupernode()
}

let micButtonSize = CGSize(width: 36.0, height: 36.0)
let micButtonFrame = CGRect(origin: CGPoint(x: size.width - rightInset - 7.0 - micButtonSize.width, y: floor((panelHeight - micButtonSize.height) / 2.0)), size: micButtonSize)
staticTransition.updateFrame(node: self.micButton, frame: micButtonFrame)
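Editor's note, not part of the commit: the DEBUG-only decode loop above assumes the broadcast part payload carries some container bytes before the actual image, so it tries successive offsets until UIImage manages to decode the remainder. Extracted as a standalone sketch:

import UIKit

// Try progressively larger offsets into the payload until one decodes.
func firstDecodableImage(in data: Data, maxOffset: Int = 100) -> UIImage? {
    for i in 0 ..< min(maxOffset, data.count) {
        if let image = UIImage(data: data.subdata(in: i ..< data.count)) {
            return image
        }
    }
    return nil
}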
@@ -15,180 +15,6 @@ import AccountContext
import DeviceProximity
import PhoneNumberFormat

final class PresentationCallToneRenderer {
let queue: Queue

let tone: PresentationCallTone

private let toneRenderer: MediaPlayerAudioRenderer
private var toneRendererAudioSession: MediaPlayerAudioSessionCustomControl?
private var toneRendererAudioSessionActivated = false
private let audioLevelPipe = ValuePipe<Float>()

init(tone: PresentationCallTone, completed: (() -> Void)? = nil) {
let queue = Queue.mainQueue()
self.queue = queue

self.tone = tone

var controlImpl: ((MediaPlayerAudioSessionCustomControl) -> Disposable)?

self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in
return controlImpl?(control) ?? EmptyDisposable
}), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})

controlImpl = { [weak self] control in
queue.async {
if let strongSelf = self {
strongSelf.toneRendererAudioSession = control
if strongSelf.toneRendererAudioSessionActivated {
control.activate()
}
}
}
return ActionDisposable {
}
}

let toneDataOffset = Atomic<Int>(value: 0)

let toneData = Atomic<Data?>(value: nil)
let reportedCompletion = Atomic<Bool>(value: false)

self.toneRenderer.beginRequestingFrames(queue: DispatchQueue.global(), takeFrame: {
var data = toneData.with { $0 }
if data == nil {
data = presentationCallToneData(tone)
if data != nil {
let _ = toneData.swap(data)
}
}

guard let toneData = data else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}

let toneDataMaxOffset: Int?
if let loopCount = tone.loopCount {
toneDataMaxOffset = (data?.count ?? 0) * loopCount
} else {
toneDataMaxOffset = nil
}

let frameSize = 44100

var takeOffset: Int?
let _ = toneDataOffset.modify { current in
takeOffset = current
return current + frameSize
}

if let takeOffset = takeOffset {
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset >= toneDataMaxOffset {
if !reportedCompletion.swap(true) {
Queue.mainQueue().after(1.0, {
completed?()
})
}
return .finished
}

var blockBuffer: CMBlockBuffer?

let bytes = malloc(frameSize)!
toneData.withUnsafeBytes { dataBuffer -> Void in
guard let dataBytes = dataBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
return
}
var takenCount = 0
while takenCount < frameSize {
let dataOffset = (takeOffset + takenCount) % toneData.count
let dataCount = min(frameSize - takenCount, toneData.count - dataOffset)
//print("take from \(dataOffset) count: \(dataCount)")
memcpy(bytes.advanced(by: takenCount), dataBytes.advanced(by: dataOffset), dataCount)
takenCount += dataCount

if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + takenCount >= toneDataMaxOffset {
break
}
}

if takenCount < frameSize {
//print("fill with zeros from \(takenCount) count: \(frameSize - takenCount)")
memset(bytes.advanced(by: takenCount), 0, frameSize - takenCount)
}
}

/*if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + frameSize > toneDataMaxOffset {
let validCount = max(0, toneDataMaxOffset - takeOffset)
memset(bytes.advanced(by: validCount), 0, frameSize - validCount)
print("clear from \(validCount) count: \(frameSize - validCount)")
}*/

let status = CMBlockBufferCreateWithMemoryBlock(allocator: nil, memoryBlock: bytes, blockLength: frameSize, blockAllocator: nil, customBlockSource: nil, offsetToData: 0, dataLength: frameSize, flags: 0, blockBufferOut: &blockBuffer)
if status != noErr {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}

let sampleCount = frameSize / 2

let pts = CMTime(value: Int64(takeOffset / 2), timescale: 44100)
var timingInfo = CMSampleTimingInfo(duration: CMTime(value: Int64(sampleCount), timescale: 44100), presentationTimeStamp: pts, decodeTimeStamp: pts)
var sampleBuffer: CMSampleBuffer?
var sampleSize = frameSize
guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}

if let sampleBuffer = sampleBuffer {
return .frame(MediaTrackFrame(type: .audio, sampleBuffer: sampleBuffer, resetDecoder: false, decoded: true))
} else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
} else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
})
self.toneRenderer.start()
self.toneRenderer.setRate(1.0)
}

deinit {
assert(self.queue.isCurrent())
self.toneRenderer.stop()
}

func setAudioSessionActive(_ value: Bool) {
if self.toneRendererAudioSessionActivated != value {
self.toneRendererAudioSessionActivated = value
if let control = self.toneRendererAudioSession {
if value {
self.toneRenderer.setRate(1.0)
control.activate()
} else {
self.toneRenderer.setRate(0.0)
control.deactivate()
}
}
}
}
}

public final class PresentationCallImpl: PresentationCall {
public let context: AccountContext
private let audioSession: ManagedAudioSession

@@ -221,6 +47,7 @@ public final class PresentationCallImpl: PresentationCall {
private var callContextState: OngoingCallContextState?
private var ongoingContext: OngoingCallContext?
private var ongoingContextStateDisposable: Disposable?
private var sharedAudioDevice: OngoingCallContext.AudioDevice?
private var requestedVideoAspect: Float?
private var reception: Int32?
private var receptionDisposable: Disposable?

@@ -282,7 +109,7 @@ public final class PresentationCallImpl: PresentationCall {
private var audioSessionActiveDisposable: Disposable?
private var isAudioSessionActive = false

private var toneRenderer: PresentationCallToneRenderer?
private var currentTone: PresentationCallTone?

private var droppedCall = false
private var dropCallKitCallTimer: SwiftSignalKit.Timer?

@@ -463,6 +290,12 @@ public final class PresentationCallImpl: PresentationCall {
}
})

if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
self.sharedAudioDevice = nil
} else {
self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
}

self.audioSessionActiveDisposable = (self.audioSessionActive.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let strongSelf = self {

@@ -702,7 +535,7 @@ public final class PresentationCallImpl: PresentationCall {

let updatedConnections = connections

let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec)
let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec, audioDevice: self.sharedAudioDevice)
self.ongoingContext = ongoingContext
ongoingContext.setIsMuted(self.isMutedValue)
if let requestedVideoAspect = self.requestedVideoAspect {

@@ -864,26 +697,19 @@ public final class PresentationCallImpl: PresentationCall {
break
}
}
if tone != self.toneRenderer?.tone {
if let tone = tone {
if "".isEmpty {
let _ = tone
} else {
let toneRenderer = PresentationCallToneRenderer(tone: tone)
self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
}
} else {
self.toneRenderer = nil
}
if tone != self.currentTone {
self.currentTone = tone
self.sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
return OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
})
}
}

private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
self.toneRenderer?.setAudioSessionActive(value)
}
self.sharedAudioDevice?.setIsAudioSessionActive(value)
}

public func answer() {
@@ -4,12 +4,12 @@ import AVFoundation
private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
let outputSettings: [String: Any] = [
AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
AVSampleRateKey: 44100.0 as NSNumber,
AVSampleRateKey: 48000.0 as NSNumber,
AVLinearPCMBitDepthKey: 16 as NSNumber,
AVLinearPCMIsNonInterleaved: false as NSNumber,
AVLinearPCMIsFloatKey: false as NSNumber,
AVLinearPCMIsBigEndianKey: false as NSNumber,
AVNumberOfChannelsKey: 2 as NSNumber
AVNumberOfChannelsKey: 1 as NSNumber
]

let nsName: NSString = name as NSString

@@ -63,9 +63,9 @@ private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Dat
}

if !addSilenceDuration.isZero {
let sampleRate = 44100
let sampleRate = 48000
let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
let numberOfChannels = 2
let numberOfChannels = 1
let numberOfBytes = numberOfSamples * 2 * numberOfChannels

data.append(Data(count: numberOfBytes))
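Editor's note, not part of the commit: the hunk above moves tone decoding from 44.1 kHz stereo to 48 kHz mono. For 16-bit PCM the appended-silence size is samples * 2 bytes * channels, so the byte math works out as in this runnable sketch:

// 0.5 s of silence at 48 kHz mono, 16-bit: 24000 samples * 2 bytes * 1
// channel = 48000 bytes (the old 44.1 kHz stereo path produced 88200).
let sampleRate = 48000
let addSilenceDuration = 0.5
let numberOfChannels = 1
let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
let numberOfBytes = numberOfSamples * 2 * numberOfChannels
print(numberOfBytes) // 48000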
@@ -433,6 +433,15 @@ private extension CurrentImpl {
break
}
}

func setTone(tone: OngoingGroupCallContext.Tone?) {
switch self {
case let .call(callContext):
callContext.setTone(tone: tone)
case .mediaStream:
break
}
}
}

public func groupCallLogsPath(account: Account) -> String {

@@ -823,7 +832,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

private var didStartConnectingOnce: Bool = false
private var didConnectOnce: Bool = false
private var toneRenderer: PresentationCallToneRenderer?

private var videoCapturer: OngoingCallVideoCapturer?
private var useFrontCamera: Bool = true

@@ -1841,7 +1849,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if isConnecting {
strongSelf.beginTone(tone: .groupConnecting)
} else {
strongSelf.toneRenderer = nil
strongSelf.beginTone(tone: nil)
}
}

@@ -2470,15 +2478,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
self.toneRenderer?.setAudioSessionActive(value)
}
}

private func beginTone(tone: PresentationCallTone) {
if "".isEmpty {
return
}
if self.isStream {
private func beginTone(tone: PresentationCallTone?) {
if self.isStream, let tone {
switch tone {
case .groupJoined, .groupLeft:
return

@@ -2486,21 +2490,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
}
}
var completed: (() -> Void)?
let toneRenderer = PresentationCallToneRenderer(tone: tone, completed: {
completed?()
})
completed = { [weak self, weak toneRenderer] in
Queue.mainQueue().async {
guard let strongSelf = self, let toneRenderer = toneRenderer, toneRenderer === strongSelf.toneRenderer else {
return
}
strongSelf.toneRenderer = nil
}
if let tone, let toneData = presentationCallToneData(tone) {
self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
samples: toneData,
sampleRate: 48000,
loopCount: tone.loopCount ?? 100000
))
} else {
self.genericCallContext?.setTone(tone: nil)
}

self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
}

public func playTone(_ tone: PresentationGroupCallTone) {
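Editor's note, not part of the commit: the refactor above retires the dedicated PresentationCallToneRenderer and instead hands decoded PCM samples to the call context's own audio device. A minimal sketch of that conversion step, with a placeholder Tone type standing in for OngoingGroupCallContext.Tone:

// Placeholder mirroring the shape of the engine's tone payload.
struct Tone {
    var samples: [UInt8]
    var sampleRate: Int
    var loopCount: Int
}

// 48000 Hz matches the new decode path; a nil loopCount is approximated
// with a large repeat count, as in the diff.
func makeTone(samples: [UInt8]?, loopCount: Int?) -> Tone? {
    guard let samples else { return nil }
    return Tone(samples: samples, sampleRate: 48000, loopCount: loopCount ?? 100000)
}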
@@ -50,6 +50,34 @@ public enum CachedPeerAutoremoveTimeout: Equatable, PostboxCoding {
}
}

public enum CachedPeerProfilePhoto: Equatable, PostboxCoding {
case unknown
case known(TelegramMediaImage?)

public init(decoder: PostboxDecoder) {
switch decoder.decodeInt32ForKey("_v", orElse: 0) {
case 1:
self = .known(decoder.decodeObjectForKey("v", decoder: { TelegramMediaImage(decoder: $0) }) as? TelegramMediaImage)
default:
self = .unknown
}
}

public func encode(_ encoder: PostboxEncoder) {
switch self {
case .unknown:
encoder.encodeInt32(0, forKey: "_v")
case let .known(value):
encoder.encodeInt32(1, forKey: "_v")
if let value = value {
encoder.encodeObject(value, forKey: "v")
} else {
encoder.encodeNil(forKey: "v")
}
}
}
}
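Editor's note, not part of the commit: the enum above exists because an Optional alone cannot distinguish "never fetched" from "fetched, and there is no photo". A self-contained sketch of the three effective states, with a Photo placeholder standing in for TelegramMediaImage:

struct Photo: Equatable { var id: Int64 }

enum CachedPeerProfilePhoto: Equatable {
    case unknown           // never fetched: a request is still needed
    case known(Photo?)     // fetched: either a photo or confirmed absence
}

func needsFetch(_ photo: CachedPeerProfilePhoto) -> Bool {
    if case .known = photo {
        return false
    }
    return true
}

print(needsFetch(.unknown))    // true
print(needsFetch(.known(nil))) // false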
public struct CachedPremiumGiftOption: Equatable, PostboxCoding {
public let months: Int32
public let currency: String

@ -123,7 +151,7 @@ public final class CachedUserData: CachedPeerData {
public let hasScheduledMessages: Bool
public let autoremoveTimeout: CachedPeerAutoremoveTimeout
public let themeEmoticon: String?
public let photo: TelegramMediaImage?
public let photo: CachedPeerProfilePhoto
public let premiumGiftOptions: [CachedPremiumGiftOption]
public let voiceMessagesAvailable: Bool

@ -145,14 +173,14 @@ public final class CachedUserData: CachedPeerData {
self.hasScheduledMessages = false
self.autoremoveTimeout = .unknown
self.themeEmoticon = nil
self.photo = nil
self.photo = .unknown
self.premiumGiftOptions = []
self.voiceMessagesAvailable = true
self.peerIds = Set()
self.messageIds = Set()
}

public init(about: String?, botInfo: BotInfo?, peerStatusSettings: PeerStatusSettings?, pinnedMessageId: MessageId?, isBlocked: Bool, commonGroupCount: Int32, voiceCallsAvailable: Bool, videoCallsAvailable: Bool, callsPrivate: Bool, canPinMessages: Bool, hasScheduledMessages: Bool, autoremoveTimeout: CachedPeerAutoremoveTimeout, themeEmoticon: String?, photo: TelegramMediaImage?, premiumGiftOptions: [CachedPremiumGiftOption], voiceMessagesAvailable: Bool) {
public init(about: String?, botInfo: BotInfo?, peerStatusSettings: PeerStatusSettings?, pinnedMessageId: MessageId?, isBlocked: Bool, commonGroupCount: Int32, voiceCallsAvailable: Bool, videoCallsAvailable: Bool, callsPrivate: Bool, canPinMessages: Bool, hasScheduledMessages: Bool, autoremoveTimeout: CachedPeerAutoremoveTimeout, themeEmoticon: String?, photo: CachedPeerProfilePhoto, premiumGiftOptions: [CachedPremiumGiftOption], voiceMessagesAvailable: Bool) {
self.about = about
self.botInfo = botInfo
self.peerStatusSettings = peerStatusSettings
@ -204,12 +232,8 @@ public final class CachedUserData: CachedPeerData {
self.autoremoveTimeout = decoder.decodeObjectForKey("artv", decoder: CachedPeerAutoremoveTimeout.init(decoder:)) as? CachedPeerAutoremoveTimeout ?? .unknown
self.themeEmoticon = decoder.decodeOptionalStringForKey("te")

if let photo = decoder.decodeObjectForKey("ph", decoder: { TelegramMediaImage(decoder: $0) }) as? TelegramMediaImage {
self.photo = photo
} else {
self.photo = nil
}

self.photo = decoder.decodeObjectForKey("phv", decoder: CachedPeerProfilePhoto.init(decoder:)) as? CachedPeerProfilePhoto ?? .unknown

self.premiumGiftOptions = decoder.decodeObjectArrayWithDecoderForKey("pgo") as [CachedPremiumGiftOption]
self.voiceMessagesAvailable = decoder.decodeInt32ForKey("vma", orElse: 0) != 0

@ -261,12 +285,8 @@ public final class CachedUserData: CachedPeerData {
encoder.encodeNil(forKey: "te")
}

if let photo = self.photo {
encoder.encodeObject(photo, forKey: "ph")
} else {
encoder.encodeNil(forKey: "ph")
}

encoder.encodeObject(self.photo, forKey: "phv")

encoder.encodeObjectArray(self.premiumGiftOptions, forKey: "pgo")
encoder.encodeInt32(self.voiceMessagesAvailable ? 1 : 0, forKey: "vma")
}

@ -338,7 +358,7 @@ public final class CachedUserData: CachedPeerData {
return CachedUserData(about: self.about, botInfo: self.botInfo, peerStatusSettings: self.peerStatusSettings, pinnedMessageId: self.pinnedMessageId, isBlocked: self.isBlocked, commonGroupCount: self.commonGroupCount, voiceCallsAvailable: self.voiceCallsAvailable, videoCallsAvailable: self.videoCallsAvailable, callsPrivate: self.callsPrivate, canPinMessages: self.canPinMessages, hasScheduledMessages: self.hasScheduledMessages, autoremoveTimeout: self.autoremoveTimeout, themeEmoticon: themeEmoticon, photo: self.photo, premiumGiftOptions: self.premiumGiftOptions, voiceMessagesAvailable: self.voiceMessagesAvailable)
}

public func withUpdatedPhoto(_ photo: TelegramMediaImage?) -> CachedUserData {
public func withUpdatedPhoto(_ photo: CachedPeerProfilePhoto) -> CachedUserData {
return CachedUserData(about: self.about, botInfo: self.botInfo, peerStatusSettings: self.peerStatusSettings, pinnedMessageId: self.pinnedMessageId, isBlocked: self.isBlocked, commonGroupCount: self.commonGroupCount, voiceCallsAvailable: self.voiceCallsAvailable, videoCallsAvailable: self.videoCallsAvailable, callsPrivate: self.callsPrivate, canPinMessages: self.canPinMessages, hasScheduledMessages: self.hasScheduledMessages, autoremoveTimeout: self.autoremoveTimeout, themeEmoticon: self.themeEmoticon, photo: photo, premiumGiftOptions: self.premiumGiftOptions, voiceMessagesAvailable: self.voiceMessagesAvailable)
}
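Note the storage migration in the hunks above: the photo moves from the legacy key "ph" (a bare TelegramMediaImage) to "phv" (the wrapper), and any record without the new key decodes as .unknown, which triggers a refetch instead of misreading stale data. A hedged sketch of that decode-with-default idiom, reusing the hypothetical CachedValue from the sketch above with a plain dictionary standing in for PostboxDecoder:

// Sketch only: decode under the new key, defaulting to .unknown.
func decodePhotoState(_ record: [String: Any]) -> CachedValue<String> {
    guard record.keys.contains("phv") else {
        // Old records only carry the legacy "ph" key; treat them as
        // unknown so the photo is re-requested once after migration.
        return .unknown
    }
    return .known(record["phv"] as? String)
}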
@ -739,7 +739,11 @@ public extension TelegramEngine.EngineData.Item {
preconditionFailure()
}
if let cachedData = view.cachedPeerData as? CachedUserData {
return .known(cachedData.photo)
if case let .known(value) = cachedData.photo {
return .known(value)
} else {
return .unknown
}
} else if let cachedData = view.cachedPeerData as? CachedGroupData {
return .known(cachedData.photo)
} else if let cachedData = view.cachedPeerData as? CachedChannelData {

@ -117,7 +117,10 @@ private func mergedState(transaction: Transaction, seedConfiguration: SeedConfig

var peerIdsSet: Set<PeerId> = Set()
var readStates: [PeerId: CombinedPeerReadState] = [:]
var threadInfo:[MessageId : MessageHistoryThreadData] = [:]
var threadInfo: [MessageId : MessageHistoryThreadData] = [:]
if let state = state {
threadInfo = state.threadInfo
}

var renderedMessages: [Message] = []
for message in messages {

@ -35,10 +35,12 @@ public enum AdminLogEventAction {
public struct ForumTopicInfo {
public var info: EngineMessageHistoryThread.Info
public var isClosed: Bool
public var isHidden: Bool

public init(info: EngineMessageHistoryThread.Info, isClosed: Bool) {
public init(info: EngineMessageHistoryThread.Info, isClosed: Bool, isHidden: Bool) {
self.info = info
self.isClosed = isClosed
self.isHidden = isHidden
}
}

@ -302,17 +304,17 @@ func channelAdminLogEvents(postbox: Postbox, network: Network, peerId: PeerId, m
let prevInfo: AdminLogEventAction.ForumTopicInfo
switch prevTopic {
case let .forumTopic(flags, _, _, title, iconColor, iconEmojiId, _, _, _, _, _, _, _, _, _):
prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0)
prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0, isHidden: (flags & (1 << 6)) != 0)
case .forumTopicDeleted:
prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false)
prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false, isHidden: false)
}

let newInfo: AdminLogEventAction.ForumTopicInfo
switch newTopic {
case let .forumTopic(flags, _, _, title, iconColor, iconEmojiId, _, _, _, _, _, _, _, _, _):
newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0)
newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0, isHidden: (flags & (1 << 6)) != 0)
case .forumTopicDeleted:
newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false)
newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false, isHidden: false)
}

action = .editTopic(prevInfo: prevInfo, newInfo: newInfo)
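Both branches above derive booleans from the same flags field of the forumTopic constructor: bit 2 carries the closed state and bit 6 the hidden state. A small sketch of that bit-test idiom, with an OptionSet shown as the more idiomatic Swift alternative (the OptionSet names are illustrative, not from the commit):

// Raw bit tests, exactly as in the hunk above.
let flags: Int32 = (1 << 2) | (1 << 6)
let isClosed = (flags & (1 << 2)) != 0   // bit 2: topic closed
let isHidden = (flags & (1 << 6)) != 0   // bit 6: topic hidden

// The same flags wrapped in an OptionSet (illustrative only).
struct ForumTopicFlags: OptionSet {
    let rawValue: Int32
    static let closed = ForumTopicFlags(rawValue: 1 << 2)
    static let hidden = ForumTopicFlags(rawValue: 1 << 6)
}
assert(ForumTopicFlags(rawValue: flags).contains([.closed, .hidden]))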
@ -267,7 +267,7 @@ func _internal_fetchAndUpdateCachedPeerData(accountPeerId: PeerId, peerId rawPee
return previous.withUpdatedAbout(userFullAbout).withUpdatedBotInfo(botInfo).withUpdatedCommonGroupCount(userFullCommonChatsCount).withUpdatedIsBlocked(isBlocked).withUpdatedVoiceCallsAvailable(voiceCallsAvailable).withUpdatedVideoCallsAvailable(videoCallsAvailable).withUpdatedCallsPrivate(callsPrivate).withUpdatedCanPinMessages(canPinMessages).withUpdatedPeerStatusSettings(peerStatusSettings).withUpdatedPinnedMessageId(pinnedMessageId).withUpdatedHasScheduledMessages(hasScheduledMessages)
.withUpdatedAutoremoveTimeout(autoremoveTimeout)
.withUpdatedThemeEmoticon(userFullThemeEmoticon)
.withUpdatedPhoto(photo)
.withUpdatedPhoto(.known(photo))
.withUpdatedPremiumGiftOptions(premiumGiftOptions)
.withUpdatedVoiceMessagesAvailable(voiceMessagesAvailable)
}

@ -290,6 +290,9 @@ public enum PresentationResourceKey: Int32 {
case chatKeyboardActionButtonWebAppIcon

case chatGeneralThreadIcon
case chatGeneralThreadIncomingIcon
case chatGeneralThreadOutgoingIcon
case chatGeneralThreadFreeIcon

case uploadToneIcon
}

@ -1325,4 +1325,22 @@ public struct PresentationResourcesChat {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/GeneralIcon"), color: theme.rootController.navigationBar.controlColor)
})
}

public static func chatGeneralThreadIncomingIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatGeneralThreadIncomingIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.incoming.accentTextColor)
})
}

public static func chatGeneralThreadOutgoingIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatGeneralThreadOutgoingIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.outgoing.accentTextColor)
})
}

public static func chatGeneralThreadFreeIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatGeneralThreadFreeIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.mediaOverlayControlColors.foregroundColor)
})
}
}
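Each new accessor above goes through theme.image(_:_:), which appears to memoize the generated image under the PresentationResourceKey raw value so the tinting closure runs once per theme. A hedged sketch of that key-based cache shape (ImageCache is hypothetical, not the commit's type):

// Sketch only: a miniature per-theme image cache keyed by Int32,
// mirroring the memoization behind theme.image(_:_:).
import UIKit

final class ImageCache {
    private var storage: [Int32: UIImage] = [:]

    func image(_ key: Int32, _ generate: () -> UIImage?) -> UIImage? {
        if let cached = storage[key] {
            return cached
        }
        let generated = generate()
        if let generated = generated {
            storage[key] = generated   // generate at most once per key
        }
        return generated
    }
}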
@ -123,8 +123,8 @@ final class ChatAvatarNavigationNode: ASDisplayNode {
return
}
let cachedPeerData = peerView.cachedData
if let cachedPeerData = cachedPeerData as? CachedUserData {
if let photo = cachedPeerData.photo, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
if let photo = maybePhoto, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, "header"), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)

@ -501,6 +501,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
}
}

let presentationData = context.sharedContext.currentPresentationData.with { $0 }
return EmojiPagerContentComponent(
id: "stickers",
context: context,
@ -537,7 +538,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
itemLayoutType: .detailed,
itemContentUniqueId: nil,
warpContentsOnEdges: false,
displaySearchWithPlaceholder: "Search Stickers",
displaySearchWithPlaceholder: presentationData.strings.StickersSearch_SearchStickersPlaceholder,
searchInitiallyHidden: false,
searchIsPlaceholderOnly: true,
emptySearchResults: nil,
@ -748,6 +749,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
return !savedGifs.isEmpty
}

let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let gifItems: Signal<EntityKeyboardGifContent, NoError>
switch subject {
case .recent:
@ -769,7 +771,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
items: items,
isLoading: false,
loadMoreToken: nil,
displaySearchWithPlaceholder: "Search GIFs",
displaySearchWithPlaceholder: presentationData.strings.GifSearch_SearchGifPlaceholder,
searchInitiallyHidden: false
)
)
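Both placeholder changes above swap hardcoded English literals for entries resolved through presentationData.strings, so the entity-keyboard search fields follow the app language; currentPresentationData.with { $0 } appears to take a synchronized snapshot of the current presentation state. A compressed usage sketch (the context value is assumed to be in scope):

// Sketch only: resolving localized copy once, then passing it on.
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let stickersPlaceholder = presentationData.strings.StickersSearch_SearchStickersPlaceholder
let gifPlaceholder = presentationData.strings.GifSearch_SearchGifPlaceholder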
@ -1186,6 +1186,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
}
}

var replyMessage: Message?
for attribute in item.message.attributes {
if let attribute = attribute as? InlineBotMessageAttribute {
var inlineBotNameString: String?
@ -1205,51 +1206,48 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
viaBotApply = viaBotLayout(TextNodeLayoutArguments(attributedString: botString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: max(0, availableContentWidth), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
}
}

if let replyAttribute = attribute as? ReplyMessageAttribute, let replyMessage = item.message.associatedMessages[replyAttribute.messageId] {
var hasReply = true

if case let .replyThread(replyThreadMessage) = item.chatLocation, replyThreadMessage.messageId == replyAttribute.messageId {
hasReply = false
}

if case .peer = item.chatLocation, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum) {
if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
hasReply = false
}

threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
controllerInteraction: item.controllerInteraction,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if hasReply {
replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if let replyAttribute = attribute as? ReplyMessageAttribute {
replyMessage = item.message.associatedMessages[replyAttribute.messageId]
} else if let attribute = attribute as? ReplyMarkupMessageAttribute, attribute.flags.contains(.inline), !attribute.rows.isEmpty {
replyMarkup = attribute
}
}

var hasReply = replyMessage != nil
if case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
hasReply = false
}

threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
controllerInteraction: item.controllerInteraction,
type: .standalone,
threadId: item.message.threadId ?? 1,
parentMessage: item.message,
constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if let replyMessage = replyMessage, hasReply {
replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies {
for attribute in item.message.attributes {
if let attribute = attribute as? SourceReferenceMessageAttribute {
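The restructure above first collects replyMessage in the attribute loop and then decides once, outside the loop, whether to render a reply header: in a forum peer the header is suppressed when the reply target is the topic's root post (the target's id equals the message's threadId), because the thread info banner already identifies the topic. A condensed sketch of that rule with simplified, hypothetical types:

// Sketch only: suppress the reply header when a message merely
// replies to its own topic's root post. Types are simplified.
struct MsgStub {
    let id: Int32          // message id within the peer
    let threadId: Int64?   // forum topic id the message belongs to
}

func shouldShowReplyHeader(message: MsgStub, replyTarget: MsgStub?, isForum: Bool) -> Bool {
    guard let replyTarget = replyTarget else { return false }
    if isForum, let threadId = message.threadId, Int64(replyTarget.id) == threadId {
        // Replying to the topic root: the thread banner already names
        // the topic, so a reply header would be redundant.
        return false
    }
    return true
}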
@ -1737,6 +1737,9 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
if replyMessage != nil {
displayHeader = true
}
if !displayHeader, case .peer = item.chatLocation, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
displayHeader = true
}
}

let firstNodeTopPosition: ChatMessageBubbleRelativePosition
@ -1963,8 +1966,8 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
}

var hasReply = replyMessage != nil
if !isInstantVideo, let replyMessage = replyMessage, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
if !isInstantVideo, case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
hasReply = false
}

@ -1980,7 +1983,7 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
context: item.context,
controllerInteraction: item.controllerInteraction,
type: .bubble(incoming: incoming),
message: replyMessage,
threadId: item.message.threadId ?? 1,
parentMessage: item.message,
constrainedSize: CGSize(width: maximumNodeWidth - layoutConstants.text.bubbleInsets.left - layoutConstants.text.bubbleInsets.right, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,

@ -531,8 +531,8 @@ final class ChatMessageAvatarHeaderNode: ListViewItemHeaderNode {
return
}
let cachedPeerData = peerView.cachedData
if let cachedPeerData = cachedPeerData as? CachedUserData {
if let photo = cachedPeerData.photo, let video = photo.videoRepresentations.last, let peerReference = PeerReference(peer) {
if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
if let photo = maybePhoto, let video = photo.videoRepresentations.last, let peerReference = PeerReference(peer) {
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, "\(Int32.random(in: 0 ..< Int32.max))"), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)

@ -614,6 +614,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
}
}

var replyMessage: Message?
for attribute in item.message.attributes {
if let attribute = attribute as? InlineBotMessageAttribute {
var inlineBotNameString: String?
@ -634,49 +635,48 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
}
}

if let replyAttribute = attribute as? ReplyMessageAttribute, let replyMessage = item.message.associatedMessages[replyAttribute.messageId] {
var hasReply = true

if case let .replyThread(replyThreadMessage) = item.chatLocation, replyThreadMessage.messageId == replyAttribute.messageId {
hasReply = false
}

if case .peer = item.chatLocation, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum) {
if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
hasReply = false
}
threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
controllerInteraction: item.controllerInteraction,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if hasReply {
replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if let replyAttribute = attribute as? ReplyMessageAttribute {
replyMessage = item.message.associatedMessages[replyAttribute.messageId]
} else if let attribute = attribute as? ReplyMarkupMessageAttribute, attribute.flags.contains(.inline), !attribute.rows.isEmpty {
replyMarkup = attribute
}
}

var hasReply = replyMessage != nil
if case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
hasReply = false
}

threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
controllerInteraction: item.controllerInteraction,
type: .standalone,
threadId: item.message.threadId ?? 1,
parentMessage: item.message,
constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if let replyMessage = replyMessage, hasReply {
replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
presentationData: item.presentationData,
strings: item.presentationData.strings,
context: item.context,
type: .standalone,
message: replyMessage,
parentMessage: item.message,
constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
animationCache: item.controllerInteraction.presentationContext.animationCache,
animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
))
}

if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies {
for attribute in item.message.attributes {
if let attribute = attribute as? SourceReferenceMessageAttribute {

@ -183,7 +183,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
let context: AccountContext
let controllerInteraction: ChatControllerInteraction
let type: ChatMessageThreadInfoType
let message: Message
let threadId: Int64
let parentMessage: Message
let constrainedSize: CGSize
let animationCache: AnimationCache?
@ -195,7 +195,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
context: AccountContext,
controllerInteraction: ChatControllerInteraction,
type: ChatMessageThreadInfoType,
message: Message,
threadId: Int64,
parentMessage: Message,
constrainedSize: CGSize,
animationCache: AnimationCache?,
@ -206,7 +206,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
self.context = context
self.controllerInteraction = controllerInteraction
self.type = type
self.message = message
self.threadId = threadId
self.parentMessage = parentMessage
self.constrainedSize = constrainedSize
self.animationCache = animationCache
@ -318,7 +318,6 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
var topicIconId: Int64?
var topicIconColor: Int32 = 0
if let _ = arguments.parentMessage.threadId, let channel = arguments.parentMessage.peers[arguments.parentMessage.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), let threadInfo = arguments.parentMessage.associatedThreadInfo {

topicTitle = threadInfo.title
topicIconId = threadInfo.icon
topicIconColor = threadInfo.iconColor
@ -327,9 +326,10 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
let backgroundColor: UIColor
let textColor: UIColor
let arrowIcon: UIImage?
let generalThreadIcon: UIImage?
switch arguments.type {
case let .bubble(incoming):
if topicIconId == nil, topicIconColor != 0, incoming {
if topicIconId == nil, topicIconColor != 0, incoming, arguments.threadId != 1 {
let colors = topicIconColors(for: topicIconColor)
backgroundColor = UIColor(rgb: colors.0.last ?? 0x000000)
textColor = UIColor(rgb: colors.1.first ?? 0x000000)
@ -345,13 +345,15 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
arrowIcon = PresentationResourcesChat.chatBubbleArrowOutgoingImage(arguments.presentationData.theme.theme)
}
}
generalThreadIcon = incoming ? PresentationResourcesChat.chatGeneralThreadIncomingIcon(arguments.presentationData.theme.theme) : PresentationResourcesChat.chatGeneralThreadOutgoingIcon(arguments.presentationData.theme.theme)
case .standalone:
textColor = .white
textColor = arguments.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor
backgroundColor = .white
arrowIcon = PresentationResourcesChat.chatBubbleArrowFreeImage(arguments.presentationData.theme.theme)
generalThreadIcon = PresentationResourcesChat.chatGeneralThreadFreeIcon(arguments.presentationData.theme.theme)
}

let placeholderColor: UIColor = arguments.message.effectivelyIncoming(arguments.context.account.peerId) ? arguments.presentationData.theme.theme.chat.message.incoming.mediaPlaceholderColor : arguments.presentationData.theme.theme.chat.message.outgoing.mediaPlaceholderColor
let placeholderColor: UIColor = arguments.parentMessage.effectivelyIncoming(arguments.context.account.peerId) ? arguments.presentationData.theme.theme.chat.message.incoming.mediaPlaceholderColor : arguments.presentationData.theme.theme.chat.message.outgoing.mediaPlaceholderColor

let text = NSAttributedString(string: topicTitle, font: textFont, textColor: textColor)

@ -390,9 +392,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
}

node.pressed = {
if let threadId = arguments.message.threadId {
arguments.controllerInteraction.navigateToThreadMessage(arguments.parentMessage.id.peerId, threadId, arguments.parentMessage.id)
}
arguments.controllerInteraction.navigateToThreadMessage(arguments.parentMessage.id.peerId, arguments.threadId, arguments.parentMessage.id)
}

if node.lineRects != lineRects {
@ -480,7 +480,9 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
}

let titleTopicIconContent: EmojiStatusComponent.Content
if let fileId = topicIconId, fileId != 0 {
if arguments.threadId == 1 {
titleTopicIconContent = .image(image: generalThreadIcon)
} else if let fileId = topicIconId, fileId != 0 {
titleTopicIconContent = .animation(content: .customEmoji(fileId: fileId), size: CGSize(width: 36.0, height: 36.0), placeholderColor: arguments.presentationData.theme.theme.list.mediaPlaceholderColor, themeColor: arguments.presentationData.theme.theme.list.itemAccentColor, loopMode: .count(1))
} else {
titleTopicIconContent = .topic(title: String(topicTitle.prefix(1)), color: topicIconColor, size: CGSize(width: 22.0, height: 22.0))
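Throughout these hunks threadId == 1 acts as the marker for a forum's General topic, which receives the static GeneralTopicIcon asset instead of a custom-emoji or colored-letter avatar. A compressed sketch of the three-way icon choice above (assuming id 1 means General, as the code implies; names are illustrative):

// Sketch only: three-way topic icon selection mirroring the hunk above.
enum TopicIcon {
    case generalImage          // static "general" glyph (threadId == 1)
    case customEmoji(Int64)    // animated custom-emoji file id
    case letter(Character?)    // colored first-letter fallback
}

func topicIcon(threadId: Int64, iconFileId: Int64?, title: String) -> TopicIcon {
    if threadId == 1 {
        return .generalImage
    } else if let fileId = iconFileId, fileId != 0 {
        return .customEmoji(fileId)
    } else {
        return .letter(title.first)
    }
}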
@ -1714,7 +1714,14 @@ struct ChatRecentActionsEntry: Comparable, Identifiable {
"Channel.AdminLog.TopicRemovedIcon" = "%1$@ removed topic %2$@ icon";*/

let authorTitle: String = author.flatMap(EnginePeer.init)?.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder) ?? ""
if prevInfo.isClosed != newInfo.isClosed {
if prevInfo.isHidden != newInfo.isHidden {
appendAttributedText(text: newInfo.isHidden ? self.presentationData.strings.Channel_AdminLog_TopicHidden(authorTitle, newInfo.info.title) : self.presentationData.strings.Channel_AdminLog_TopicUnhidden(authorTitle, newInfo.info.title), generateEntities: { index in
if index == 0, let author = author {
return [.TextMention(peerId: author.id)]
}
return []
}, to: &text, entities: &entities)
} else if prevInfo.isClosed != newInfo.isClosed {
appendAttributedText(text: newInfo.isClosed ? self.presentationData.strings.Channel_AdminLog_TopicClosed(authorTitle, newInfo.info.title) : self.presentationData.strings.Channel_AdminLog_TopicReopened(authorTitle, newInfo.info.title), generateEntities: { index in
if index == 0, let author = author {
return [.TextMention(peerId: author.id)]
@ -8,6 +8,7 @@ import Postbox
import TelegramAudio
import AccountContext
import AVKit
import UniversalMediaPlayer

public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInPictureSampleBufferPlaybackDelegate {
public let content: UniversalVideoContent
@ -37,6 +38,9 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
public var customClose: (() -> Void)?
public var controlsAreShowingUpdated: ((Bool) -> Void)?

private var statusDisposable: Disposable?
private var status: MediaPlayerStatus?

public init(postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal<Bool, NoError> = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) {
self.content = content
self.defaultExpand = expand
@ -124,6 +128,16 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
strongSelf.dismiss()
closeImpl?()
})

self.statusDisposable = (self.videoNode.status
|> deliverOnMainQueue).start(next: { [weak self] status in
self?.status = status
})
}

deinit {
self.shouldBeDismissedDisposable?.dispose()
self.statusDisposable?.dispose()
}

override public func didLoad() {
@ -194,7 +208,10 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
}

public func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 10.0, preferredTimescale: CMTimeScale(30.0)))
guard let status = self.status else {
return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)))
}
return CMTimeRange(start: CMTime(seconds: status.timestamp, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
}

public func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
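Instead of a fixed ten-second dummy range, the delegate above now reports the player's real position and duration from the observed MediaPlayerStatus, which keeps the Picture-in-Picture scrubber honest; while no status has arrived it falls back to a zero-length range. A sketch of building a CMTimeRange from such a timestamp/duration pair:

// Sketch only: mapping an optional (timestamp, duration) status into
// the CMTimeRange an AVPictureInPictureController delegate returns.
import CoreMedia

func playbackRange(timestamp: Double?, duration: Double?) -> CMTimeRange {
    let scale = CMTimeScale(30.0)
    guard let timestamp = timestamp, let duration = duration else {
        // No status yet: report an empty range rather than a fake one.
        return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: scale),
                           duration: CMTime(seconds: 0.0, preferredTimescale: scale))
    }
    return CMTimeRange(start: CMTime(seconds: timestamp, preferredTimescale: scale),
                       duration: CMTime(seconds: duration, preferredTimescale: scale))
}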
@ -400,9 +400,22 @@ public final class OngoingGroupCallContext {
public var incomingVideoStats: [String: IncomingVideoStats]
}

public final class Tone {
public let samples: Data
public let sampleRate: Int
public let loopCount: Int

public init(samples: Data, sampleRate: Int, loopCount: Int) {
self.samples = samples
self.sampleRate = sampleRate
self.loopCount = loopCount
}
}

private final class Impl {
let queue: Queue
let context: GroupCallThreadLocalContext
let audioDevice: SharedCallAudioDevice?

let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))

@ -421,6 +434,13 @@ public final class OngoingGroupCallContext {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
self.queue = queue

self.audioDevice = nil
/*#if DEBUG
self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
#else
self.audioDevice = nil
#endif*/

var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?

@ -526,7 +546,8 @@ public final class OngoingGroupCallContext {
enableNoiseSuppression: enableNoiseSuppression,
disableAudioInput: disableAudioInput,
preferX264: preferX264,
logPath: logPath
logPath: logPath,
audioDevice: self.audioDevice
)

let queue = self.queue
@ -580,6 +601,7 @@ public final class OngoingGroupCallContext {
return
}
#if os(iOS)
self.audioDevice?.setManualAudioSessionIsActive(isActive)
self.context.setManualAudioSessionIsActive(isActive)
#endif
}))
@ -884,6 +906,17 @@ public final class OngoingGroupCallContext {
completion(Stats(incomingVideoStats: incomingVideoStats))
})
}

func setTone(tone: Tone?) {
let mappedTone = tone.flatMap { tone in
CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
}
if let audioDevice = self.audioDevice {
audioDevice.setTone(mappedTone)
} else {
self.context.setTone(mappedTone)
}
}
}

private let queue = Queue()
@ -1075,4 +1108,10 @@ public final class OngoingGroupCallContext {
impl.getStats(completion: completion)
}
}

public func setTone(tone: Tone?) {
self.impl.with { impl in
impl.setTone(tone: tone)
}
}
}
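Tone is raw PCM: samples holds 16-bit frames (the Objective-C side copies them straight into a std::vector<int16_t>), sampleRate is in Hz, and loopCount is the number of repetitions. A hedged sketch of producing such a buffer, here a short sine beep; the frequency, amplitude, and duration are arbitrary illustration values:

// Sketch only: synthesizing 16-bit PCM samples for a Tone.
// 440 Hz, 0.2 s, mono; all constants chosen for illustration.
import Foundation

func makeBeepTone(sampleRate: Int = 48000) -> OngoingGroupCallContext.Tone {
    let duration = 0.2
    let frequency = 440.0
    let frameCount = Int(duration * Double(sampleRate))
    var samples: [Int16] = []
    samples.reserveCapacity(frameCount)
    for i in 0 ..< frameCount {
        let phase = 2.0 * Double.pi * frequency * Double(i) / Double(sampleRate)
        samples.append(Int16(sin(phase) * 0.5 * Double(Int16.max)))
    }
    let data = samples.withUnsafeBufferPointer { Data(buffer: $0) }
    return OngoingGroupCallContext.Tone(samples: data, sampleRate: sampleRate, loopCount: 1)
}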
@ -706,6 +706,40 @@ public final class OngoingCallContext {
}
}

public final class Tone {
public let samples: Data
public let sampleRate: Int
public let loopCount: Int

public init(samples: Data, sampleRate: Int, loopCount: Int) {
self.samples = samples
self.sampleRate = sampleRate
self.loopCount = loopCount
}
}

public final class AudioDevice {
let impl: SharedCallAudioDevice

public static func create() -> AudioDevice? {
return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
}

private init(impl: SharedCallAudioDevice) {
self.impl = impl
}

public func setIsAudioSessionActive(_ isActive: Bool) {
self.impl.setManualAudioSessionIsActive(isActive)
}

public func setTone(tone: Tone?) {
self.impl.setTone(tone.flatMap { tone in
CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
})
}
}

public static func setupAudioSession() {
OngoingCallThreadLocalContextWebrtc.setupAudioSession()
}
@ -751,7 +785,7 @@ public final class OngoingCallContext {

private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?

private let audioDevice: SharedCallAudioDevice?
private let audioDevice: AudioDevice?

public static func versions(includeExperimental: Bool, includeReference: Bool) -> [(version: String, supportsVideo: Bool)] {
#if os(iOS) && DEBUG && false
@ -771,7 +805,7 @@ public final class OngoingCallContext {
}
}

public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?) {
public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?, audioDevice: AudioDevice?) {
let _ = setupLogs
OngoingCallThreadLocalContext.applyServerConfig(serializedData)

@ -782,12 +816,6 @@ public final class OngoingCallContext {
self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
let logPath = self.logPath

let audioDevice: SharedCallAudioDevice?
if !"".isEmpty {
audioDevice = SharedCallAudioDevice()
} else {
audioDevice = nil
}
self.audioDevice = audioDevice

let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
@ -910,7 +938,7 @@ public final class OngoingCallContext {
callSessionManager.sendSignalingData(internalId: internalId, data: data)
}
}
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice)
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice?.impl)

strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
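The new AudioDevice wrapper lets a caller own one shared WebRTC audio device for the lifetime of a call and drive it directly; the init gains a matching audioDevice: parameter. A usage sketch based only on the API added above (when to create the device relative to the call is not specified by this commit):

// Sketch only: exercising the AudioDevice API introduced above.
let audioDevice = OngoingCallContext.AudioDevice.create()

// Pass the same instance to OngoingCallContext's init via the new
// audioDevice: parameter, then control it from the call flow:
audioDevice?.setIsAudioSessionActive(true)

// A Tone mirrors the group-call Tone: raw 16-bit PCM plus sample
// rate and loop count. 960 zero frames here are just a placeholder.
let silence = OngoingCallContext.Tone(samples: Data(count: 960 * 2), sampleRate: 48000, loopCount: 1)
audioDevice?.setTone(tone: silence)
audioDevice?.setTone(tone: nil)   // clear the tone again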
@ -11,14 +11,26 @@
#define UIView NSView
#endif

@interface CallAudioTone : NSObject

@property (nonatomic, strong, readonly) NSData * _Nonnull samples;
@property (nonatomic, readonly) NSInteger sampleRate;
@property (nonatomic, readonly) NSInteger loopCount;

- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount;

@end

@interface SharedCallAudioDevice : NSObject

- (instancetype _Nonnull)init;
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;

+ (void)setupAudioSession;

- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;

- (void)setTone:(CallAudioTone * _Nullable)tone;

@end

@interface OngoingCallConnectionDescriptionWebrtc : NSObject
@ -385,12 +397,15 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
enableNoiseSuppression:(bool)enableNoiseSuppression
disableAudioInput:(bool)disableAudioInput
preferX264:(bool)preferX264
logPath:(NSString * _Nonnull)logPath;
logPath:(NSString * _Nonnull)logPath
audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice;

- (void)stop;

- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;

- (void)setTone:(CallAudioTone * _Nullable)tone;

- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;

- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
@ -43,6 +43,28 @@
#import "platform/darwin/TGRTCCVPixelBuffer.h"
#include "rtc_base/logging.h"

@implementation CallAudioTone

- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount {
self = [super init];
if (self != nil) {
_samples = samples;
_sampleRate = sampleRate;
_loopCount = loopCount;
}
return self;
}

- (std::shared_ptr<tgcalls::CallAudioTone>)asTone {
std::vector<int16_t> data;
data.resize(_samples.length / 2);
memcpy(data.data(), _samples.bytes, _samples.length);

return std::make_shared<tgcalls::CallAudioTone>(std::move(data), (int)_sampleRate, (int)_loopCount);
}

@end

namespace tgcalls {

class SharedAudioDeviceModule {
@ -50,51 +72,67 @@ public:
virtual ~SharedAudioDeviceModule() = default;

public:
virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() = 0;
virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() = 0;
virtual void start() = 0;
};

}

class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
public:
SharedAudioDeviceModuleImpl() {
if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
} else {
tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
});
}
SharedAudioDeviceModuleImpl(bool disableAudioInput) {
RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
}

virtual ~SharedAudioDeviceModuleImpl() override {
if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
if (_audioDeviceModule->Playing()) {
_audioDeviceModule->StopPlayout();
_audioDeviceModule->StopRecording();
}
_audioDeviceModule = nullptr;
} else {
tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
if (_audioDeviceModule->Playing()) {
_audioDeviceModule->StopPlayout();
_audioDeviceModule->StopRecording();
}
_audioDeviceModule = nullptr;
});
}
}

public:
virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() override {
virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() override {
return _audioDeviceModule;
}

virtual void start() override {
RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());

_audioDeviceModule->Init();
if (!_audioDeviceModule->Playing()) {
_audioDeviceModule->InitPlayout();
//_audioDeviceModule->InitRecording();
_audioDeviceModule->InternalStartPlayout();
//_audioDeviceModule->InternalStartRecording();
}
}

private:
rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _audioDeviceModule;
};

@implementation SharedCallAudioDevice {
std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
}

- (instancetype _Nonnull)init {
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
self = [super init];
if (self != nil) {
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), []() mutable {
return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl());
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl(disableRecording));
}));
}
return self;
@ -104,6 +142,12 @@ private:
_audioDeviceModule.reset();
}

- (void)setTone:(CallAudioTone * _Nullable)tone {
_audioDeviceModule->perform([tone](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
audioDeviceModule->audioDeviceModule()->setTone([tone asTone]);
});
}

- (std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>>)getAudioDeviceModule {
return _audioDeviceModule;
}
@ -128,6 +172,12 @@ private:
[[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
}
[RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;

if (isAudioSessionActive) {
_audioDeviceModule->perform([](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
audioDeviceModule->start();
});
}
}

@end
@ -800,6 +850,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
bool _useManualAudioSessionControl;
SharedCallAudioDevice *_audioDevice;

rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
rtc::Thread *_currentAudioDeviceModuleThread;

OngoingCallNetworkTypeWebrtc _networkType;
NSTimeInterval _callReceiveTimeout;
NSTimeInterval _callRingTimeout;
@ -1213,11 +1266,20 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}];
},
.createAudioDeviceModule = [audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
.createAudioDeviceModule = [weakSelf, queue, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
if (audioDeviceModule) {
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
return rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
strongSelf->_currentAudioDeviceModule = resultModule;
}
}];
return resultModule;
}
}
});
@ -1232,6 +1294,14 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
}

if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}

if (_tgVoip != NULL) {
[self stop:nil];
}
@ -1537,6 +1607,11 @@ private:

int _nextSinkId;
NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;

rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
rtc::Thread *_currentAudioDeviceModuleThread;

SharedCallAudioDevice * _audioDevice;
}

@end
@ -1558,7 +1633,8 @@ private:
enableNoiseSuppression:(bool)enableNoiseSuppression
disableAudioInput:(bool)disableAudioInput
preferX264:(bool)preferX264
logPath:(NSString * _Nonnull)logPath {
logPath:(NSString * _Nonnull)logPath
audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
self = [super init];
if (self != nil) {
_queue = queue;
@ -1570,6 +1646,12 @@ private:
_networkStateUpdated = [networkStateUpdated copy];
_videoCapturer = videoCapturer;

_audioDevice = audioDevice;
std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> audioDeviceModule;
if (_audioDevice) {
audioDeviceModule = [_audioDevice getAudioDeviceModule];
}

tgcalls::VideoContentType _videoContentType;
switch (videoContentType) {
case OngoingGroupCallVideoContentTypeGeneric: {
@ -1777,19 +1859,64 @@ private:

return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
},
.minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
.minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
.createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
if (audioDeviceModule) {
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
[queue dispatch:^{
__strong GroupCallThreadLocalContext *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
strongSelf->_currentAudioDeviceModule = resultModule;
}
}];
return resultModule;
}
}
}));
}
return self;
}

- (void)dealloc {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}
}

- (void)stop {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
_currentAudioDeviceModule = nullptr;
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
});
_currentAudioDeviceModuleThread = nullptr;
}

if (_instance) {
_instance->stop();
_instance.reset();
}
}

- (void)setTone:(CallAudioTone * _Nullable)tone {
if (_currentAudioDeviceModuleThread) {
auto currentAudioDeviceModule = _currentAudioDeviceModule;
if (currentAudioDeviceModule) {
_currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule, tone]() {
currentAudioDeviceModule->setTone([tone asTone]);
});
}
}
}

- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
if (isAudioSessionActive) {
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
@ -1 +1 @@
Subproject commit 97d616abe1dae6214b11eae19b3ec25cb88d98ce
Subproject commit e7032ab6f7b305cbd1914e2d422646c2fd132b49