Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 02a56a0d03: Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
@@ -414,6 +414,7 @@ official_apple_pay_merchants = [
     "merchant.psbank.test.telegramios",
     "merchant.psbank.prod.telegramios",
     #"merchant.org.telegram.billinenet.test",
+    #"merchant.org.telegram.billinenet.prod",
     #"merchant.org.telegram.portmone.test",
 ]
 
@@ -8261,14 +8261,16 @@ Sorry for the inconvenience.";
 "Channel.AdminLog.TopicRenamedWithRemovedIcon" = "%1$@ renamed topic %2$@ to %3$@ and removed icon";
 "Channel.AdminLog.TopicChangedIcon" = "%1$@ changed topic %2$@ icon to %3$@";
 "Channel.AdminLog.TopicRemovedIcon" = "%1$@ removed topic %2$@ icon";
+"Channel.AdminLog.TopicUnhidden" = "%1$@ unhid topic %2$@";
+"Channel.AdminLog.TopicHidden" = "%1$@ hid topic %2$@";
 
 "Attachment.Pasteboard" = "Clipboard";
 "Attachment.DiscardPasteboardAlertText" = "Discard pasted items?";
 
 "Undo.DeletedTopic" = "Topic Deleted";
 
-"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than **%@** thread to the top. Unpin some that are currently pinned.";
-"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than **%@** threads to the top. Unpin some that are currently pinned.";
+"ChatList.MaxThreadPinsFinalText_1" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";
+"ChatList.MaxThreadPinsFinalText_any" = "Sorry, you can't pin more than %@ topics to the top. Unpin some that are currently pinned.";
 
 "EmojiSearch.SearchTopicIconsPlaceholder" = "Search Topic Icons";
 "EmojiSearch.SearchTopicIconsEmptyResult" = "No emoji found";
@@ -8299,8 +8301,8 @@ Sorry for the inconvenience.";
 
 "Notification.ForumTopicHidden" = "Topic hidden";
 "Notification.ForumTopicUnhidden" = "Topic unhidden";
-"Notification.ForumTopicHiddenAuthor" = "%1$@ hid topic";
-"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid topic";
+"Notification.ForumTopicHiddenAuthor" = "%1$@ hid the topic";
+"Notification.ForumTopicUnhiddenAuthor" = "%1$@ unhid the topic";
 "Notification.OverviewTopicHidden" = "%1$@ hid %2$@ %3$@";
 "Notification.OverviewTopicUnhidden" = "%1$@ unhid %2$@ %3$@";
 
@@ -8405,6 +8407,8 @@ Sorry for the inconvenience.";
 "GlobalAutodeleteSettings.AttemptDisabledGenericSelection" = "You can't enable auto-delete in this chat.";
 
 "EmojiSearch.SearchEmojiPlaceholder" = "Search Emoji";
+"StickersSearch.SearchStickersPlaceholder" = "Search Stickers";
+"GifSearch.SearchGifPlaceholder" = "Search GIFs";
 
 "MessageTimer.LargeShortSeconds_1" = "%@s";
 "MessageTimer.LargeShortSeconds_2" = "%@s";
@@ -865,6 +865,8 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
 
 if let layout = self.validLayout {
     self.tabContainerNode.update(size: CGSize(width: layout.size.width, height: 46.0), sideInset: layout.safeInsets.left, filters: self.tabContainerData?.0 ?? [], selectedFilter: self.chatListDisplayNode.effectiveContainerNode.currentItemFilter, isReordering: self.chatListDisplayNode.isReorderingFilters || (self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing && !self.chatListDisplayNode.didBeginSelectingChatsWhileEditing), isEditing: self.chatListDisplayNode.effectiveContainerNode.currentItemNode.currentState.editing, canReorderAllChats: self.isPremium, filtersLimit: self.tabContainerData?.2, transitionFraction: self.chatListDisplayNode.effectiveContainerNode.transitionFraction, presentationData: self.presentationData, transition: .immediate)
+
+    self.requestUpdateHeaderContent(transition: .immediate)
 }
 
 if self.isNodeLoaded {
@@ -721,7 +721,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
 })
 case .index:
     var headerType: ChatListSearchItemHeaderType = .messages(location: nil)
-    if case .forum = location, let peer = peer.peer {
+    if case let .forum(peerId) = location, let peer = peer.peer, peer.id == peerId {
         headerType = .messages(location: peer.compactDisplayTitle)
     }
     header = ChatListSearchItemHeader(type: headerType, theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)
@@ -738,15 +738,26 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
 } else {
     let index: EngineChatList.Item.Index
     var chatThreadInfo: ChatListItemContent.ThreadInfo?
+    chatThreadInfo = nil
+    var displayAsMessage = false
     switch location {
     case .chatList:
         index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
-    case .forum:
-        if let threadId = message.threadId, let threadInfo = threadInfo {
-            chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
-            index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
+    case let .forum(peerId):
+        let _ = peerId
+        let _ = threadInfo
+        displayAsMessage = true
+
+        if message.id.peerId == peerId {
+            if let threadId = message.threadId, let threadInfo = threadInfo {
+                chatThreadInfo = ChatListItemContent.ThreadInfo(id: threadId, info: threadInfo, isOwnedByMe: false, isClosed: false, isHidden: false)
+                index = .forum(pinnedIndex: .none, timestamp: message.index.timestamp, threadId: threadId, namespace: message.index.id.namespace, id: message.index.id.id)
+            } else {
+                index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
+            }
         } else {
-            index = .chatList( EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
+            index = .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: message.index))
         }
     }
     return ChatListItem(presentationData: presentationData, context: context, chatListLocation: location, filterData: nil, index: index, content: .peer(ChatListItemContent.PeerData(
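In the reworked `.forum` branch above, a found message only gets a forum (topic) index when it actually belongs to the forum being searched; anything else falls back to a plain chat-list index, and forum results are now always rendered as messages. A standalone sketch of that decision, using simplified stand-ins for the engine types (the real `EngineChatList.Item.Index` carries timestamps, namespaces, and thread info):

    // Simplified stand-ins for the engine types used in the hunk above.
    enum SearchLocation { case chatList, forum(peerId: Int64) }
    struct FoundMessage { var peerId: Int64; var threadId: Int64? }
    enum EntryIndex { case chatList, forum(threadId: Int64) }

    func entryIndex(location: SearchLocation, message: FoundMessage) -> (index: EntryIndex, displayAsMessage: Bool) {
        switch location {
        case .chatList:
            return (.chatList, false)
        case let .forum(peerId):
            // Forum search results are always shown as plain messages.
            if message.peerId == peerId, let threadId = message.threadId {
                return (.forum(threadId: threadId), true)
            } else {
                // Message from outside the searched forum: ordinary index.
                return (.chatList, true)
            }
        }
    }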
@@ -762,7 +773,7 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
 inputActivities: nil,
 promoInfo: nil,
 ignoreUnreadBadge: true,
-displayAsMessage: false,
+displayAsMessage: displayAsMessage,
 hasFailedMessages: false,
 forumTopicData: nil,
 topForumTopicItems: [],
@@ -1217,7 +1228,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
 }
 let previousRecentlySearchedPeersState = Atomic<SearchedPeersState?>(value: nil)
 
-let foundItems = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
+let foundItems: Signal<([ChatListSearchEntry], Bool)?, NoError> = combineLatest(queue: .mainQueue(), searchQuery, searchOptions, downloadItems)
 |> mapToSignal { [weak self] query, options, downloadItems -> Signal<([ChatListSearchEntry], Bool)?, NoError> in
     if query == nil && options == nil && [.chats, .topics].contains(key) {
         let _ = currentRemotePeers.swap(nil)
@@ -1464,55 +1475,113 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
 updateSearchContexts { _ in
     return ([:], true)
 }
-let foundRemoteMessages: Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError>
+struct FoundRemoteMessages {
+    var messages: [EngineMessage]
+    var readCounters: [EnginePeer.Id: EnginePeerReadCounters]
+    var threadsData: [EngineMessage.Id: MessageHistoryThreadData]
+    var totalCount: Int32
+
+    init(messages: [EngineMessage], readCounters: [EnginePeer.Id: EnginePeerReadCounters], threadsData: [EngineMessage.Id: MessageHistoryThreadData], totalCount: Int32) {
+        self.messages = messages
+        self.readCounters = readCounters
+        self.threadsData = threadsData
+        self.totalCount = totalCount
+    }
+}
+
+let foundRemoteMessages: Signal<([FoundRemoteMessages], Bool), NoError>
 if peersFilter.contains(.doNotSearchMessages) {
-    foundRemoteMessages = .single((([], [:], [:], 0), false))
+    foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], false))
 } else {
     if !finalQuery.isEmpty {
         addAppLogEvent(postbox: context.account.postbox, type: "search_global_query")
     }
 
     let searchSignals: [Signal<(SearchMessagesResult, SearchMessagesState), NoError>] = searchLocations.map { searchLocation in
-        return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: 50)
+        let limit: Int32
+        #if DEBUG
+        limit = 50
+        #else
+        limit = 50
+        #endif
+        return context.engine.messages.searchMessages(location: searchLocation, query: finalQuery, state: nil, limit: limit)
     }
 
     let searchSignal = combineLatest(searchSignals)
-    |> map { results -> ChatListSearchMessagesResult in
-        let (result, updatedState) = results[0]
-        return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
+    |> map { results -> [ChatListSearchMessagesResult] in
+        var mappedResults: [ChatListSearchMessagesResult] = []
+        for resultData in results {
+            let (result, updatedState) = resultData
+
+            mappedResults.append(ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState))
        }
        return mappedResults
     }
 
     let loadMore = searchContexts.get()
-    |> mapToSignal { searchContexts -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
-        if let searchContext = searchContexts[0], searchContext.result.hasMore {
-            if let _ = searchContext.loadMoreIndex {
-                return context.engine.messages.searchMessages(location: searchLocations[0], query: finalQuery, state: searchContext.result.state, limit: 80)
-                |> map { result, updatedState -> ChatListSearchMessagesResult in
-                    return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
-                }
-                |> mapToSignal { foundMessages -> Signal<(([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool), NoError> in
-                    updateSearchContexts { previous in
-                        let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
-                        return ([0: updated], true)
-                    }
-                    return .complete()
-                }
-            } else {
-                return .single(((searchContext.result.messages, searchContext.result.readStates, searchContext.result.threadInfo, searchContext.result.totalCount), false))
+    |> mapToSignal { searchContexts -> Signal<([FoundRemoteMessages], Bool), NoError> in
+        for i in 0 ..< 2 {
+            if let searchContext = searchContexts[i], searchContext.result.hasMore {
+                var restResults: [Int: FoundRemoteMessages] = [:]
+                for j in 0 ..< 2 {
+                    if j != i {
+                        if let otherContext = searchContexts[j] {
+                            restResults[j] = FoundRemoteMessages(messages: otherContext.result.messages, readCounters: otherContext.result.readStates, threadsData: otherContext.result.threadInfo, totalCount: otherContext.result.totalCount)
+                        }
+                    }
+                }
+                if let _ = searchContext.loadMoreIndex {
+                    return context.engine.messages.searchMessages(location: searchLocations[i], query: finalQuery, state: searchContext.result.state, limit: 80)
+                    |> map { result, updatedState -> ChatListSearchMessagesResult in
+                        return ChatListSearchMessagesResult(query: finalQuery, messages: result.messages.map({ EngineMessage($0) }).sorted(by: { $0.index > $1.index }), readStates: result.readStates.mapValues { EnginePeerReadCounters(state: $0, isMuted: false) }, threadInfo: result.threadInfo, hasMore: !result.completed, totalCount: result.totalCount, state: updatedState)
+                    }
+                    |> mapToSignal { foundMessages -> Signal<([FoundRemoteMessages], Bool), NoError> in
+                        updateSearchContexts { previous in
+                            let updated = ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)
+                            var previous = previous
+                            previous[i] = updated
+                            return (previous, true)
+                        }
+                        return .complete()
+                    }
+                } else {
+                    var currentResults: [FoundRemoteMessages] = []
+                    for i in 0 ..< 2 {
+                        if let currentContext = searchContexts[i] {
+                            currentResults.append(FoundRemoteMessages(messages: currentContext.result.messages, readCounters: currentContext.result.readStates, threadsData: currentContext.result.threadInfo, totalCount: currentContext.result.totalCount))
+                            if currentContext.result.hasMore {
+                                break
+                            }
+                        }
+                    }
+                    return .single((currentResults, false))
+                }
             }
-        } else {
-            return .complete()
         }
+
+        return .complete()
     }
 
-    foundRemoteMessages = .single((([], [:], [:], 0), true))
+    foundRemoteMessages = .single(([FoundRemoteMessages(messages: [], readCounters: [:], threadsData: [:], totalCount: 0)], true))
     |> then(
         searchSignal
-        |> map { foundMessages -> (([EngineMessage], [EnginePeer.Id: EnginePeerReadCounters], [EngineMessage.Id: MessageHistoryThreadData], Int32), Bool) in
+        |> map { foundMessages -> ([FoundRemoteMessages], Bool) in
             updateSearchContexts { _ in
-                return ([0: ChatListSearchMessagesContext(result: foundMessages, loadMoreIndex: nil)], true)
+                var resultContexts: [Int: ChatListSearchMessagesContext] = [:]
+                for i in 0 ..< foundMessages.count {
+                    resultContexts[i] = ChatListSearchMessagesContext(result: foundMessages[i], loadMoreIndex: nil)
+                }
+                return (resultContexts, true)
             }
-            return ((foundMessages.messages, foundMessages.readStates, foundMessages.threadInfo, foundMessages.totalCount), false)
+            var result: [FoundRemoteMessages] = []
+            for i in 0 ..< foundMessages.count {
+                result.append(FoundRemoteMessages(messages: foundMessages[i].messages, readCounters: foundMessages[i].readStates, threadsData: foundMessages[i].threadInfo, totalCount: foundMessages[i].totalCount))
+                if foundMessages[i].hasMore {
+                    break
+                }
+            }
+            return (result, false)
         }
         |> delay(0.2, queue: Queue.concurrentDefaultQueue())
         |> then(loadMore)
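The `FoundRemoteMessages` struct introduced above replaces the old four-element tuple, and the search now runs over up to two locations. When paging, only the first context that still has more results is surfaced and refreshed; later locations wait until earlier ones are exhausted. A rough sketch of that ordering rule, with a simplified context type standing in for `ChatListSearchMessagesContext`:

    // Simplified stand-in for the per-location search context above.
    struct SearchContextModel {
        var messages: [Int]   // stand-in for [EngineMessage]
        var hasMore: Bool
    }

    // Surface results location by location, stopping after the first
    // context that still has more pages, mirroring the loop in the hunk.
    func currentResults(_ contexts: [SearchContextModel?]) -> [[Int]] {
        var results: [[Int]] = []
        for context in contexts {
            if let context {
                results.append(context.messages)
                if context.hasMore {
                    break
                }
            }
        }
        return results
    }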
@@ -1766,25 +1835,33 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
 var firstHeaderId: Int64?
 if !foundRemotePeers.2 {
     index = 0
-    for message in foundRemoteMessages.0.0 {
-        if searchState.deletedMessageIds.contains(message.id) {
-            continue
-        } else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
-            continue
-        }
-        let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
-        if firstHeaderId == nil {
-            firstHeaderId = headerId
-        }
-        var peer = EngineRenderedPeer(message: message)
-        if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
-            if let channelPeer = message.peers[migrationReference.peerId] {
-                peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
-            }
-        }
-
-        entries.append(.message(message, peer, foundRemoteMessages.0.1[message.id.peerId], foundRemoteMessages.0.2[message.id]?.info, presentationData, foundRemoteMessages.0.3, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
-        index += 1
-    }
+    var existingMessageIds = Set<MessageId>()
+    for foundRemoteMessageSet in foundRemoteMessages.0 {
+        for message in foundRemoteMessageSet.messages {
+            if existingMessageIds.contains(message.id) {
+                continue
+            }
+            existingMessageIds.insert(message.id)
+
+            if searchState.deletedMessageIds.contains(message.id) {
+                continue
+            } else if message.id.namespace == Namespaces.Message.Cloud && searchState.deletedGlobalMessageIds.contains(message.id.id) {
+                continue
+            }
+            let headerId = listMessageDateHeaderId(timestamp: message.timestamp)
+            if firstHeaderId == nil {
+                firstHeaderId = headerId
+            }
+            var peer = EngineRenderedPeer(message: message)
+            if let group = message.peers[message.id.peerId] as? TelegramGroup, let migrationReference = group.migrationReference {
+                if let channelPeer = message.peers[migrationReference.peerId] {
+                    peer = EngineRenderedPeer(peer: EnginePeer(channelPeer))
+                }
+            }
+
+            entries.append(.message(message, peer, foundRemoteMessageSet.readCounters[message.id.peerId], foundRemoteMessageSet.threadsData[message.id]?.info, presentationData, foundRemoteMessageSet.totalCount, selectionState?.contains(message.id), headerId == firstHeaderId, .index(message.index), nil, .generic, false))
+            index += 1
+        }
+    }
 }
 
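Because the same message can now come back from more than one search location, the loop above tracks seen ids in a `Set<MessageId>` before the existing deleted-message filtering. The de-duplication pattern in isolation (`MsgId`/`Found` are simplified stand-ins for the engine types):

    // De-duplicate across result sets, keeping the first occurrence;
    // result sets are assumed to already be ordered by relevance.
    struct MsgId: Hashable { var peerId: Int64; var id: Int32 }
    struct Found { var id: MsgId; var timestamp: Int32 }

    func merged(_ resultSets: [[Found]]) -> [Found] {
        var seen = Set<MsgId>()
        var result: [Found] = []
        for set in resultSets {
            for message in set where seen.insert(message.id).inserted {
                result.append(message)
            }
        }
        return result
    }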
@@ -1807,16 +1884,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
 
 let loadMore = {
     updateSearchContexts { previousMap in
-        guard let previous = previousMap[0] else {
-            return ([:], false)
-        }
-        if previous.loadMoreIndex != nil {
-            return ([0: previous], false)
-        }
-        guard let last = previous.result.messages.last else {
-            return ([0: previous], false)
-        }
-        return ([0: ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)], true)
+        var updatedMap = previousMap
+        var isSearching = false
+        for i in 0 ..< 2 {
+            if let previous = updatedMap[i] {
+                if previous.loadMoreIndex != nil {
+                    continue
+                }
+                guard let last = previous.result.messages.last else {
+                    continue
+                }
+                updatedMap[i] = ChatListSearchMessagesContext(result: previous.result, loadMoreIndex: last.index)
+                isSearching = true
+
+                if previous.result.hasMore {
+                    break
+                }
+            }
+        }
+        return (updatedMap, isSearching)
     }
 }
 
@@ -1919,6 +2005,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
         self.listNode.clearHighlightAnimated(true)
     })
 })
+chatListInteraction.isSearchMode = true
 
 let listInteraction = ListMessageItemInteraction(openMessage: { [weak self] message, mode -> Bool in
     guard let strongSelf = self else {
@@ -2023,25 +2110,25 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
 })
 
 self.searchDisposable.set((foundItems
-|> deliverOnMainQueue).start(next: { [weak self] entriesAndFlags in
+|> deliverOnMainQueue).start(next: { [weak self] foundItems in
     if let strongSelf = self {
         let previousSelectedMessageIds = previousSelectedMessages.swap(strongSelf.selectedMessages)
 
-        let isSearching = entriesAndFlags?.1 ?? false
+        var entriesAndFlags = foundItems?.0
+
+        let isSearching = foundItems?.1 ?? false
         strongSelf._isSearching.set(isSearching)
 
         if strongSelf.tagMask == .photoOrVideo {
-            var entries: [ChatListSearchEntry]? = entriesAndFlags?.0 ?? []
+            var entries: [ChatListSearchEntry]? = entriesAndFlags ?? []
             if isSearching && (entries?.isEmpty ?? true) {
                 entries = nil
             }
             strongSelf.mediaNode.updateHistory(entries: entries, totalCount: 0, updateType: .Initial)
         }
 
-        var entriesAndFlags = entriesAndFlags
-
         var peers: [EnginePeer] = []
-        if let entries = entriesAndFlags?.0 {
+        if let entries = entriesAndFlags {
             var filteredEntries: [ChatListSearchEntry] = []
             for entry in entries {
                 if case let .localPeer(peer, _, _, _, _, _, _, _, _) = entry {
@@ -2053,16 +2140,16 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
         }
 
         if strongSelf.tagMask != nil || strongSelf.searchOptionsValue?.date != nil || strongSelf.searchOptionsValue?.peer != nil {
-            entriesAndFlags?.0 = filteredEntries
+            entriesAndFlags = filteredEntries
         }
     }
 
-    let previousEntries = previousSearchItems.swap(entriesAndFlags?.0)
-    let newEntries = entriesAndFlags?.0 ?? []
+    let previousEntries = previousSearchItems.swap(entriesAndFlags)
+    let newEntries = entriesAndFlags ?? []
 
     let animated = (previousSelectedMessageIds == nil) != (strongSelf.selectedMessages == nil)
     let firstTime = previousEntries == nil
-    var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags?.0 != nil, isEmpty: !isSearching && (entriesAndFlags?.0.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
+    var transition = chatListSearchContainerPreparedTransition(from: previousEntries ?? [], to: newEntries, displayingResults: entriesAndFlags != nil, isEmpty: !isSearching && (entriesAndFlags?.isEmpty ?? false), isLoading: isSearching, animated: animated, context: context, presentationData: strongSelf.presentationData, enableHeaders: true, filter: peersFilter, location: location, key: strongSelf.key, tagMask: tagMask, interaction: chatListInteraction, listInteraction: listInteraction, peerContextAction: { message, node, rect, gesture, location in
         interaction.peerContextAction?(message, node, rect, gesture, location)
     }, toggleExpandLocalResults: {
         guard let strongSelf = self else {
@@ -3115,8 +3202,9 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
 }, present: { _ in }, openForumThread: { _, _ in })
 var isInlineMode = false
 if case .topics = key {
-    isInlineMode = true
+    isInlineMode = false
 }
+interaction.isSearchMode = true
 interaction.isInlineMode = isInlineMode
 
 let items = (0 ..< 2).compactMap { _ -> ListViewItem? in
@@ -1295,8 +1295,8 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
     return
 }
 let cachedPeerData = peerView.cachedData
-if let cachedPeerData = cachedPeerData as? CachedUserData {
-    if let photo = cachedPeerData.photo, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
+if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
+    if let photo = maybePhoto, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
         let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
         let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
         let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)
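The tightened condition above pattern-matches `cachedPeerData.photo` as `.known` before using it, which suggests the field now distinguishes "not fetched yet" from "fetched, possibly nil". A hedged sketch of that shape; the real `CachedUserData.photo` type may differ:

    // Hypothetical cached-value shape: .unknown means the photo was never
    // fetched and must not be treated as "no photo".
    enum CachedValue<T> {
        case unknown
        case known(T?)
    }

    struct ProfilePhoto { var videoRepresentations: [String] }

    func videoAvatar(from photo: CachedValue<ProfilePhoto>) -> ProfilePhoto? {
        // Mirrors the `case let .known(maybePhoto)` match in the diff above.
        if case let .known(maybePhoto) = photo, let photo = maybePhoto, !photo.videoRepresentations.isEmpty {
            return photo
        }
        return nil
    }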
@@ -1590,6 +1590,15 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
     }
 }
+
+let useChatListLayout: Bool
+if case .chatList = item.chatListLocation {
+    useChatListLayout = true
+} else if displayAsMessage {
+    useChatListLayout = true
+} else {
+    useChatListLayout = false
+}
 
 let theme = item.presentationData.theme.chatList
 
 var updatedTheme: PresentationTheme?
@@ -1653,7 +1662,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
 let avatarLeftInset: CGFloat
 if item.interaction.isInlineMode {
     avatarLeftInset = 12.0
-} else if case .forum = item.index {
+} else if !useChatListLayout {
     avatarLeftInset = 50.0
 } else {
     avatarLeftInset = 18.0 + avatarDiameter
@@ -2501,7 +2510,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
 var mainContentBoundsOffset: CGFloat
 var mainContentAlpha: CGFloat = 1.0
 
-if case .chatList = item.chatListLocation {
+if useChatListLayout {
     mainContentFrame = CGRect(origin: CGPoint(x: leftInset - 2.0, y: 0.0), size: CGSize(width: layout.contentSize.width, height: layout.contentSize.height))
     mainContentBoundsOffset = mainContentFrame.origin.x
 
@@ -2694,7 +2703,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
     }
 }
 
-if let threadInfo = threadInfo {
+if let threadInfo = threadInfo, !displayAsMessage {
     let avatarIconView: ComponentHostView<Empty>
     if let current = strongSelf.avatarIconView {
         avatarIconView = current
@@ -2742,7 +2751,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
     avatarIconView.removeFromSuperview()
 }
 
-if case .forum = item.index {
+if !useChatListLayout {
     strongSelf.avatarContainerNode.isHidden = true
 } else {
     strongSelf.avatarContainerNode.isHidden = false
@@ -95,6 +95,8 @@ public final class ChatListNodeInteraction {
 public var searchTextHighightState: String?
 var highlightedChatLocation: ChatListHighlightedLocation?
 
+var isSearchMode: Bool = false
+
 var isInlineMode: Bool = false
 var inlineNavigationLocation: ChatListHighlightedLocation?
 
@@ -1411,10 +1411,14 @@ open class TextNode: ASDisplayNode {
 context.setAllowsFontSubpixelQuantization(true)
 context.setShouldSubpixelQuantizeFonts(true)
 
+var blendMode: CGBlendMode = .normal
+
 var clearRects: [CGRect] = []
 if let layout = parameters as? TextNodeLayout {
     if !isRasterizing || layout.backgroundColor != nil {
         context.setBlendMode(.copy)
+        blendMode = .copy
+
         context.setFillColor((layout.backgroundColor ?? UIColor.clear).cgColor)
         context.fill(bounds)
     }
@@ -1426,6 +1430,8 @@ open class TextNode: ASDisplayNode {
 
 if let (textStrokeColor, textStrokeWidth) = layout.textStroke {
     context.setBlendMode(.normal)
+    blendMode = .normal
+
     context.setLineCap(.round)
     context.setLineJoin(.round)
     context.setStrokeColor(textStrokeColor.cgColor)
@@ -1487,7 +1493,28 @@ open class TextNode: ASDisplayNode {
 if attributes["Attribute__EmbeddedItem"] != nil {
     continue
 }
+
+var fixCoupleEmoji = false
+if glyphCount == 2, let font = attributes["NSFont"] as? UIFont, font.fontName.contains("ColorEmoji"), let string = layout.attributedString {
+    let range = CTRunGetStringRange(run)
+    let substring = string.attributedSubstring(from: NSMakeRange(range.location, range.length)).string
+
+    let heart = Unicode.Scalar(0x2764)!
+    let man = Unicode.Scalar(0x1F468)!
+    let woman = Unicode.Scalar(0x1F469)!
+
+    if substring.unicodeScalars.contains(heart) && (substring.unicodeScalars.contains(man) || substring.unicodeScalars.contains(woman)) {
+        fixCoupleEmoji = true
+    }
+}
+
+if fixCoupleEmoji {
+    context.setBlendMode(.normal)
+}
 CTRunDraw(run, context, CFRangeMake(0, glyphCount))
+if fixCoupleEmoji {
+    context.setBlendMode(blendMode)
+}
 }
 }
 
@@ -517,7 +517,7 @@ private final class PictureInPictureContentImpl: NSObject, PictureInPictureConte
 guard let status = self.status else {
     return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)))
 }
-return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
+return CMTimeRange(start: CMTime(seconds: status.timestamp, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
 }
 
 public func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
@@ -104,7 +104,9 @@ final class AppIconsDemoComponent: Component {
     position = CGPoint(x: availableSize.width * 0.5, y: availableSize.height * 0.5)
 }
 
-view.center = position.offsetBy(dx: availableSize.width / 2.0, dy: 0.0)
+if !self.animating {
+    view.center = position.offsetBy(dx: availableSize.width / 2.0, dy: 0.0)
+}
 
 i += 1
 }
@@ -126,7 +128,10 @@ final class AppIconsDemoComponent: Component {
     return availableSize
 }
 
+private var animating = false
 func animateIn(availableSize: CGSize) {
+    self.animating = true
+
     var i = 0
     for view in self.imageViews {
         let from: CGPoint
@@ -146,9 +151,17 @@ final class AppIconsDemoComponent: Component {
     delay = 0.0
 }
 
+let initialPosition = view.layer.position
+view.layer.position = initialPosition.offsetBy(dx: from.x, dy: from.y)
+
 Queue.mainQueue().after(delay) {
+    view.layer.position = initialPosition
     view.layer.animateScale(from: 3.0, to: 1.0, duration: 0.5, delay: 0.0, timingFunction: kCAMediaTimingFunctionSpring)
     view.layer.animatePosition(from: from, to: CGPoint(), duration: 0.5, delay: 0.0, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
+
+    if i == 2 {
+        self.animating = false
+    }
 }
 
 i += 1
@@ -110,6 +110,9 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
 private let joinButtonTitleNode: ImmediateTextNode
 private let joinButtonBackgroundNode: ASImageNode
+
+private var previewImageNode: ASImageNode?
+private var previewImage: UIImage?
 
 private var audioLevelView: VoiceBlobView?
 
 private let micButton: HighlightTrackingButtonNode
@@ -139,6 +142,7 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
 private let membersDisposable = MetaDisposable()
 private let isMutedDisposable = MetaDisposable()
 private let audioLevelDisposable = MetaDisposable()
+private var imageDisposable: Disposable?
 
 private var callState: PresentationGroupCallState?
 
@@ -233,6 +237,8 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
 self.isMutedDisposable.dispose()
 self.audioLevelGeneratorTimer?.invalidate()
 self.updateTimer?.invalidate()
+self.imageDisposable?.dispose()
+self.audioLevelDisposable.dispose()
 }
 
 public override func didLoad() {
@@ -366,6 +372,11 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
     self.avatarsContent = self.avatarsContext.update(peers: [], animated: false)
 } else {
     self.avatarsContent = self.avatarsContext.update(peers: data.topParticipants.map { EnginePeer($0.peer) }, animated: false)
+
+    if let imageDisposable = self.imageDisposable {
+        self.imageDisposable = nil
+        imageDisposable.dispose()
+    }
 }
 
 self.textNode.attributedText = NSAttributedString(string: membersText, font: Font.regular(13.0), textColor: self.theme.chat.inputPanel.secondaryTextColor)
@@ -484,6 +495,67 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
     updateAudioLevels = true
 }
+
+#if DEBUG
+if data.info.isStream {
+    if self.imageDisposable == nil {
+        let engine = self.context.engine
+        let info = data.info
+        self.imageDisposable = (engine.calls.getAudioBroadcastDataSource(callId: info.id, accessHash: info.accessHash)
+        |> mapToSignal { source -> Signal<Data?, NoError> in
+            guard let source else {
+                return .single(nil)
+            }
+
+            let time = engine.calls.requestStreamState(dataSource: source, callId: info.id, accessHash: info.accessHash)
+            |> map { state -> Int64? in
+                guard let state else {
+                    return nil
+                }
+                return state.channels.first?.latestTimestamp
+            }
+
+            return time
+            |> mapToSignal { latestTimestamp -> Signal<Data?, NoError> in
+                guard let latestTimestamp else {
+                    return .single(nil)
+                }
+
+                let durationMilliseconds: Int64 = 32000
+                let bufferOffset: Int64 = 1 * durationMilliseconds
+                let timestampId = (latestTimestamp / durationMilliseconds) * durationMilliseconds - bufferOffset
+
+                return engine.calls.getVideoBroadcastPart(dataSource: source, callId: info.id, accessHash: info.accessHash, timestampIdMilliseconds: timestampId, durationMilliseconds: durationMilliseconds, channelId: 2, quality: 0)
+                |> mapToSignal { result -> Signal<Data?, NoError> in
+                    switch result.status {
+                    case let .data(data):
+                        return .single(data)
+                    case .notReady, .resyncNeeded, .rejoinNeeded:
+                        return .single(nil)
+                    }
+                }
+            }
+        }
+        |> deliverOnMainQueue).start(next: { [weak self] data in
+            guard let self, let data else {
+                return
+            }
+
+            var image: UIImage?
+            for i in 0 ..< 100 {
+                image = UIImage(data: data.subdata(in: i ..< data.count))
+                if image != nil {
+                    break
+                }
+            }
+            self.previewImage = image
+            if let (size, leftInset, rightInset) = self.validLayout {
+                self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
+            }
+        })
+    }
+}
+#endif
 
 if let (size, leftInset, rightInset) = self.validLayout {
     self.updateLayout(size: size, leftInset: leftInset, rightInset: rightInset, transition: .animated(duration: 0.2, curve: .easeInOut))
 }
@@ -609,6 +681,26 @@ public final class GroupCallNavigationAccessoryPanel: ASDisplayNode {
 staticTransition.updateFrame(node: self.joinButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: joinButtonFrame.size))
 staticTransition.updateFrame(node: self.joinButtonTitleNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((joinButtonFrame.width - joinButtonTitleSize.width) / 2.0), y: floorToScreenPixels((joinButtonFrame.height - joinButtonTitleSize.height) / 2.0)), size: joinButtonTitleSize))
+
+if let previewImage = self.previewImage {
+    let previewImageNode: ASImageNode
+    if let current = self.previewImageNode {
+        previewImageNode = current
+    } else {
+        previewImageNode = ASImageNode()
+        previewImageNode.clipsToBounds = true
+        previewImageNode.cornerRadius = 8.0
+        previewImageNode.contentMode = .scaleAspectFill
+        self.previewImageNode = previewImageNode
+        self.addSubnode(previewImageNode)
+    }
+    previewImageNode.image = previewImage
+    let previewSize = CGSize(width: 40.0, height: 40.0)
+    previewImageNode.frame = CGRect(origin: CGPoint(x: joinButtonFrame.minX - previewSize.width - 8.0, y: joinButtonFrame.minY + floor((joinButtonFrame.height - previewSize.height) / 2.0)), size: previewSize)
+} else if let previewImageNode = self.previewImageNode {
+    self.previewImageNode = nil
+    previewImageNode.removeFromSupernode()
+}
 
 let micButtonSize = CGSize(width: 36.0, height: 36.0)
 let micButtonFrame = CGRect(origin: CGPoint(x: size.width - rightInset - 7.0 - micButtonSize.width, y: floor((panelHeight - micButtonSize.height) / 2.0)), size: micButtonSize)
 staticTransition.updateFrame(node: self.micButton, frame: micButtonFrame)
@ -15,180 +15,6 @@ import AccountContext
|
|||||||
import DeviceProximity
|
import DeviceProximity
|
||||||
import PhoneNumberFormat
|
import PhoneNumberFormat
|
||||||
|
|
||||||
final class PresentationCallToneRenderer {
|
|
||||||
let queue: Queue
|
|
||||||
|
|
||||||
let tone: PresentationCallTone
|
|
||||||
|
|
||||||
private let toneRenderer: MediaPlayerAudioRenderer
|
|
||||||
private var toneRendererAudioSession: MediaPlayerAudioSessionCustomControl?
|
|
||||||
private var toneRendererAudioSessionActivated = false
|
|
||||||
private let audioLevelPipe = ValuePipe<Float>()
|
|
||||||
|
|
||||||
init(tone: PresentationCallTone, completed: (() -> Void)? = nil) {
|
|
||||||
let queue = Queue.mainQueue()
|
|
||||||
self.queue = queue
|
|
||||||
|
|
||||||
self.tone = tone
|
|
||||||
|
|
||||||
var controlImpl: ((MediaPlayerAudioSessionCustomControl) -> Disposable)?
|
|
||||||
|
|
||||||
self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in
|
|
||||||
return controlImpl?(control) ?? EmptyDisposable
|
|
||||||
}), playAndRecord: false, useVoiceProcessingMode: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {})
|
|
||||||
|
|
||||||
controlImpl = { [weak self] control in
|
|
||||||
queue.async {
|
|
||||||
if let strongSelf = self {
|
|
||||||
strongSelf.toneRendererAudioSession = control
|
|
||||||
if strongSelf.toneRendererAudioSessionActivated {
|
|
||||||
control.activate()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ActionDisposable {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let toneDataOffset = Atomic<Int>(value: 0)
|
|
||||||
|
|
||||||
let toneData = Atomic<Data?>(value: nil)
|
|
||||||
let reportedCompletion = Atomic<Bool>(value: false)
|
|
||||||
|
|
||||||
self.toneRenderer.beginRequestingFrames(queue: DispatchQueue.global(), takeFrame: {
|
|
||||||
var data = toneData.with { $0 }
|
|
||||||
if data == nil {
|
|
||||||
data = presentationCallToneData(tone)
|
|
||||||
if data != nil {
|
|
||||||
let _ = toneData.swap(data)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
guard let toneData = data else {
|
|
||||||
if !reportedCompletion.swap(true) {
|
|
||||||
completed?()
|
|
||||||
}
|
|
||||||
return .finished
|
|
||||||
}
|
|
||||||
|
|
-            let toneDataMaxOffset: Int?
-            if let loopCount = tone.loopCount {
-                toneDataMaxOffset = (data?.count ?? 0) * loopCount
-            } else {
-                toneDataMaxOffset = nil
-            }
-            
-            let frameSize = 44100
-            
-            var takeOffset: Int?
-            let _ = toneDataOffset.modify { current in
-                takeOffset = current
-                return current + frameSize
-            }
-            
-            if let takeOffset = takeOffset {
-                if let toneDataMaxOffset = toneDataMaxOffset, takeOffset >= toneDataMaxOffset {
-                    if !reportedCompletion.swap(true) {
-                        Queue.mainQueue().after(1.0, {
-                            completed?()
-                        })
-                    }
-                    return .finished
-                }
-                
-                var blockBuffer: CMBlockBuffer?
-                
-                let bytes = malloc(frameSize)!
-                toneData.withUnsafeBytes { dataBuffer -> Void in
-                    guard let dataBytes = dataBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
-                        return
-                    }
-                    var takenCount = 0
-                    while takenCount < frameSize {
-                        let dataOffset = (takeOffset + takenCount) % toneData.count
-                        let dataCount = min(frameSize - takenCount, toneData.count - dataOffset)
-                        //print("take from \(dataOffset) count: \(dataCount)")
-                        memcpy(bytes.advanced(by: takenCount), dataBytes.advanced(by: dataOffset), dataCount)
-                        takenCount += dataCount
-                        
-                        if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + takenCount >= toneDataMaxOffset {
-                            break
-                        }
-                    }
-                    
-                    if takenCount < frameSize {
-                        //print("fill with zeros from \(takenCount) count: \(frameSize - takenCount)")
-                        memset(bytes.advanced(by: takenCount), 0, frameSize - takenCount)
-                    }
-                }
-                
-                /*if let toneDataMaxOffset = toneDataMaxOffset, takeOffset + frameSize > toneDataMaxOffset {
-                    let validCount = max(0, toneDataMaxOffset - takeOffset)
-                    memset(bytes.advanced(by: validCount), 0, frameSize - validCount)
-                    print("clear from \(validCount) count: \(frameSize - validCount)")
-                }*/
-                
-                let status = CMBlockBufferCreateWithMemoryBlock(allocator: nil, memoryBlock: bytes, blockLength: frameSize, blockAllocator: nil, customBlockSource: nil, offsetToData: 0, dataLength: frameSize, flags: 0, blockBufferOut: &blockBuffer)
-                if status != noErr {
-                    if !reportedCompletion.swap(true) {
-                        completed?()
-                    }
-                    return .finished
-                }
-                
-                let sampleCount = frameSize / 2
-                
-                let pts = CMTime(value: Int64(takeOffset / 2), timescale: 44100)
-                var timingInfo = CMSampleTimingInfo(duration: CMTime(value: Int64(sampleCount), timescale: 44100), presentationTimeStamp: pts, decodeTimeStamp: pts)
-                var sampleBuffer: CMSampleBuffer?
-                var sampleSize = frameSize
-                guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
-                    if !reportedCompletion.swap(true) {
-                        completed?()
-                    }
-                    return .finished
-                }
-                
-                if let sampleBuffer = sampleBuffer {
-                    return .frame(MediaTrackFrame(type: .audio, sampleBuffer: sampleBuffer, resetDecoder: false, decoded: true))
-                } else {
-                    if !reportedCompletion.swap(true) {
-                        completed?()
-                    }
-                    return .finished
-                }
-            } else {
-                if !reportedCompletion.swap(true) {
-                    completed?()
-                }
-                return .finished
-            }
-        })
-        self.toneRenderer.start()
-        self.toneRenderer.setRate(1.0)
-    }
-    
-    deinit {
-        assert(self.queue.isCurrent())
-        self.toneRenderer.stop()
-    }
-    
-    func setAudioSessionActive(_ value: Bool) {
-        if self.toneRendererAudioSessionActivated != value {
-            self.toneRendererAudioSessionActivated = value
-            if let control = self.toneRendererAudioSession {
-                if value {
-                    self.toneRenderer.setRate(1.0)
-                    control.activate()
-                } else {
-                    self.toneRenderer.setRate(0.0)
-                    control.deactivate()
-                }
-            }
-        }
-    }
-}
-
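Note: the block above removes PresentationCallToneRenderer's frame producer. Its core trick was treating the decoded tone as a ring buffer: each 44100-byte frame is filled by copying from a wrapped offset, stopping at the loop limit and zero-padding the tail. A minimal standalone sketch of that fill strategy (array-based for clarity; the names are illustrative, not the renderer's API):

import Foundation

// Fill one fixed-size frame from looped tone data, as the removed renderer did.
func fillFrame(from toneData: [UInt8], startOffset: Int, frameSize: Int, maxOffset: Int?) -> [UInt8] {
    var frame = [UInt8](repeating: 0, count: frameSize) // unfilled tail stays silent
    var taken = 0
    while taken < frameSize {
        let dataOffset = (startOffset + taken) % toneData.count // wrap around the tone
        let count = min(frameSize - taken, toneData.count - dataOffset)
        frame.replaceSubrange(taken ..< taken + count, with: toneData[dataOffset ..< dataOffset + count])
        taken += count
        if let maxOffset = maxOffset, startOffset + taken >= maxOffset {
            break // loop count exhausted; the rest of the frame is zero-padded
        }
    }
    return frame
}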
 public final class PresentationCallImpl: PresentationCall {
     public let context: AccountContext
     private let audioSession: ManagedAudioSession
@@ -221,6 +47,7 @@ public final class PresentationCallImpl: PresentationCall {
     private var callContextState: OngoingCallContextState?
     private var ongoingContext: OngoingCallContext?
     private var ongoingContextStateDisposable: Disposable?
+    private var sharedAudioDevice: OngoingCallContext.AudioDevice?
     private var requestedVideoAspect: Float?
     private var reception: Int32?
     private var receptionDisposable: Disposable?
@@ -282,7 +109,7 @@ public final class PresentationCallImpl: PresentationCall {
     private var audioSessionActiveDisposable: Disposable?
     private var isAudioSessionActive = false
     
-    private var toneRenderer: PresentationCallToneRenderer?
+    private var currentTone: PresentationCallTone?
     
     private var droppedCall = false
     private var dropCallKitCallTimer: SwiftSignalKit.Timer?
@@ -463,6 +290,12 @@ public final class PresentationCallImpl: PresentationCall {
             }
         })
         
+        if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
+            self.sharedAudioDevice = nil
+        } else {
+            self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
+        }
+        
         self.audioSessionActiveDisposable = (self.audioSessionActive.get()
         |> deliverOnMainQueue).start(next: { [weak self] value in
             if let strongSelf = self {
@@ -702,7 +535,7 @@ public final class PresentationCallImpl: PresentationCall {
             
             let updatedConnections = connections
             
-            let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec)
+            let ongoingContext = OngoingCallContext(account: self.context.account, callSessionManager: self.callSessionManager, callId: id, internalId: self.internalId, proxyServer: proxyServer, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: updatedConnections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, enableTCP: self.enableTCP, enableStunMarking: self.enableStunMarking, audioSessionActive: self.audioSessionActive.get(), logName: logName, preferredVideoCodec: self.preferredVideoCodec, audioDevice: self.sharedAudioDevice)
             self.ongoingContext = ongoingContext
             ongoingContext.setIsMuted(self.isMutedValue)
             if let requestedVideoAspect = self.requestedVideoAspect {
@@ -864,26 +697,19 @@ public final class PresentationCallImpl: PresentationCall {
                 break
             }
         }
-        if tone != self.toneRenderer?.tone {
-            if let tone = tone {
-                if "".isEmpty {
-                    let _ = tone
-                } else {
-                    let toneRenderer = PresentationCallToneRenderer(tone: tone)
-                    self.toneRenderer = toneRenderer
-                    toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
-                }
-            } else {
-                self.toneRenderer = nil
-            }
+        if tone != self.currentTone {
+            self.currentTone = tone
+            self.sharedAudioDevice?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
+                return OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
+            })
         }
     }
     
     private func updateIsAudioSessionActive(_ value: Bool) {
         if self.isAudioSessionActive != value {
             self.isAudioSessionActive = value
-            self.toneRenderer?.setAudioSessionActive(value)
         }
+        self.sharedAudioDevice?.setIsAudioSessionActive(value)
     }
     
     public func answer() {
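Note: tone playback in one-to-one calls now goes through the shared audio device instead of a dedicated renderer: the call hands the device a declarative OngoingCallContext.Tone (raw samples plus loop count), and passing nil stops playback. A condensed sketch of the new path, mirroring the replacement code above (the device and tone parameters stand in for the call's stored state):

// device: the OngoingCallContext.AudioDevice created during call setup (if not killswitched).
// tone: the PresentationCallTone chosen by the call state machine, or nil for silence.
func apply(tone: PresentationCallTone?, to device: OngoingCallContext.AudioDevice?) {
    device?.setTone(tone: tone.flatMap(presentationCallToneData).flatMap { data in
        // 48 kHz 16-bit mono samples; a very large loopCount means "until replaced".
        OngoingCallContext.Tone(samples: data, sampleRate: 48000, loopCount: tone?.loopCount ?? 1000000)
    })
}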
@@ -4,12 +4,12 @@ import AVFoundation
 private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Data? {
     let outputSettings: [String: Any] = [
         AVFormatIDKey: kAudioFormatLinearPCM as NSNumber,
-        AVSampleRateKey: 44100.0 as NSNumber,
+        AVSampleRateKey: 48000.0 as NSNumber,
         AVLinearPCMBitDepthKey: 16 as NSNumber,
         AVLinearPCMIsNonInterleaved: false as NSNumber,
         AVLinearPCMIsFloatKey: false as NSNumber,
         AVLinearPCMIsBigEndianKey: false as NSNumber,
-        AVNumberOfChannelsKey: 2 as NSNumber
+        AVNumberOfChannelsKey: 1 as NSNumber
     ]
     
     let nsName: NSString = name as NSString
@@ -63,9 +63,9 @@ private func loadToneData(name: String, addSilenceDuration: Double = 0.0) -> Dat
     }
     
     if !addSilenceDuration.isZero {
-        let sampleRate = 44100
+        let sampleRate = 48000
         let numberOfSamples = Int(Double(sampleRate) * addSilenceDuration)
-        let numberOfChannels = 2
+        let numberOfChannels = 1
         let numberOfBytes = numberOfSamples * 2 * numberOfChannels
         
         data.append(Data(count: numberOfBytes))
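Note: the tone assets move from 44.1 kHz stereo to 48 kHz mono to match the shared audio device's format. The silence-padding arithmetic above follows directly: with 16-bit PCM, bytes = samples × 2 × channels, so one second of silence is now 48000 × 2 × 1 = 96000 bytes instead of 44100 × 2 × 2 = 176400 bytes:

let sampleRate = 48000
let numberOfChannels = 1
let bytesPerSample = 2 // 16-bit linear PCM

func silenceByteCount(duration: Double) -> Int {
    // samples × bytes-per-sample × channels, as in loadToneData above
    return Int(Double(sampleRate) * duration) * bytesPerSample * numberOfChannels
}

let oneSecond = silenceByteCount(duration: 1.0) // 96000 bytes (was 176400 at 44.1 kHz stereo)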
@@ -433,6 +433,15 @@ private extension CurrentImpl {
             break
         }
     }
+    
+    func setTone(tone: OngoingGroupCallContext.Tone?) {
+        switch self {
+        case let .call(callContext):
+            callContext.setTone(tone: tone)
+        case .mediaStream:
+            break
+        }
+    }
 }
 
 public func groupCallLogsPath(account: Account) -> String {
@@ -823,7 +832,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     
     private var didStartConnectingOnce: Bool = false
     private var didConnectOnce: Bool = false
-    private var toneRenderer: PresentationCallToneRenderer?
     
     private var videoCapturer: OngoingCallVideoCapturer?
     private var useFrontCamera: Bool = true
@@ -1841,7 +1849,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 if isConnecting {
                     strongSelf.beginTone(tone: .groupConnecting)
                 } else {
-                    strongSelf.toneRenderer = nil
+                    strongSelf.beginTone(tone: nil)
                 }
             }
             
@@ -2470,15 +2478,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     private func updateIsAudioSessionActive(_ value: Bool) {
         if self.isAudioSessionActive != value {
             self.isAudioSessionActive = value
-            self.toneRenderer?.setAudioSessionActive(value)
         }
     }
     
-    private func beginTone(tone: PresentationCallTone) {
-        if "".isEmpty {
-            return
-        }
-        if self.isStream {
+    private func beginTone(tone: PresentationCallTone?) {
+        if self.isStream, let tone {
             switch tone {
             case .groupJoined, .groupLeft:
                 return
@@ -2486,21 +2490,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 break
             }
         }
-        var completed: (() -> Void)?
-        let toneRenderer = PresentationCallToneRenderer(tone: tone, completed: {
-            completed?()
-        })
-        completed = { [weak self, weak toneRenderer] in
-            Queue.mainQueue().async {
-                guard let strongSelf = self, let toneRenderer = toneRenderer, toneRenderer === strongSelf.toneRenderer else {
-                    return
-                }
-                strongSelf.toneRenderer = nil
-            }
-        }
-        
-        self.toneRenderer = toneRenderer
-        toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
+        if let tone, let toneData = presentationCallToneData(tone) {
+            self.genericCallContext?.setTone(tone: OngoingGroupCallContext.Tone(
+                samples: toneData,
+                sampleRate: 48000,
+                loopCount: tone.loopCount ?? 100000
+            ))
+        } else {
+            self.genericCallContext?.setTone(tone: nil)
+        }
     }
     
     public func playTone(_ tone: PresentationGroupCallTone) {
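Note: beginTone is now optional-aware end to end: the group call forwards tones to its genericCallContext as OngoingGroupCallContext.Tone values, and nil clears whatever is playing, replacing the old pattern of dropping a tone renderer instance. Usage, as in the connection-state handler above:

// Entering the connecting state: start the looped connecting tone.
strongSelf.beginTone(tone: .groupConnecting)

// Connected: pass nil so the call context stops mixing tone samples.
strongSelf.beginTone(tone: nil)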
@@ -50,6 +50,34 @@ public enum CachedPeerAutoremoveTimeout: Equatable, PostboxCoding {
     }
 }
 
+public enum CachedPeerProfilePhoto: Equatable, PostboxCoding {
+    case unknown
+    case known(TelegramMediaImage?)
+    
+    public init(decoder: PostboxDecoder) {
+        switch decoder.decodeInt32ForKey("_v", orElse: 0) {
+        case 1:
+            self = .known(decoder.decodeObjectForKey("v", decoder: { TelegramMediaImage(decoder: $0) }) as? TelegramMediaImage)
+        default:
+            self = .unknown
+        }
+    }
+    
+    public func encode(_ encoder: PostboxEncoder) {
+        switch self {
+        case .unknown:
+            encoder.encodeInt32(0, forKey: "_v")
+        case let .known(value):
+            encoder.encodeInt32(1, forKey: "_v")
+            if let value = value {
+                encoder.encodeObject(value, forKey: "v")
+            } else {
+                encoder.encodeNil(forKey: "v")
+            }
+        }
+    }
+}
+
 public struct CachedPremiumGiftOption: Equatable, PostboxCoding {
     public let months: Int32
     public let currency: String
@@ -123,7 +151,7 @@ public final class CachedUserData: CachedPeerData {
     public let hasScheduledMessages: Bool
     public let autoremoveTimeout: CachedPeerAutoremoveTimeout
     public let themeEmoticon: String?
-    public let photo: TelegramMediaImage?
+    public let photo: CachedPeerProfilePhoto
     public let premiumGiftOptions: [CachedPremiumGiftOption]
     public let voiceMessagesAvailable: Bool
     
@@ -145,14 +173,14 @@ public final class CachedUserData: CachedPeerData {
         self.hasScheduledMessages = false
         self.autoremoveTimeout = .unknown
         self.themeEmoticon = nil
-        self.photo = nil
+        self.photo = .unknown
         self.premiumGiftOptions = []
         self.voiceMessagesAvailable = true
         self.peerIds = Set()
        self.messageIds = Set()
     }
     
-    public init(about: String?, botInfo: BotInfo?, peerStatusSettings: PeerStatusSettings?, pinnedMessageId: MessageId?, isBlocked: Bool, commonGroupCount: Int32, voiceCallsAvailable: Bool, videoCallsAvailable: Bool, callsPrivate: Bool, canPinMessages: Bool, hasScheduledMessages: Bool, autoremoveTimeout: CachedPeerAutoremoveTimeout, themeEmoticon: String?, photo: TelegramMediaImage?, premiumGiftOptions: [CachedPremiumGiftOption], voiceMessagesAvailable: Bool) {
+    public init(about: String?, botInfo: BotInfo?, peerStatusSettings: PeerStatusSettings?, pinnedMessageId: MessageId?, isBlocked: Bool, commonGroupCount: Int32, voiceCallsAvailable: Bool, videoCallsAvailable: Bool, callsPrivate: Bool, canPinMessages: Bool, hasScheduledMessages: Bool, autoremoveTimeout: CachedPeerAutoremoveTimeout, themeEmoticon: String?, photo: CachedPeerProfilePhoto, premiumGiftOptions: [CachedPremiumGiftOption], voiceMessagesAvailable: Bool) {
         self.about = about
         self.botInfo = botInfo
         self.peerStatusSettings = peerStatusSettings
@@ -204,12 +232,8 @@ public final class CachedUserData: CachedPeerData {
         self.autoremoveTimeout = decoder.decodeObjectForKey("artv", decoder: CachedPeerAutoremoveTimeout.init(decoder:)) as? CachedPeerAutoremoveTimeout ?? .unknown
         self.themeEmoticon = decoder.decodeOptionalStringForKey("te")
         
-        if let photo = decoder.decodeObjectForKey("ph", decoder: { TelegramMediaImage(decoder: $0) }) as? TelegramMediaImage {
-            self.photo = photo
-        } else {
-            self.photo = nil
-        }
+        self.photo = decoder.decodeObjectForKey("phv", decoder: CachedPeerProfilePhoto.init(decoder:)) as? CachedPeerProfilePhoto ?? .unknown
         
         self.premiumGiftOptions = decoder.decodeObjectArrayWithDecoderForKey("pgo") as [CachedPremiumGiftOption]
         self.voiceMessagesAvailable = decoder.decodeInt32ForKey("vma", orElse: 0) != 0
         
@@ -261,12 +285,8 @@ public final class CachedUserData: CachedPeerData {
             encoder.encodeNil(forKey: "te")
         }
         
-        if let photo = self.photo {
-            encoder.encodeObject(photo, forKey: "ph")
-        } else {
-            encoder.encodeNil(forKey: "ph")
-        }
+        encoder.encodeObject(self.photo, forKey: "phv")
         
         encoder.encodeObjectArray(self.premiumGiftOptions, forKey: "pgo")
         encoder.encodeInt32(self.voiceMessagesAvailable ? 1 : 0, forKey: "vma")
     }
@@ -338,7 +358,7 @@ public final class CachedUserData: CachedPeerData {
         return CachedUserData(about: self.about, botInfo: self.botInfo, peerStatusSettings: self.peerStatusSettings, pinnedMessageId: self.pinnedMessageId, isBlocked: self.isBlocked, commonGroupCount: self.commonGroupCount, voiceCallsAvailable: self.voiceCallsAvailable, videoCallsAvailable: self.videoCallsAvailable, callsPrivate: self.callsPrivate, canPinMessages: self.canPinMessages, hasScheduledMessages: self.hasScheduledMessages, autoremoveTimeout: self.autoremoveTimeout, themeEmoticon: themeEmoticon, photo: self.photo, premiumGiftOptions: self.premiumGiftOptions, voiceMessagesAvailable: self.voiceMessagesAvailable)
     }
     
-    public func withUpdatedPhoto(_ photo: TelegramMediaImage?) -> CachedUserData {
+    public func withUpdatedPhoto(_ photo: CachedPeerProfilePhoto) -> CachedUserData {
         return CachedUserData(about: self.about, botInfo: self.botInfo, peerStatusSettings: self.peerStatusSettings, pinnedMessageId: self.pinnedMessageId, isBlocked: self.isBlocked, commonGroupCount: self.commonGroupCount, voiceCallsAvailable: self.voiceCallsAvailable, videoCallsAvailable: self.videoCallsAvailable, callsPrivate: self.callsPrivate, canPinMessages: self.canPinMessages, hasScheduledMessages: self.hasScheduledMessages, autoremoveTimeout: self.autoremoveTimeout, themeEmoticon: self.themeEmoticon, photo: photo, premiumGiftOptions: self.premiumGiftOptions, voiceMessagesAvailable: self.voiceMessagesAvailable)
     }
     
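Note: CachedPeerProfilePhoto makes the profile-photo cache tri-state: .unknown means the value was never fetched, while .known(nil) means it was fetched and the user genuinely has no photo; the old TelegramMediaImage? field collapsed both into nil. The coding is versioned under the "_v" key, so entries written under the old "ph" key decode as .unknown and get refetched. A standalone sketch of the tri-state idea (generic names are illustrative):

// Generic tri-state cache value: "never fetched" vs. "fetched, possibly empty".
enum CachedValue<T> {
    case unknown   // not in cache yet; caller should request it
    case known(T?) // cached; the payload itself may legitimately be nil
}

func action(for photo: CachedValue<String>) -> String {
    switch photo {
    case .unknown:
        return "fetch from server"
    case .known(nil):
        return "render placeholder: user has no photo"
    case .known(let name?):
        return "render \(name)"
    }
}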
@@ -739,7 +739,11 @@ public extension TelegramEngine.EngineData.Item {
                     preconditionFailure()
                 }
                 if let cachedData = view.cachedPeerData as? CachedUserData {
-                    return .known(cachedData.photo)
+                    if case let .known(value) = cachedData.photo {
+                        return .known(value)
+                    } else {
+                        return .unknown
+                    }
                 } else if let cachedData = view.cachedPeerData as? CachedGroupData {
                     return .known(cachedData.photo)
                 } else if let cachedData = view.cachedPeerData as? CachedChannelData {
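Note: the accessor now propagates the cache's uncertainty instead of collapsing it: only a .known cached photo is reported as known, so subscribers see .unknown and can trigger a fetch rather than treating "not yet loaded" as "no photo". A hypothetical mirror of that mapping (the result type here is a stand-in for the engine's actual item result, shown only for illustration):

enum ResultItem<T> {
    case known(T)
    case unknown
}

func photoResult(from cached: CachedPeerProfilePhoto) -> ResultItem<TelegramMediaImage?> {
    if case let .known(value) = cached {
        return .known(value)
    } else {
        return .unknown
    }
}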
@@ -117,7 +117,10 @@ private func mergedState(transaction: Transaction, seedConfiguration: SeedConfig
     
     var peerIdsSet: Set<PeerId> = Set()
     var readStates: [PeerId: CombinedPeerReadState] = [:]
-    var threadInfo:[MessageId : MessageHistoryThreadData] = [:]
+    var threadInfo: [MessageId : MessageHistoryThreadData] = [:]
+    if let state = state {
+        threadInfo = state.threadInfo
+    }
     
     var renderedMessages: [Message] = []
     for message in messages {
@@ -35,10 +35,12 @@ public enum AdminLogEventAction {
     public struct ForumTopicInfo {
         public var info: EngineMessageHistoryThread.Info
         public var isClosed: Bool
+        public var isHidden: Bool
         
-        public init(info: EngineMessageHistoryThread.Info, isClosed: Bool) {
+        public init(info: EngineMessageHistoryThread.Info, isClosed: Bool, isHidden: Bool) {
             self.info = info
             self.isClosed = isClosed
+            self.isHidden = isHidden
         }
     }
     
@@ -302,17 +304,17 @@ func channelAdminLogEvents(postbox: Postbox, network: Network, peerId: PeerId, m
             let prevInfo: AdminLogEventAction.ForumTopicInfo
             switch prevTopic {
             case let .forumTopic(flags, _, _, title, iconColor, iconEmojiId, _, _, _, _, _, _, _, _, _):
-                prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0)
+                prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0, isHidden: (flags & (1 << 6)) != 0)
             case .forumTopicDeleted:
-                prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false)
+                prevInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false, isHidden: false)
             }
             
             let newInfo: AdminLogEventAction.ForumTopicInfo
             switch newTopic {
             case let .forumTopic(flags, _, _, title, iconColor, iconEmojiId, _, _, _, _, _, _, _, _, _):
-                newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0)
+                newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: title, icon: iconEmojiId, iconColor: iconColor), isClosed: (flags & (1 << 2)) != 0, isHidden: (flags & (1 << 6)) != 0)
             case .forumTopicDeleted:
-                newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false)
+                newInfo = AdminLogEventAction.ForumTopicInfo(info: EngineMessageHistoryThread.Info(title: "", icon: nil, iconColor: 0), isClosed: false, isHidden: false)
             }
             
             action = .editTopic(prevInfo: prevInfo, newInfo: newInfo)
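Note: the topic's hidden state travels in bit 6 of the forumTopic flags, next to bit 2 for the closed state; both decode with plain bit masks:

let flags: Int32 = 0b1000100 // example value: bit 2 and bit 6 set
let isClosed = (flags & (1 << 2)) != 0 // true
let isHidden = (flags & (1 << 6)) != 0 // true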
@@ -267,7 +267,7 @@ func _internal_fetchAndUpdateCachedPeerData(accountPeerId: PeerId, peerId rawPee
                     return previous.withUpdatedAbout(userFullAbout).withUpdatedBotInfo(botInfo).withUpdatedCommonGroupCount(userFullCommonChatsCount).withUpdatedIsBlocked(isBlocked).withUpdatedVoiceCallsAvailable(voiceCallsAvailable).withUpdatedVideoCallsAvailable(videoCallsAvailable).withUpdatedCallsPrivate(callsPrivate).withUpdatedCanPinMessages(canPinMessages).withUpdatedPeerStatusSettings(peerStatusSettings).withUpdatedPinnedMessageId(pinnedMessageId).withUpdatedHasScheduledMessages(hasScheduledMessages)
                         .withUpdatedAutoremoveTimeout(autoremoveTimeout)
                         .withUpdatedThemeEmoticon(userFullThemeEmoticon)
-                        .withUpdatedPhoto(photo)
+                        .withUpdatedPhoto(.known(photo))
                         .withUpdatedPremiumGiftOptions(premiumGiftOptions)
                         .withUpdatedVoiceMessagesAvailable(voiceMessagesAvailable)
                 }
@@ -290,6 +290,9 @@ public enum PresentationResourceKey: Int32 {
     case chatKeyboardActionButtonWebAppIcon
     
     case chatGeneralThreadIcon
+    case chatGeneralThreadIncomingIcon
+    case chatGeneralThreadOutgoingIcon
+    case chatGeneralThreadFreeIcon
     
     case uploadToneIcon
 }
@@ -1325,4 +1325,22 @@ public struct PresentationResourcesChat {
             return generateTintedImage(image: UIImage(bundleImageName: "Chat/Info/GeneralIcon"), color: theme.rootController.navigationBar.controlColor)
         })
     }
+    
+    public static func chatGeneralThreadIncomingIcon(_ theme: PresentationTheme) -> UIImage? {
+        return theme.image(PresentationResourceKey.chatGeneralThreadIncomingIcon.rawValue, { theme in
+            return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.incoming.accentTextColor)
+        })
+    }
+    
+    public static func chatGeneralThreadOutgoingIcon(_ theme: PresentationTheme) -> UIImage? {
+        return theme.image(PresentationResourceKey.chatGeneralThreadOutgoingIcon.rawValue, { theme in
+            return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.outgoing.accentTextColor)
+        })
+    }
+    
+    public static func chatGeneralThreadFreeIcon(_ theme: PresentationTheme) -> UIImage? {
+        return theme.image(PresentationResourceKey.chatGeneralThreadFreeIcon.rawValue, { theme in
+            return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GeneralTopicIcon"), color: theme.chat.message.mediaOverlayControlColors.foregroundColor)
+        })
+    }
 }
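Note: the three new accessors follow this file's established pattern: each variant gets its own PresentationResourceKey, and theme.image appears to cache the tinted render per theme so the bundle asset is tinted at most once per key. Call sites then just pick the variant matching the message direction (incoming and theme below are supplied by the caller):

// Choose the General-topic icon tint matching the bubble direction.
let icon: UIImage? = incoming
    ? PresentationResourcesChat.chatGeneralThreadIncomingIcon(theme)
    : PresentationResourcesChat.chatGeneralThreadOutgoingIcon(theme)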
@@ -123,8 +123,8 @@ final class ChatAvatarNavigationNode: ASDisplayNode {
                 return
             }
             let cachedPeerData = peerView.cachedData
-            if let cachedPeerData = cachedPeerData as? CachedUserData {
-                if let photo = cachedPeerData.photo, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
+            if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
+                if let photo = maybePhoto, let video = smallestVideoRepresentation(photo.videoRepresentations), let peerReference = PeerReference(peer._asPeer()) {
                     let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
                     let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
                     let videoContent = NativeVideoContent(id: .profileVideo(videoId, "header"), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)
@@ -501,6 +501,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
             }
         }
         
+        let presentationData = context.sharedContext.currentPresentationData.with { $0 }
         return EmojiPagerContentComponent(
             id: "stickers",
             context: context,
@@ -537,7 +538,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
             itemLayoutType: .detailed,
             itemContentUniqueId: nil,
             warpContentsOnEdges: false,
-            displaySearchWithPlaceholder: "Search Stickers",
+            displaySearchWithPlaceholder: presentationData.strings.StickersSearch_SearchStickersPlaceholder,
             searchInitiallyHidden: false,
             searchIsPlaceholderOnly: true,
             emptySearchResults: nil,
@@ -748,6 +749,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
             return !savedGifs.isEmpty
         }
         
+        let presentationData = context.sharedContext.currentPresentationData.with { $0 }
         let gifItems: Signal<EntityKeyboardGifContent, NoError>
         switch subject {
         case .recent:
@@ -769,7 +771,7 @@ final class ChatEntityKeyboardInputNode: ChatInputNode {
                 items: items,
                 isLoading: false,
                 loadMoreToken: nil,
-                displaySearchWithPlaceholder: "Search GIFs",
+                displaySearchWithPlaceholder: presentationData.strings.GifSearch_SearchGifPlaceholder,
                 searchInitiallyHidden: false
             )
         )
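Note: both keyboard panes previously hard-coded English placeholder strings; they now resolve the localized variants from the current presentation data, fetched once per pane build:

let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let stickersPlaceholder = presentationData.strings.StickersSearch_SearchStickersPlaceholder
let gifsPlaceholder = presentationData.strings.GifSearch_SearchGifPlaceholder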
@@ -1186,6 +1186,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
             }
         }
         
+        var replyMessage: Message?
         for attribute in item.message.attributes {
             if let attribute = attribute as? InlineBotMessageAttribute {
                 var inlineBotNameString: String?
@@ -1205,51 +1206,48 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
                 viaBotApply = viaBotLayout(TextNodeLayoutArguments(attributedString: botString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: max(0, availableContentWidth), height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
                 }
             }
             
-            if let replyAttribute = attribute as? ReplyMessageAttribute, let replyMessage = item.message.associatedMessages[replyAttribute.messageId] {
-                var hasReply = true
-                
-                if case let .replyThread(replyThreadMessage) = item.chatLocation, replyThreadMessage.messageId == replyAttribute.messageId {
-                    hasReply = false
-                }
-                
-                if case .peer = item.chatLocation, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum) {
-                    if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
-                        hasReply = false
-                    }
-                    
-                    threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
-                        presentationData: item.presentationData,
-                        strings: item.presentationData.strings,
-                        context: item.context,
-                        controllerInteraction: item.controllerInteraction,
-                        type: .standalone,
-                        message: replyMessage,
-                        parentMessage: item.message,
-                        constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
-                        animationCache: item.controllerInteraction.presentationContext.animationCache,
-                        animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
-                    ))
-                }
-                
-                if hasReply {
-                    replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
-                        presentationData: item.presentationData,
-                        strings: item.presentationData.strings,
-                        context: item.context,
-                        type: .standalone,
-                        message: replyMessage,
-                        parentMessage: item.message,
-                        constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
-                        animationCache: item.controllerInteraction.presentationContext.animationCache,
-                        animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
-                    ))
-                }
+            if let replyAttribute = attribute as? ReplyMessageAttribute {
+                replyMessage = item.message.associatedMessages[replyAttribute.messageId]
             } else if let attribute = attribute as? ReplyMarkupMessageAttribute, attribute.flags.contains(.inline), !attribute.rows.isEmpty {
                 replyMarkup = attribute
             }
         }
         
+        var hasReply = replyMessage != nil
+        if case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
+            if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
+                hasReply = false
+            }
+            
+            threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
+                presentationData: item.presentationData,
+                strings: item.presentationData.strings,
+                context: item.context,
+                controllerInteraction: item.controllerInteraction,
+                type: .standalone,
+                threadId: item.message.threadId ?? 1,
+                parentMessage: item.message,
+                constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
+                animationCache: item.controllerInteraction.presentationContext.animationCache,
+                animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
+            ))
+        }
+        
+        if let replyMessage = replyMessage, hasReply {
+            replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
+                presentationData: item.presentationData,
+                strings: item.presentationData.strings,
+                context: item.context,
+                type: .standalone,
+                message: replyMessage,
+                parentMessage: item.message,
+                constrainedSize: CGSize(width: availableContentWidth, height: CGFloat.greatestFiniteMagnitude),
+                animationCache: item.controllerInteraction.presentationContext.animationCache,
+                animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
+            ))
+        }
+        
         if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies {
             for attribute in item.message.attributes {
                 if let attribute = attribute as? SourceReferenceMessageAttribute {
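Note: this refactor (applied identically to the sticker item node further below) moves reply/thread layout out of the attribute loop: the loop now only records replyMessage, and a single pass afterwards decides whether to lay out a thread chip (keyed by threadId, with 1 denoting the General topic) and whether the reply header would be redundant because it merely points at the thread's own root message. Condensed, with isForumPeer, message and replyMessage standing in for the item state checked above:

var hasReply = replyMessage != nil
if isForumPeer, let threadId = message.threadId,
   let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
    // The thread chip already links to the topic root; a reply header
    // repeating the same target would be noise, so suppress it.
    hasReply = false
}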
@@ -1737,6 +1737,9 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
             if replyMessage != nil {
                 displayHeader = true
             }
+            if !displayHeader, case .peer = item.chatLocation, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
+                displayHeader = true
+            }
         }
         
         let firstNodeTopPosition: ChatMessageBubbleRelativePosition
@@ -1963,8 +1966,8 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
         }
         
         var hasReply = replyMessage != nil
-        if !isInstantVideo, let replyMessage = replyMessage, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
-            if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
+        if !isInstantVideo, case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
+            if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
                 hasReply = false
             }
             
@@ -1980,7 +1983,7 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
                 context: item.context,
                 controllerInteraction: item.controllerInteraction,
                 type: .bubble(incoming: incoming),
-                message: replyMessage,
+                threadId: item.message.threadId ?? 1,
                 parentMessage: item.message,
                 constrainedSize: CGSize(width: maximumNodeWidth - layoutConstants.text.bubbleInsets.left - layoutConstants.text.bubbleInsets.right, height: CGFloat.greatestFiniteMagnitude),
                 animationCache: item.controllerInteraction.presentationContext.animationCache,
@@ -531,8 +531,8 @@ final class ChatMessageAvatarHeaderNode: ListViewItemHeaderNode {
                 return
             }
             let cachedPeerData = peerView.cachedData
-            if let cachedPeerData = cachedPeerData as? CachedUserData {
-                if let photo = cachedPeerData.photo, let video = photo.videoRepresentations.last, let peerReference = PeerReference(peer) {
+            if let cachedPeerData = cachedPeerData as? CachedUserData, case let .known(maybePhoto) = cachedPeerData.photo {
+                if let photo = maybePhoto, let video = photo.videoRepresentations.last, let peerReference = PeerReference(peer) {
                     let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
                     let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
                     let videoContent = NativeVideoContent(id: .profileVideo(videoId, "\(Int32.random(in: 0 ..< Int32.max))"), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false)
@@ -614,6 +614,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
             }
         }
         
+        var replyMessage: Message?
         for attribute in item.message.attributes {
             if let attribute = attribute as? InlineBotMessageAttribute {
                 var inlineBotNameString: String?
@@ -634,49 +635,48 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
                 }
             }
             
-            if let replyAttribute = attribute as? ReplyMessageAttribute, let replyMessage = item.message.associatedMessages[replyAttribute.messageId] {
-                var hasReply = true
-                
-                if case let .replyThread(replyThreadMessage) = item.chatLocation, replyThreadMessage.messageId == replyAttribute.messageId {
-                    hasReply = false
-                }
-                
-                if case .peer = item.chatLocation, replyMessage.threadId != nil, case let .peer(peerId) = item.chatLocation, peerId == replyMessage.id.peerId, let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum) {
-                    if let threadId = item.message.threadId, Int64(replyMessage.id.id) == threadId {
-                        hasReply = false
-                    }
-                    threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
-                        presentationData: item.presentationData,
-                        strings: item.presentationData.strings,
-                        context: item.context,
-                        controllerInteraction: item.controllerInteraction,
-                        type: .standalone,
-                        message: replyMessage,
-                        parentMessage: item.message,
-                        constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
-                        animationCache: item.controllerInteraction.presentationContext.animationCache,
-                        animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
-                    ))
-                }
-                
-                if hasReply {
-                    replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
-                        presentationData: item.presentationData,
-                        strings: item.presentationData.strings,
-                        context: item.context,
-                        type: .standalone,
-                        message: replyMessage,
-                        parentMessage: item.message,
-                        constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
-                        animationCache: item.controllerInteraction.presentationContext.animationCache,
-                        animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
-                    ))
-                }
+            if let replyAttribute = attribute as? ReplyMessageAttribute {
+                replyMessage = item.message.associatedMessages[replyAttribute.messageId]
             } else if let attribute = attribute as? ReplyMarkupMessageAttribute, attribute.flags.contains(.inline), !attribute.rows.isEmpty {
                 replyMarkup = attribute
             }
         }
         
+        var hasReply = replyMessage != nil
+        if case let .peer(peerId) = item.chatLocation, (peerId == replyMessage?.id.peerId || item.message.threadId == 1), let channel = item.message.peers[item.message.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), item.message.associatedThreadInfo != nil {
+            if let threadId = item.message.threadId, let replyMessage = replyMessage, Int64(replyMessage.id.id) == threadId {
+                hasReply = false
+            }
+            
+            threadInfoApply = makeThreadInfoLayout(ChatMessageThreadInfoNode.Arguments(
+                presentationData: item.presentationData,
+                strings: item.presentationData.strings,
+                context: item.context,
+                controllerInteraction: item.controllerInteraction,
+                type: .standalone,
+                threadId: item.message.threadId ?? 1,
+                parentMessage: item.message,
+                constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
+                animationCache: item.controllerInteraction.presentationContext.animationCache,
+                animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
+            ))
+        }
+        
+        if let replyMessage = replyMessage, hasReply {
+            replyInfoApply = makeReplyInfoLayout(ChatMessageReplyInfoNode.Arguments(
+                presentationData: item.presentationData,
+                strings: item.presentationData.strings,
+                context: item.context,
+                type: .standalone,
+                message: replyMessage,
+                parentMessage: item.message,
+                constrainedSize: CGSize(width: availableWidth, height: CGFloat.greatestFiniteMagnitude),
+                animationCache: item.controllerInteraction.presentationContext.animationCache,
+                animationRenderer: item.controllerInteraction.presentationContext.animationRenderer
+            ))
+        }
+        
         if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies {
             for attribute in item.message.attributes {
                 if let attribute = attribute as? SourceReferenceMessageAttribute {
@@ -183,7 +183,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
         let context: AccountContext
         let controllerInteraction: ChatControllerInteraction
         let type: ChatMessageThreadInfoType
-        let message: Message
+        let threadId: Int64
         let parentMessage: Message
         let constrainedSize: CGSize
         let animationCache: AnimationCache?
@@ -195,7 +195,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
             context: AccountContext,
             controllerInteraction: ChatControllerInteraction,
             type: ChatMessageThreadInfoType,
-            message: Message,
+            threadId: Int64,
             parentMessage: Message,
             constrainedSize: CGSize,
             animationCache: AnimationCache?,
@@ -206,7 +206,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
             self.context = context
             self.controllerInteraction = controllerInteraction
             self.type = type
-            self.message = message
+            self.threadId = threadId
             self.parentMessage = parentMessage
             self.constrainedSize = constrainedSize
             self.animationCache = animationCache
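Note: ChatMessageThreadInfoNode.Arguments now carries the bare threadId instead of a resolved reply Message, so the chip can be laid out even when no associated message is loaded; callers fall back to 1, the General topic, when the message has no threadId. Caller side (constrainedSize and the animation parameters come from the surrounding layout pass):

let arguments = ChatMessageThreadInfoNode.Arguments(
    presentationData: item.presentationData,
    strings: item.presentationData.strings,
    context: item.context,
    controllerInteraction: item.controllerInteraction,
    type: .standalone,
    threadId: item.message.threadId ?? 1, // 1 = General topic
    parentMessage: item.message,
    constrainedSize: constrainedSize,
    animationCache: animationCache,
    animationRenderer: animationRenderer
)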
@ -318,7 +318,6 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
|
|||||||
var topicIconId: Int64?
|
var topicIconId: Int64?
|
||||||
var topicIconColor: Int32 = 0
|
var topicIconColor: Int32 = 0
|
||||||
if let _ = arguments.parentMessage.threadId, let channel = arguments.parentMessage.peers[arguments.parentMessage.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), let threadInfo = arguments.parentMessage.associatedThreadInfo {
|
if let _ = arguments.parentMessage.threadId, let channel = arguments.parentMessage.peers[arguments.parentMessage.id.peerId] as? TelegramChannel, channel.flags.contains(.isForum), let threadInfo = arguments.parentMessage.associatedThreadInfo {
|
||||||
|
|
||||||
topicTitle = threadInfo.title
|
topicTitle = threadInfo.title
|
||||||
topicIconId = threadInfo.icon
|
topicIconId = threadInfo.icon
|
||||||
topicIconColor = threadInfo.iconColor
|
topicIconColor = threadInfo.iconColor
|
||||||
@ -327,9 +326,10 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
|
|||||||
let backgroundColor: UIColor
|
let backgroundColor: UIColor
|
||||||
let textColor: UIColor
|
let textColor: UIColor
|
||||||
let arrowIcon: UIImage?
|
let arrowIcon: UIImage?
|
||||||
|
let generalThreadIcon: UIImage?
|
||||||
switch arguments.type {
|
switch arguments.type {
|
||||||
case let .bubble(incoming):
|
case let .bubble(incoming):
|
||||||
if topicIconId == nil, topicIconColor != 0, incoming {
|
if topicIconId == nil, topicIconColor != 0, incoming, arguments.threadId != 1 {
|
||||||
let colors = topicIconColors(for: topicIconColor)
|
let colors = topicIconColors(for: topicIconColor)
|
||||||
backgroundColor = UIColor(rgb: colors.0.last ?? 0x000000)
|
backgroundColor = UIColor(rgb: colors.0.last ?? 0x000000)
|
||||||
textColor = UIColor(rgb: colors.1.first ?? 0x000000)
|
textColor = UIColor(rgb: colors.1.first ?? 0x000000)
|
||||||
@ -345,13 +345,15 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
|
|||||||
arrowIcon = PresentationResourcesChat.chatBubbleArrowOutgoingImage(arguments.presentationData.theme.theme)
|
arrowIcon = PresentationResourcesChat.chatBubbleArrowOutgoingImage(arguments.presentationData.theme.theme)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
generalThreadIcon = incoming ? PresentationResourcesChat.chatGeneralThreadIncomingIcon(arguments.presentationData.theme.theme) : PresentationResourcesChat.chatGeneralThreadOutgoingIcon(arguments.presentationData.theme.theme)
|
||||||
case .standalone:
|
case .standalone:
|
||||||
textColor = .white
|
textColor = arguments.presentationData.theme.theme.chat.message.mediaOverlayControlColors.foregroundColor
|
||||||
backgroundColor = .white
|
backgroundColor = .white
|
||||||
arrowIcon = PresentationResourcesChat.chatBubbleArrowFreeImage(arguments.presentationData.theme.theme)
|
arrowIcon = PresentationResourcesChat.chatBubbleArrowFreeImage(arguments.presentationData.theme.theme)
|
||||||
|
generalThreadIcon = PresentationResourcesChat.chatGeneralThreadFreeIcon(arguments.presentationData.theme.theme)
|
||||||
}
|
}
|
||||||
|
|
||||||
let placeholderColor: UIColor = arguments.message.effectivelyIncoming(arguments.context.account.peerId) ? arguments.presentationData.theme.theme.chat.message.incoming.mediaPlaceholderColor : arguments.presentationData.theme.theme.chat.message.outgoing.mediaPlaceholderColor
|
let placeholderColor: UIColor = arguments.parentMessage.effectivelyIncoming(arguments.context.account.peerId) ? arguments.presentationData.theme.theme.chat.message.incoming.mediaPlaceholderColor : arguments.presentationData.theme.theme.chat.message.outgoing.mediaPlaceholderColor
|
||||||
|
|
||||||
let text = NSAttributedString(string: topicTitle, font: textFont, textColor: textColor)
|
let text = NSAttributedString(string: topicTitle, font: textFont, textColor: textColor)
|
||||||
|
|
||||||
@ -390,9 +392,7 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
node.pressed = {
|
node.pressed = {
|
||||||
if let threadId = arguments.message.threadId {
|
arguments.controllerInteraction.navigateToThreadMessage(arguments.parentMessage.id.peerId, arguments.threadId, arguments.parentMessage.id)
|
||||||
arguments.controllerInteraction.navigateToThreadMessage(arguments.parentMessage.id.peerId, threadId, arguments.parentMessage.id)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if node.lineRects != lineRects {
|
if node.lineRects != lineRects {
|
||||||
@ -480,7 +480,9 @@ class ChatMessageThreadInfoNode: ASDisplayNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let titleTopicIconContent: EmojiStatusComponent.Content
|
let titleTopicIconContent: EmojiStatusComponent.Content
|
||||||
if let fileId = topicIconId, fileId != 0 {
|
if arguments.threadId == 1 {
|
||||||
|
titleTopicIconContent = .image(image: generalThreadIcon)
|
||||||
|
} else if let fileId = topicIconId, fileId != 0 {
|
||||||
titleTopicIconContent = .animation(content: .customEmoji(fileId: fileId), size: CGSize(width: 36.0, height: 36.0), placeholderColor: arguments.presentationData.theme.theme.list.mediaPlaceholderColor, themeColor: arguments.presentationData.theme.theme.list.itemAccentColor, loopMode: .count(1))
|
titleTopicIconContent = .animation(content: .customEmoji(fileId: fileId), size: CGSize(width: 36.0, height: 36.0), placeholderColor: arguments.presentationData.theme.theme.list.mediaPlaceholderColor, themeColor: arguments.presentationData.theme.theme.list.itemAccentColor, loopMode: .count(1))
|
||||||
} else {
|
} else {
|
||||||
titleTopicIconContent = .topic(title: String(topicTitle.prefix(1)), color: topicIconColor, size: CGSize(width: 22.0, height: 22.0))
|
titleTopicIconContent = .topic(title: String(topicTitle.prefix(1)), color: topicIconColor, size: CGSize(width: 22.0, height: 22.0))
|
||||||
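A self-contained sketch of the icon-selection rule the hunk above introduces: thread id 1 is the forum's "General" topic and always gets a static image, a topic with a custom-emoji file id gets the animated icon, and anything else falls back to a colored one-letter monogram. The enum and helper below are illustrative stand-ins, not the module's real types.

import UIKit

enum TopicIconContent {
    case image(UIImage?)
    case customEmojiAnimation(fileId: Int64)
    case monogram(letter: String, color: Int32)
}

func topicIconContent(threadId: Int64, iconFileId: Int64?, iconColor: Int32, title: String, generalIcon: UIImage?) -> TopicIconContent {
    if threadId == 1 {
        // The General topic has no per-topic emoji; it uses a fixed icon.
        return .image(generalIcon)
    } else if let fileId = iconFileId, fileId != 0 {
        // Topics with a custom emoji icon render it as an animation.
        return .customEmojiAnimation(fileId: fileId)
    } else {
        // Fallback: a single-letter monogram tinted with the topic color.
        return .monogram(letter: String(title.prefix(1)), color: iconColor)
    }
}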
@@ -1714,7 +1714,14 @@ struct ChatRecentActionsEntry: Comparable, Identifiable {
                 "Channel.AdminLog.TopicRemovedIcon" = "%1$@ removed topic %2$@ icon";*/

                 let authorTitle: String = author.flatMap(EnginePeer.init)?.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder) ?? ""
-                if prevInfo.isClosed != newInfo.isClosed {
+                if prevInfo.isHidden != newInfo.isHidden {
+                    appendAttributedText(text: newInfo.isHidden ? self.presentationData.strings.Channel_AdminLog_TopicHidden(authorTitle, newInfo.info.title) : self.presentationData.strings.Channel_AdminLog_TopicUnhidden(authorTitle, newInfo.info.title), generateEntities: { index in
+                        if index == 0, let author = author {
+                            return [.TextMention(peerId: author.id)]
+                        }
+                        return []
+                    }, to: &text, entities: &entities)
+                } else if prevInfo.isClosed != newInfo.isClosed {
                     appendAttributedText(text: newInfo.isClosed ? self.presentationData.strings.Channel_AdminLog_TopicClosed(authorTitle, newInfo.info.title) : self.presentationData.strings.Channel_AdminLog_TopicReopened(authorTitle, newInfo.info.title), generateEntities: { index in
                         if index == 0, let author = author {
                             return [.TextMention(peerId: author.id)]
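A minimal sketch of the branch ordering above, with simplified types: when both fields differ, the hide/unhide transition wins because it is checked before the close/reopen transition. The struct and string keys are illustrative.

struct TopicState {
    var isHidden: Bool
    var isClosed: Bool
}

func adminLogEventKey(prev: TopicState, new: TopicState) -> String? {
    if prev.isHidden != new.isHidden {
        return new.isHidden ? "TopicHidden" : "TopicUnhidden"
    } else if prev.isClosed != new.isClosed {
        return new.isClosed ? "TopicClosed" : "TopicReopened"
    }
    return nil // no visibility or closed-state change to log
}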
@@ -8,6 +8,7 @@ import Postbox
 import TelegramAudio
 import AccountContext
 import AVKit
+import UniversalMediaPlayer

 public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInPictureSampleBufferPlaybackDelegate {
     public let content: UniversalVideoContent
@@ -37,6 +38,9 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
     public var customClose: (() -> Void)?
     public var controlsAreShowingUpdated: ((Bool) -> Void)?

+    private var statusDisposable: Disposable?
+    private var status: MediaPlayerStatus?
+
     public init(postbox: Postbox, audioSession: ManagedAudioSession, manager: UniversalVideoManager, content: UniversalVideoContent, shouldBeDismissed: Signal<Bool, NoError> = .single(false), expand: @escaping () -> Void, close: @escaping () -> Void) {
         self.content = content
         self.defaultExpand = expand
@@ -124,6 +128,16 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
             strongSelf.dismiss()
             closeImpl?()
         })
+
+        self.statusDisposable = (self.videoNode.status
+        |> deliverOnMainQueue).start(next: { [weak self] status in
+            self?.status = status
+        })
+    }
+
+    deinit {
+        self.shouldBeDismissedDisposable?.dispose()
+        self.statusDisposable?.dispose()
     }

     override public func didLoad() {
@@ -194,7 +208,10 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode, AVPictureInP
     }

     public func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
-        return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 10.0, preferredTimescale: CMTimeScale(30.0)))
+        guard let status = self.status else {
+            return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(30.0)))
+        }
+        return CMTimeRange(start: CMTime(seconds: status.timestamp, preferredTimescale: CMTimeScale(30.0)), duration: CMTime(seconds: status.duration, preferredTimescale: CMTimeScale(30.0)))
     }

     public func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
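A sketch of the delegate change above: instead of reporting a hard-coded 10-second window, the picture-in-picture time range is derived from the last observed player status. PlayerStatus here stands in for the module's MediaPlayerStatus; only CoreMedia calls are real.

import CoreMedia

struct PlayerStatus {
    var timestamp: Double
    var duration: Double
}

func pipTimeRange(for status: PlayerStatus?) -> CMTimeRange {
    guard let status = status else {
        // No status observed yet: report an empty range rather than a fake one.
        return CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: 30), duration: CMTime(seconds: 0.0, preferredTimescale: 30))
    }
    return CMTimeRange(start: CMTime(seconds: status.timestamp, preferredTimescale: 30), duration: CMTime(seconds: status.duration, preferredTimescale: 30))
}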
@@ -400,9 +400,22 @@ public final class OngoingGroupCallContext {
         public var incomingVideoStats: [String: IncomingVideoStats]
     }

+    public final class Tone {
+        public let samples: Data
+        public let sampleRate: Int
+        public let loopCount: Int
+
+        public init(samples: Data, sampleRate: Int, loopCount: Int) {
+            self.samples = samples
+            self.sampleRate = sampleRate
+            self.loopCount = loopCount
+        }
+    }
+
     private final class Impl {
         let queue: Queue
         let context: GroupCallThreadLocalContext
+        let audioDevice: SharedCallAudioDevice?

         let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))

@@ -421,6 +434,13 @@ public final class OngoingGroupCallContext {
         init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
             self.queue = queue

+            self.audioDevice = nil
+            /*#if DEBUG
+            self.audioDevice = SharedCallAudioDevice(disableRecording: disableAudioInput)
+            #else
+            self.audioDevice = nil
+            #endif*/
+
             var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
             var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?

@@ -526,7 +546,8 @@ public final class OngoingGroupCallContext {
                 enableNoiseSuppression: enableNoiseSuppression,
                 disableAudioInput: disableAudioInput,
                 preferX264: preferX264,
-                logPath: logPath
+                logPath: logPath,
+                audioDevice: self.audioDevice
             )

             let queue = self.queue
@@ -580,6 +601,7 @@ public final class OngoingGroupCallContext {
                     return
                 }
                 #if os(iOS)
+                self.audioDevice?.setManualAudioSessionIsActive(isActive)
                 self.context.setManualAudioSessionIsActive(isActive)
                 #endif
             }))
@@ -884,6 +906,17 @@ public final class OngoingGroupCallContext {
                 completion(Stats(incomingVideoStats: incomingVideoStats))
             })
         }
+
+        func setTone(tone: Tone?) {
+            let mappedTone = tone.flatMap { tone in
+                CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
+            }
+            if let audioDevice = self.audioDevice {
+                audioDevice.setTone(mappedTone)
+            } else {
+                self.context.setTone(mappedTone)
+            }
+        }
     }

     private let queue = Queue()
@@ -1075,4 +1108,10 @@ public final class OngoingGroupCallContext {
             impl.getStats(completion: completion)
         }
     }
+
+    public func setTone(tone: Tone?) {
+        self.impl.with { impl in
+            impl.setTone(tone: tone)
+        }
+    }
 }
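Illustrative only: how a caller might synthesize a short sine beep as 16-bit PCM and hand it to the new group-call setTone API. The 48 kHz rate, frequency, and amplitude are assumptions, not values taken from the patch; the tone payload is raw Int16 samples, matching what the native side unpacks.

import Foundation

func makeBeepSamples(frequency: Double = 440.0, sampleRate: Int = 48000, seconds: Double = 0.2) -> Data {
    let count = Int(Double(sampleRate) * seconds)
    var samples = [Int16](repeating: 0, count: count)
    for i in 0 ..< count {
        let t = Double(i) / Double(sampleRate)
        samples[i] = Int16(sin(2.0 * .pi * frequency * t) * 30000.0)
    }
    // Pack the Int16 samples as raw bytes, the layout the tone consumer expects.
    return samples.withUnsafeBufferPointer { Data(buffer: $0) }
}

// Hypothetical call site:
// let tone = OngoingGroupCallContext.Tone(samples: makeBeepSamples(), sampleRate: 48000, loopCount: 1)
// groupCallContext.setTone(tone: tone)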
@@ -706,6 +706,40 @@ public final class OngoingCallContext {
         }
     }

+    public final class Tone {
+        public let samples: Data
+        public let sampleRate: Int
+        public let loopCount: Int
+
+        public init(samples: Data, sampleRate: Int, loopCount: Int) {
+            self.samples = samples
+            self.sampleRate = sampleRate
+            self.loopCount = loopCount
+        }
+    }
+
+    public final class AudioDevice {
+        let impl: SharedCallAudioDevice
+
+        public static func create() -> AudioDevice? {
+            return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
+        }
+
+        private init(impl: SharedCallAudioDevice) {
+            self.impl = impl
+        }
+
+        public func setIsAudioSessionActive(_ isActive: Bool) {
+            self.impl.setManualAudioSessionIsActive(isActive)
+        }
+
+        public func setTone(tone: Tone?) {
+            self.impl.setTone(tone.flatMap { tone in
+                CallAudioTone(samples: tone.samples, sampleRate: tone.sampleRate, loopCount: tone.loopCount)
+            })
+        }
+    }
+
     public static func setupAudioSession() {
         OngoingCallThreadLocalContextWebrtc.setupAudioSession()
     }
@@ -751,7 +785,7 @@ public final class OngoingCallContext {

     private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?

-    private let audioDevice: SharedCallAudioDevice?
+    private let audioDevice: AudioDevice?

     public static func versions(includeExperimental: Bool, includeReference: Bool) -> [(version: String, supportsVideo: Bool)] {
         #if os(iOS) && DEBUG && false
@@ -771,7 +805,7 @@ public final class OngoingCallContext {
         }
     }

-    public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?) {
+    public init(account: Account, callSessionManager: CallSessionManager, callId: CallId, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, enableTCP: Bool, enableStunMarking: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String, preferredVideoCodec: String?, audioDevice: AudioDevice?) {
         let _ = setupLogs
         OngoingCallThreadLocalContext.applyServerConfig(serializedData)

@@ -782,12 +816,6 @@ public final class OngoingCallContext {
         self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
         let logPath = self.logPath

-        let audioDevice: SharedCallAudioDevice?
-        if !"".isEmpty {
-            audioDevice = SharedCallAudioDevice()
-        } else {
-            audioDevice = nil
-        }
         self.audioDevice = audioDevice

         let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
@@ -910,7 +938,7 @@ public final class OngoingCallContext {
                     callSessionManager.sendSignalingData(internalId: internalId, data: data)
                 }
             }
-        }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice)
+        }, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", audioDevice: audioDevice?.impl)

         strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
         context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
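A minimal sketch of the ownership model the new audioDevice parameter implies: the device outlives any single call context and is injected rather than created internally (the old in-init construction is deleted above). All names below are illustrative stand-ins, not the module's real types.

import Foundation

final class SharedDevice {
    func setActive(_ active: Bool) { print("session active: \(active)") }
}

final class CallContext {
    private let device: SharedDevice?

    init(device: SharedDevice?) {
        // The context borrows the device; it does not create or own it.
        self.device = device
    }

    func activate() { device?.setActive(true) }
}

let shared = SharedDevice()            // created once, e.g. when the call UI appears
let context = CallContext(device: shared)
context.activate()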
@@ -11,14 +11,26 @@
 #define UIView NSView
 #endif

+@interface CallAudioTone : NSObject
+
+@property (nonatomic, strong, readonly) NSData * _Nonnull samples;
+@property (nonatomic, readonly) NSInteger sampleRate;
+@property (nonatomic, readonly) NSInteger loopCount;
+
+- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount;
+
+@end
+
 @interface SharedCallAudioDevice : NSObject

-- (instancetype _Nonnull)init;
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;

 + (void)setupAudioSession;

 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;

+- (void)setTone:(CallAudioTone * _Nullable)tone;
+
 @end

 @interface OngoingCallConnectionDescriptionWebrtc : NSObject
@@ -385,12 +397,15 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
     enableNoiseSuppression:(bool)enableNoiseSuppression
     disableAudioInput:(bool)disableAudioInput
     preferX264:(bool)preferX264
-    logPath:(NSString * _Nonnull)logPath;
+    logPath:(NSString * _Nonnull)logPath
+    audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice;

 - (void)stop;

 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;

+- (void)setTone:(CallAudioTone * _Nullable)tone;
+
 - (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;

 - (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
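The CallAudioTone container declared above carries raw 16-bit PCM in an NSData, and the implementation unpacks it as length/2 Int16 values, so the byte count must be even. A hedged Swift helper that packs samples into that layout (the little-endian assumption is mine; the name is illustrative):

import Foundation

func packPCM(_ samples: [Int16]) -> Data {
    var data = Data(capacity: samples.count * MemoryLayout<Int16>.size)
    for sample in samples {
        // Append each sample as two little-endian bytes.
        withUnsafeBytes(of: sample.littleEndian) { data.append(contentsOf: $0) }
    }
    assert(data.count % 2 == 0, "tone payload must be whole Int16 samples")
    return data
}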
@@ -43,6 +43,28 @@
 #import "platform/darwin/TGRTCCVPixelBuffer.h"
 #include "rtc_base/logging.h"

+@implementation CallAudioTone
+
+- (instancetype _Nonnull)initWithSamples:(NSData * _Nonnull)samples sampleRate:(NSInteger)sampleRate loopCount:(NSInteger)loopCount {
+    self = [super init];
+    if (self != nil) {
+        _samples = samples;
+        _sampleRate = sampleRate;
+        _loopCount = loopCount;
+    }
+    return self;
+}
+
+- (std::shared_ptr<tgcalls::CallAudioTone>)asTone {
+    std::vector<int16_t> data;
+    data.resize(_samples.length / 2);
+    memcpy(data.data(), _samples.bytes, _samples.length);
+
+    return std::make_shared<tgcalls::CallAudioTone>(std::move(data), (int)_sampleRate, (int)_loopCount);
+}
+
+@end
+
 namespace tgcalls {

 class SharedAudioDeviceModule {
@@ -50,51 +72,67 @@ public:
     virtual ~SharedAudioDeviceModule() = default;

 public:
-    virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() = 0;
+    virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() = 0;
+    virtual void start() = 0;
 };

 }

 class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
 public:
-    SharedAudioDeviceModuleImpl() {
-        if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
-            _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
-        } else {
-            tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
-                _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
-            });
-        }
+    SharedAudioDeviceModuleImpl(bool disableAudioInput) {
+        RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
+        _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
     }

     virtual ~SharedAudioDeviceModuleImpl() override {
         if (tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent()) {
+            if (_audioDeviceModule->Playing()) {
+                _audioDeviceModule->StopPlayout();
+                _audioDeviceModule->StopRecording();
+            }
             _audioDeviceModule = nullptr;
         } else {
             tgcalls::StaticThreads::getThreads()->getWorkerThread()->BlockingCall([&]() {
+                if (_audioDeviceModule->Playing()) {
+                    _audioDeviceModule->StopPlayout();
+                    _audioDeviceModule->StopRecording();
+                }
                 _audioDeviceModule = nullptr;
             });
         }
     }

 public:
-    virtual rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule() override {
+    virtual rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> audioDeviceModule() override {
         return _audioDeviceModule;
     }

+    virtual void start() override {
+        RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
+
+        _audioDeviceModule->Init();
+        if (!_audioDeviceModule->Playing()) {
+            _audioDeviceModule->InitPlayout();
+            //_audioDeviceModule->InitRecording();
+            _audioDeviceModule->InternalStartPlayout();
+            //_audioDeviceModule->InternalStartRecording();
+        }
+    }
+
 private:
-    rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
+    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _audioDeviceModule;
 };

 @implementation SharedCallAudioDevice {
     std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
 }

-- (instancetype _Nonnull)init {
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
     self = [super init];
     if (self != nil) {
-        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), []() mutable {
-            return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl());
+        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
+            return (tgcalls::SharedAudioDeviceModule *)(new SharedAudioDeviceModuleImpl(disableRecording));
         }));
     }
     return self;
@@ -104,6 +142,12 @@ private:
     _audioDeviceModule.reset();
 }

+- (void)setTone:(CallAudioTone * _Nullable)tone {
+    _audioDeviceModule->perform([tone](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
+        audioDeviceModule->audioDeviceModule()->setTone([tone asTone]);
+    });
+}
+
 - (std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>>)getAudioDeviceModule {
     return _audioDeviceModule;
 }
@@ -128,6 +172,12 @@ private:
         [[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
     }
     [RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
+
+    if (isAudioSessionActive) {
+        _audioDeviceModule->perform([](tgcalls::SharedAudioDeviceModule *audioDeviceModule) {
+            audioDeviceModule->start();
+        });
+    }
 }

 @end
@@ -800,6 +850,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
     bool _useManualAudioSessionControl;
     SharedCallAudioDevice *_audioDevice;

+    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
+    rtc::Thread *_currentAudioDeviceModuleThread;
+
     OngoingCallNetworkTypeWebrtc _networkType;
     NSTimeInterval _callReceiveTimeout;
     NSTimeInterval _callRingTimeout;
@@ -1213,11 +1266,20 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             }
         }];
     },
-    .createAudioDeviceModule = [audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+    .createAudioDeviceModule = [weakSelf, queue, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
         if (audioDeviceModule) {
             return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
         } else {
-            return rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
+            rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
+            auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
+            [queue dispatch:^{
+                __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
+                if (strongSelf) {
+                    strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
+                    strongSelf->_currentAudioDeviceModule = resultModule;
+                }
+            }];
+            return resultModule;
         }
     }
 });
@@ -1232,6 +1294,14 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
         InternalVoipLoggingFunction(@"OngoingCallThreadLocalContext: dealloc");
     }

+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+
     if (_tgVoip != NULL) {
         [self stop:nil];
     }
@@ -1537,6 +1607,11 @@ private:

     int _nextSinkId;
     NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;
+
+    rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
+    rtc::Thread *_currentAudioDeviceModuleThread;
+
+    SharedCallAudioDevice * _audioDevice;
 }

 @end
@@ -1558,7 +1633,8 @@ private:
     enableNoiseSuppression:(bool)enableNoiseSuppression
     disableAudioInput:(bool)disableAudioInput
     preferX264:(bool)preferX264
-    logPath:(NSString * _Nonnull)logPath {
+    logPath:(NSString * _Nonnull)logPath
+    audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -1570,6 +1646,12 @@ private:
         _networkStateUpdated = [networkStateUpdated copy];
         _videoCapturer = videoCapturer;

+        _audioDevice = audioDevice;
+        std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> audioDeviceModule;
+        if (_audioDevice) {
+            audioDeviceModule = [_audioDevice getAudioDeviceModule];
+        }
+
         tgcalls::VideoContentType _videoContentType;
         switch (videoContentType) {
             case OngoingGroupCallVideoContentTypeGeneric: {
@@ -1777,19 +1859,64 @@ private:

             return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
         },
-        .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit
+        .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
+        .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+            if (audioDeviceModule) {
+                return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
+            } else {
+                rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
+                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+                [queue dispatch:^{
+                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
+                    if (strongSelf) {
+                        strongSelf->_currentAudioDeviceModuleThread = audioDeviceModuleThread;
+                        strongSelf->_currentAudioDeviceModule = resultModule;
+                    }
+                }];
+                return resultModule;
+            }
+        }
     }));
     }
     return self;
 }

+- (void)dealloc {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+}
+
 - (void)stop {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        _currentAudioDeviceModule = nullptr;
+        _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule]() {
+        });
+        _currentAudioDeviceModuleThread = nullptr;
+    }
+
     if (_instance) {
         _instance->stop();
         _instance.reset();
     }
 }

+- (void)setTone:(CallAudioTone * _Nullable)tone {
+    if (_currentAudioDeviceModuleThread) {
+        auto currentAudioDeviceModule = _currentAudioDeviceModule;
+        if (currentAudioDeviceModule) {
+            _currentAudioDeviceModuleThread->PostTask([currentAudioDeviceModule, tone]() {
+                currentAudioDeviceModule->setTone([tone asTone]);
+            });
+        }
+    }
+}
+
 - (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
     if (isAudioSessionActive) {
         [[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
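The dealloc/stop changes above hand the last strong reference to the audio device module back to the thread that created it (via an empty PostTask capturing the reference), so the release never races with audio callbacks on that thread. The same shape in plain Swift, as a hedged analogy using a dispatch queue rather than the tgcalls API:

import Foundation

final class OwnedResource {
    deinit { print("released on owning queue") }
}

final class Holder {
    private let owningQueue = DispatchQueue(label: "resource.owner")
    private var resource: OwnedResource? = OwnedResource()

    func teardown() {
        // Move the reference into a closure that runs on the owning queue;
        // the stored property is cleared immediately, and the last reference
        // dies inside the closure, on the owning queue.
        let resource = self.resource
        self.resource = nil
        owningQueue.async {
            _ = resource
        }
    }
}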
@@ -1 +1 @@
-Subproject commit 97d616abe1dae6214b11eae19b3ec25cb88d98ce
+Subproject commit e7032ab6f7b305cbd1914e2d422646c2fd132b49