[WIP] Dynamic video streaming

Isaac 2024-09-18 01:04:29 +08:00
parent 7cbb5c784d
commit 923587b0da
141 changed files with 3365 additions and 1995 deletions

View File

@ -1707,7 +1707,7 @@ private final class NotificationServiceHandler {
} else if let file = media as? TelegramMediaFile {
resource = file.resource
for attribute in file.attributes {
if case let .Video(_, _, _, preloadSize, _) = attribute {
if case let .Video(_, _, _, preloadSize, _, _) = attribute {
fetchSize = preloadSize.flatMap(Int64.init)
}
}
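
A minimal sketch of the widened pattern, assuming the new sixth associated value is the videoCodec field spelled out by the constructor calls later in this commit; the variable names are illustrative:

for attribute in file.attributes {
    if case let .Video(duration, size, flags, preloadSize, coverTime, videoCodec) = attribute {
        // preloadSize still drives the notification prefetch size; the other
        // bindings are shown only to document the new field order.
        fetchSize = preloadSize.flatMap(Int64.init)
        _ = (duration, size, flags, coverTime, videoCodec)
    }
}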

View File

@ -20,8 +20,8 @@ public func freeMediaFileResourceInteractiveFetched(account: Account, userLocati
return fetchedMediaResource(mediaBox: account.postbox.mediaBox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), reference: fileReference.resourceReference(resource))
}
public func freeMediaFileResourceInteractiveFetched(postbox: Postbox, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, resource: MediaResource) -> Signal<FetchResourceSourceType, FetchResourceError> {
return fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), reference: fileReference.resourceReference(resource))
public func freeMediaFileResourceInteractiveFetched(postbox: Postbox, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, resource: MediaResource, range: (Range<Int64>, MediaBoxFetchPriority)? = nil) -> Signal<FetchResourceSourceType, FetchResourceError> {
return fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), reference: fileReference.resourceReference(resource), range: range)
}
public func cancelFreeMediaFileInteractiveFetch(account: Account, file: TelegramMediaFile) {
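
A hedged call sketch for the new optional range parameter, prefetching only the beginning of a streamable file; the 2 MB figure and the .default priority case are assumptions made for illustration:

let _ = freeMediaFileResourceInteractiveFetched(
    postbox: postbox,
    userLocation: .other,
    fileReference: fileReference,
    resource: fileReference.media.resource,
    range: (0 ..< Int64(2 * 1024 * 1024), .default)
).start()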

View File

@ -18,7 +18,7 @@ public func isMediaStreamable(message: Message, media: TelegramMediaFile) -> Boo
return false
}
for attribute in media.attributes {
if case let .Video(_, _, flags, _, _) = attribute {
if case let .Video(_, _, flags, _, _, _) = attribute {
if flags.contains(.supportsStreaming) {
return true
}
@ -41,7 +41,7 @@ public func isMediaStreamable(media: TelegramMediaFile) -> Bool {
return false
}
for attribute in media.attributes {
if case let .Video(_, _, flags, _, _) = attribute {
if case let .Video(_, _, flags, _, _, _) = attribute {
if flags.contains(.supportsStreaming) {
return true
}
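
Call sites of both overloads keep their shape; a minimal sketch of the usual check, with message and file as illustrative locals:

if isMediaStreamable(message: message, media: file) {
    // playback can start before the download completes
}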

View File

@ -413,6 +413,7 @@ public protocol PresentationGroupCall: AnyObject {
var members: Signal<PresentationGroupCallMembers?, NoError> { get }
var audioLevels: Signal<[(EnginePeer.Id, UInt32, Float, Bool)], NoError> { get }
var myAudioLevel: Signal<Float, NoError> { get }
var myAudioLevelAndSpeaking: Signal<(Float, Bool), NoError> { get }
var isMuted: Signal<Bool, NoError> { get }
var isNoiseSuppressionEnabled: Signal<Bool, NoError> { get }
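
A hedged subscription sketch for the new combined signal; the handler body is illustrative:

let levelDisposable = call.myAudioLevelAndSpeaking.start(next: { level, isSpeaking in
    // level is the current microphone level, isSpeaking flags voice activity
    print("audio level: \(level), speaking: \(isSpeaking)")
})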

View File

@ -10,6 +10,11 @@ import UniversalMediaPlayer
import AVFoundation
import RangeSet
public enum UniversalVideoContentVideoQuality: Equatable {
case auto
case quality(Int)
}
public protocol UniversalVideoContentNode: AnyObject {
var ready: Signal<Void, NoError> { get }
var status: Signal<MediaPlayerStatus, NoError> { get }
@ -29,6 +34,8 @@ public protocol UniversalVideoContentNode: AnyObject {
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool)
func setBaseRate(_ baseRate: Double)
func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality)
func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])?
func addPlaybackCompleted(_ f: @escaping () -> Void) -> Int
func removePlaybackCompleted(_ index: Int)
func fetchControl(_ control: UniversalVideoNodeFetchControl)
@ -329,6 +336,24 @@ public final class UniversalVideoNode: ASDisplayNode {
})
}
public func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {
contentNode.setVideoQuality(videoQuality)
}
})
}
public func videoQualityState() -> (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])? {
var result: (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])?
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode {
result = contentNode.videoQualityState()
}
})
return result
}
public func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd = .loopDisablingSound) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {

View File

@ -206,7 +206,7 @@ public final class AvatarVideoNode: ASDisplayNode {
self.backgroundNode.image = nil
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false, storeAfterDownload: nil)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()
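
A minimal sketch of the extended TelegramMediaFile constructor as the call sites in this commit use it, with the two new fields passed as empty or nil defaults:

let placeholderFile = TelegramMediaFile(
    fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0),
    partialReference: nil,
    resource: EmptyMediaResource(),
    previewRepresentations: [],
    videoThumbnails: [],
    immediateThumbnailData: nil,
    mimeType: "video/mp4",
    size: nil,
    attributes: [.Video(duration: 0, size: PixelDimensions(width: 1, height: 1), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)],
    alternativeRepresentations: []
)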

View File

@ -460,7 +460,7 @@ public final class ChatImportActivityScreen: ViewController {
if let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) {
let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black)
let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil)])
let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: [])
let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)

View File

@ -4749,7 +4749,7 @@ public final class ChatListSearchShimmerNode: ASDisplayNode {
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: true, isGlobalSearchResult: true)
case .files:
var media: [EngineMedia] = []
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: 0, attributes: [.FileName(fileName: "Text.txt")])))
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: 0, attributes: [.FileName(fileName: "Text.txt")], alternativeRepresentations: [])))
let message = EngineMessage(
stableId: 0,
stableVersion: 0,
@ -4780,7 +4780,7 @@ public final class ChatListSearchShimmerNode: ASDisplayNode {
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: false, isGlobalSearchResult: true)
case .music:
var media: [EngineMedia] = []
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: [.Audio(isVoice: false, duration: 0, title: nil, performer: nil, waveform: Data())])))
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: [.Audio(isVoice: false, duration: 0, title: nil, performer: nil, waveform: Data())], alternativeRepresentations: [])))
let message = EngineMessage(
stableId: 0,
stableVersion: 0,
@ -4811,7 +4811,7 @@ public final class ChatListSearchShimmerNode: ASDisplayNode {
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: false, isGlobalSearchResult: true)
case .voice:
var media: [EngineMedia] = []
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: [.Audio(isVoice: true, duration: 0, title: nil, performer: nil, waveform: Data())])))
media.append(.file(TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: [.Audio(isVoice: true, duration: 0, title: nil, performer: nil, waveform: Data())], alternativeRepresentations: [])))
let message = EngineMessage(
stableId: 0,
stableVersion: 0,

View File

@ -2584,7 +2584,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
case let .preview(dimensions, immediateThumbnailData, videoDuration):
if let immediateThumbnailData {
if let videoDuration {
let thumbnailMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: index), partialReference: nil, resource: EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Video(duration: Double(videoDuration), size: dimensions ?? PixelDimensions(width: 1, height: 1), flags: [], preloadSize: nil, coverTime: nil)])
let thumbnailMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: index), partialReference: nil, resource: EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Video(duration: Double(videoDuration), size: dimensions ?? PixelDimensions(width: 1, height: 1), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: [])
contentImageSpecs.append(ContentImageSpec(message: message, media: .file(thumbnailMedia), size: fitSize))
} else {
let thumbnailMedia = TelegramMediaImage(imageId: MediaId(namespace: 0, id: index), representations: [], immediateThumbnailData: immediateThumbnailData, reference: nil, partialReference: nil, flags: [])

View File

@ -246,7 +246,7 @@ public func chatListItemStrings(strings: PresentationStrings, nameDisplayOrder:
processed = true
break inner
}
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
messageText = strings.Message_VideoMessage
processed = true

View File

@ -795,6 +795,7 @@ final class ContextControllerExtractedPresentationNode: ASDisplayNode, ContextCo
if case .animateOut = stateTransition {
contentRect.origin.y = self.contentRectDebugNode.frame.maxY - contentRect.size.height
}
contentRect.size.height = 200.0
} else {
return
}

View File

@ -105,6 +105,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case disableCallV2(Bool)
case experimentalCallMute(Bool)
case liveStreamV2(Bool)
case dynamicStreaming(Bool)
case preferredVideoCodec(Int, String, String?, Bool)
case disableVideoAspectScaling(Bool)
case enableNetworkFramework(Bool)
@ -129,7 +130,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return DebugControllerSection.web.rawValue
case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .liveStreamV2:
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .liveStreamV2, .dynamicStreaming:
return DebugControllerSection.experiments.rawValue
case .logTranslationRecognition, .resetTranslationStates:
return DebugControllerSection.translation.rawValue
@ -248,8 +249,10 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return 52
case .liveStreamV2:
return 53
case .dynamicStreaming:
return 54
case let .preferredVideoCodec(index, _, _, _):
return 54 + index
return 55 + index
case .disableVideoAspectScaling:
return 100
case .enableNetworkFramework:
@ -338,7 +341,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -418,7 +421,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(logData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: logData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(logData.count), attributes: [.FileName(fileName: "Log-iOS-Short.txt")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(logData.count), attributes: [.FileName(fileName: "Log-iOS-Short.txt")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -504,7 +507,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -588,7 +591,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -673,7 +676,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -726,7 +729,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let messages = logs.map { (name, path) -> EnqueueMessage in
let id = Int64.random(in: Int64.min ... Int64.max)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: name)])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: name)], alternativeRepresentations: [])
return .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
}
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: messages).start()
@ -835,7 +838,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/zip", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-All.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/zip", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-All.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -890,7 +893,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(allStatsData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: allStatsData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/zip", size: Int64(allStatsData.count), attributes: [.FileName(fileName: "StorageReport.txt")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/zip", size: Int64(allStatsData.count), attributes: [.FileName(fileName: "StorageReport.txt")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
@ -1348,6 +1351,16 @@ private enum DebugControllerEntry: ItemListNodeEntry {
})
}).start()
})
case let .dynamicStreaming(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Dynamic Streaming", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
settings.dynamicStreaming = value
return PreferencesEntry(settings)
})
}).start()
})
case let .preferredVideoCodec(_, title, value, isSelected):
return ItemListCheckboxItem(presentationData: presentationData, title: title, style: .right, checked: isSelected, zeroSeparatorInsets: false, sectionId: self.section, action: {
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
@ -1505,6 +1518,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
entries.append(.disableCallV2(experimentalSettings.disableCallV2))
entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))
entries.append(.dynamicStreaming(experimentalSettings.dynamicStreaming))
}
/*let codecs: [(String, String?)] = [
@ -1673,7 +1687,7 @@ public func triggerDebugSendLogsUI(context: AccountContext, additionalInfo: Stri
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(gzippedData.count), isSecretRelated: false)
context.account.postbox.mediaBox.storeResourceData(fileResource.id, data: gzippedData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(gzippedData.count), attributes: [.FileName(fileName: "Log-iOS-Full.txt.zip")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
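
The flag added above is read back through the experimental settings; a hedged consumer-side sketch matching the property path used in the gallery hunk below:

if context.sharedContext.immediateExperimentalUISettings.dynamicStreaming {
    // prefer the HLS-based player where NativeVideoContent.isHLSVideo(file:) is true
} else {
    // keep the existing progressive-download NativeVideoContent path
}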

View File

@ -859,7 +859,7 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, ASScroll
} else if let media = media as? TelegramMediaFile, !media.isAnimated {
for attribute in media.attributes {
switch attribute {
case let .Video(_, dimensions, _, _, _):
case let .Video(_, dimensions, _, _, _, _):
isVideo = true
if dimensions.height > 0 {
if CGFloat(dimensions.width) / CGFloat(dimensions.height) > 1.33 {

View File

@ -245,7 +245,11 @@ public func galleryItemForEntry(
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), loopVideo: true, enableSound: false, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
} else {
if true || (file.mimeType == "video/mpeg4" || file.mimeType == "video/mov" || file.mimeType == "video/mp4") {
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
if NativeVideoContent.isHLSVideo(file: file), context.sharedContext.immediateExperimentalUISettings.dynamicStreaming {
content = HLSVideoContent(id: .message(message.id, message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos)
} else {
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
}
} else {
content = PlatformVideoContent(id: .message(message.id, message.stableId, file.fileId), userLocation: .peer(message.id.peerId), content: .file(.message(message: MessageReference(message), media: file)), streamVideo: streamVideos, loopVideo: loopVideos)
}

View File

@ -1096,6 +1096,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
var hasLinkedStickers = false
if let content = item.content as? NativeVideoContent {
hasLinkedStickers = content.fileReference.media.hasLinkedStickers
} else if let content = item.content as? HLSVideoContent {
hasLinkedStickers = content.fileReference.media.hasLinkedStickers
}
var disablePictureInPicture = false
@ -1241,7 +1243,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
if let file = file {
for attribute in file.attributes {
if case let .Video(duration, _, _, _, _) = attribute, duration >= 30 {
if case let .Video(duration, _, _, _, _, _) = attribute, duration >= 30 {
hintSeekable = true
break
}
@ -1532,6 +1534,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if let _ = item.content as? NativeVideoContent {
self.playbackRate = item.playbackRate()
} else if let _ = item.content as? HLSVideoContent {
self.playbackRate = item.playbackRate()
} else if let _ = item.content as? WebEmbedVideoContent {
self.playbackRate = item.playbackRate()
}
@ -1602,6 +1606,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if isLocal || isStreamable {
return true
}
} else if let item = self.item, let _ = item.content as? HLSVideoContent {
return true
} else if let item = self.item, let _ = item.content as? PlatformVideoContent {
return true
}
@ -1619,6 +1625,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
var isAnimated = false
if let item = self.item, let content = item.content as? NativeVideoContent {
isAnimated = content.fileReference.media.isAnimated
} else if let item = self.item, let content = item.content as? HLSVideoContent {
isAnimated = content.fileReference.media.isAnimated
}
self.hideStatusNodeUntilCentrality = false
@ -1712,6 +1720,11 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if let time = item.timecode {
seek = .timecode(time)
}
} else if let content = item.content as? HLSVideoContent {
isAnimated = content.fileReference.media.isAnimated
if let time = item.timecode {
seek = .timecode(time)
}
} else if let _ = item.content as? WebEmbedVideoContent {
if let time = item.timecode {
seek = .timecode(time)
@ -1743,6 +1756,9 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
if !item.isSecret, let content = item.content as? NativeVideoContent, content.duration <= 30 {
return .loop
}
if !item.isSecret, let content = item.content as? HLSVideoContent, content.duration <= 30 {
return .loop
}
}
return .stop
}
@ -2700,6 +2716,35 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
items.append(.separator)
if let videoQualityState = strongSelf.videoNode?.videoQualityState(), !videoQualityState.available.isEmpty {
//TODO:localize
let qualityText: String
switch videoQualityState.preferred {
case .auto:
if videoQualityState.current != 0 {
qualityText = "Auto (\(videoQualityState.current)p)"
} else {
qualityText = "Auto"
}
case let .quality(value):
qualityText = "\(value)p"
}
items.append(.action(ContextMenuActionItem(text: "Video Quality", textLayout: .secondLineWithValue(qualityText), icon: { _ in
return nil
}, action: { c, _ in
guard let strongSelf = self else {
c?.dismiss(completion: nil)
return
}
c?.setItems(.single(ContextController.Items(content: .list(strongSelf.contextMenuVideoQualityItems(dismiss: dismiss)))), minHeight: nil, animated: true)
})))
items.append(.separator)
}
if let (message, _, _) = strongSelf.contentInfo() {
let context = strongSelf.context
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.SharedMedia_ViewInChat, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/GoToMessage"), color: theme.contextMenu.primaryColor)}, action: { [weak self] _, f in
@ -2881,6 +2926,80 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
}
private func contextMenuVideoQualityItems(dismiss: @escaping () -> Void) -> [ContextMenuItem] {
guard let videoNode = self.videoNode else {
return []
}
guard let qualityState = videoNode.videoQualityState(), !qualityState.available.isEmpty else {
return []
}
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: self.presentationData.strings.Common_Back, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor)
}, iconPosition: .left, action: { [weak self] c, _ in
guard let self else {
c?.dismiss(completion: nil)
return
}
c?.setItems(self.contextMenuMainItems(dismiss: dismiss) |> map { ContextController.Items(content: .list($0)) }, minHeight: nil, animated: true)
})))
do {
let isSelected = qualityState.preferred == .auto
let qualityText: String
if qualityState.current != 0 {
qualityText = "Auto (\(qualityState.current)p)"
} else {
qualityText = "Auto"
}
items.append(.action(ContextMenuActionItem(text: qualityText, icon: { _ in
if isSelected {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: .white)
} else {
return nil
}
}, action: { [weak self] _, f in
f(.default)
guard let self, let videoNode = self.videoNode else {
return
}
videoNode.setVideoQuality(.auto)
/*if let controller = strongSelf.galleryController() as? GalleryController {
controller.updateSharedPlaybackRate(rate)
}*/
})))
}
for quality in qualityState.available {
//TODO:release
let isSelected = qualityState.preferred == .quality(quality)
items.append(.action(ContextMenuActionItem(text: "\(quality)p", icon: { _ in
if isSelected {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: .white)
} else {
return nil
}
}, action: { [weak self] _, f in
f(.default)
guard let self, let videoNode = self.videoNode else {
return
}
videoNode.setVideoQuality(.quality(quality))
/*if let controller = strongSelf.galleryController() as? GalleryController {
controller.updateSharedPlaybackRate(rate)
}*/
})))
}
return items
}
private var isAirPlayActive = false
private var externalVideoPlayer: ExternalVideoPlayer?
func beginAirPlaySetup() {
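
The same label logic appears in both menus above; a hedged helper sketch that mirrors it (the function itself is illustrative, not part of the commit):

func qualityLabel(for state: (current: Int, preferred: UniversalVideoContentVideoQuality, available: [Int])) -> String {
    switch state.preferred {
    case .auto:
        // a current value of 0 is treated as "effective quality unknown"
        return state.current != 0 ? "Auto (\(state.current)p)" : "Auto"
    case let .quality(value):
        return "\(value)p"
    }
}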

View File

@ -590,7 +590,7 @@ extension InAppPurchaseManager: SKPaymentTransactionObserver {
let fileResource = LocalFileMediaResource(fileId: id, size: Int64(receiptData.count), isSecretRelated: false)
self.engine.account.postbox.mediaBox.storeResourceData(fileResource.id, data: receiptData)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(receiptData.count), attributes: [.FileName(fileName: "Receipt.dat")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: fileResource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: Int64(receiptData.count), attributes: [.FileName(fileName: "Receipt.dat")], alternativeRepresentations: [])
let message: EnqueueMessage = .message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: self.engine.account, peerId: self.engine.account.peerId, messages: [message]).start()

View File

@ -53,7 +53,7 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem {
} else {
return SharedMediaPlaybackData(type: .music, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
}
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
return SharedMediaPlaybackData(type: .instantVideo, source: .telegramFile(reference: .webPage(webPage: WebpageReference(self.webPage), media: file), isCopyProtected: false, isViewOnce: false))
} else {
@ -99,7 +99,7 @@ final class InstantPageMediaPlaylistItem: SharedMediaPlaylistItem {
return SharedMediaPlaybackDisplayData.music(title: updatedTitle, performer: updatedPerformer, albumArt: albumArt, long: false, caption: nil)
}
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
return SharedMediaPlaybackDisplayData.instantVideo(author: nil, peer: nil, timestamp: 0)
} else {

View File

@ -294,11 +294,11 @@ public func legacyEnqueueGifMessage(account: Account, data: Data, correlationId:
let finalDimensions = TGMediaVideoConverter.dimensions(for: dimensions, adjustments: nil, preset: TGMediaVideoConversionPresetAnimation)
var fileAttributes: [TelegramMediaFileAttribute] = []
fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil))
fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil, videoCodec: nil))
fileAttributes.append(.FileName(fileName: fileName))
fileAttributes.append(.Animated)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes, alternativeRepresentations: [])
subscriber.putNext(.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: media), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: []))
subscriber.putCompletion()
} else {
@ -336,11 +336,11 @@ public func legacyEnqueueVideoMessage(account: Account, data: Data, correlationI
let finalDimensions = TGMediaVideoConverter.dimensions(for: dimensions, adjustments: nil, preset: TGMediaVideoConversionPresetAnimation)
var fileAttributes: [TelegramMediaFileAttribute] = []
fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil))
fileAttributes.append(.Video(duration: 0.0, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil, videoCodec: nil))
fileAttributes.append(.FileName(fileName: fileName))
fileAttributes.append(.Animated)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes, alternativeRepresentations: [])
subscriber.putNext(.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: media), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: []))
subscriber.putCompletion()
} else {
@ -506,7 +506,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: format == .jxl ? "image/jxl" : "image/jpeg", size: nil, attributes: [
.FileName(fileName: format == .jxl ? "image\(sizeSide)-q\(quality).jxl" : "image\(sizeSide)-q\(quality).jpg"),
.ImageSize(size: PixelDimensions(scaledSize))
])
], alternativeRepresentations: [])
var attributes: [MessageAttribute] = []
if let timer = item.timer, timer > 0 && (timer <= 60 || timer == viewOnceTimeout) {
@ -651,7 +651,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
var randomId: Int64 = 0
arc4random_buf(&randomId, 8)
let resource = LocalFileReferenceMediaResource(localFilePath: path, randomId: randomId)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: nil, attributes: [.FileName(fileName: name)])
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: nil, attributes: [.FileName(fileName: name)], alternativeRepresentations: [])
var attributes: [MessageAttribute] = []
let text = trimChatInputText(convertMarkdownToAttributes(caption ?? NSAttributedString()))
@ -704,7 +704,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
var randomId: Int64 = 0
arc4random_buf(&randomId, 8)
let resource = PhotoLibraryMediaResource(localIdentifier: asset.localIdentifier, uniqueId: Int64.random(in: Int64.min ... Int64.max))
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: nil, attributes: [.FileName(fileName: name)])
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: nil, attributes: [.FileName(fileName: name)], alternativeRepresentations: [])
var attributes: [MessageAttribute] = []
let text = trimChatInputText(convertMarkdownToAttributes(caption ?? NSAttributedString()))
@ -857,7 +857,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
fileAttributes.append(.Animated)
}
if !asFile {
fileAttributes.append(.Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil))
fileAttributes.append(.Video(duration: finalDuration, size: PixelDimensions(finalDimensions), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil, videoCodec: nil))
if let adjustments = adjustments {
if adjustments.sendAsGif {
fileAttributes.append(.Animated)
@ -891,7 +891,7 @@ public func legacyAssetPickerEnqueueMessages(context: AccountContext, account: A
fileAttributes.append(.HasLinkedStickers)
}
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: fileAttributes, alternativeRepresentations: [])
if let timer = item.timer, timer > 0 && (timer <= 60 || timer == viewOnceTimeout) {
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: Int32(timer), countdownBeginTime: nil))

View File

@ -187,7 +187,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
let subject: ShareControllerSubject
var actionCompletionText: String?
if let video = entry.videoRepresentations.last, let peerReference = PeerReference(peer._asPeer()) {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
subject = .media(videoFileReference.abstract)
actionCompletionText = strongSelf.presentationData.strings.Gallery_VideoSaved
} else {
@ -279,7 +279,7 @@ final class PeerAvatarImageGalleryItemNode: ZoomableContentGalleryItemNode {
if let video = entry.videoRepresentations.last, let peerReference = PeerReference(self.peer._asPeer()) {
if video != previousVideoRepresentations?.last {
let mediaManager = self.context.sharedContext.mediaManager
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: entry.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
let videoContent = NativeVideoContent(id: .profileVideo(id, category), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: true, useLargeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
let videoNode = UniversalVideoNode(postbox: self.context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: GalleryVideoDecoration(), content: videoContent, priority: .overlay)
videoNode.isUserInteractionEnabled = false

View File

@ -515,7 +515,7 @@ public final class PeerInfoAvatarListItemNode: ASDisplayNode {
self.isReady.set(.single(true))
}
} else if let video = videoRepresentations.last, let peerReference = PeerReference(self.peer._asPeer()) {
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.representation.resource, previewRepresentations: representations.map { $0.representation }, videoThumbnails: [], immediateThumbnailData: immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.representation.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
let videoContent = NativeVideoContent(id: .profileVideo(id, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.representation.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: fullSizeOnly, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.representation.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
if videoContent.id != self.videoContent?.id {

View File

@ -101,6 +101,18 @@ public func areMediaArraysEqual(_ lhs: [Media], _ rhs: [Media]) -> Bool {
return true
}
public func areMediaArraysSemanticallyEqual(_ lhs: [Media], _ rhs: [Media]) -> Bool {
if lhs.count != rhs.count {
return false
}
for i in 0 ..< lhs.count {
if !lhs[i].isSemanticallyEqual(to: rhs[i]) {
return false
}
}
return true
}
public func areMediaDictionariesEqual(_ lhs: [MediaId: Media], _ rhs: [MediaId: Media]) -> Bool {
if lhs.count != rhs.count {
return false
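
A hedged usage sketch of the new array helper, with oldMedia and newMedia as illustrative [Media] locals:

if !areMediaArraysSemanticallyEqual(oldMedia, newMedia) {
    // the media changed in a user-visible way; rebuild the dependent views
}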

View File

@ -645,7 +645,8 @@ private final class DemoSheetContent: CombinedComponent {
immediateThumbnailData: file.immediateThumbnailData,
mimeType: file.mimeType,
size: file.size,
attributes: file.attributes
attributes: file.attributes,
alternativeRepresentations: file.alternativeRepresentations
)
}
default:

View File

@ -158,7 +158,8 @@ public class PremiumLimitsListScreen: ViewController {
immediateThumbnailData: file.immediateThumbnailData,
mimeType: file.mimeType,
size: file.size,
attributes: file.attributes
attributes: file.attributes,
alternativeRepresentations: file.alternativeRepresentations
)
}
default:

View File

@ -177,7 +177,7 @@ private final class BubbleSettingsControllerNode: ASDisplayNode, ASScrollViewDel
let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA="
let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)]
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes)
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes, alternativeRepresentations: [])
let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false))

View File

@@ -450,7 +450,7 @@ private final class TextSizeSelectionControllerNode: ASDisplayNode, ASScrollView
let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA="
let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)]
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes)
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes, alternativeRepresentations: [])
let message3 = Message(stableId: 1, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 1), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66001, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
items.append(self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message3], theme: self.presentationData.theme, strings: self.presentationData.strings, wallpaper: self.presentationData.chatWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local), tapMessage: nil, clickThroughMessage: nil, backgroundNode: self.chatBackgroundNode, availableReactions: nil, accountPeer: nil, isCentered: false, isPreview: true, isStandalone: false))

View File

@@ -538,7 +538,7 @@ public func editThemeController(context: AccountContext, mode: EditThemeControll
let _ = (combineLatest(queue: Queue.mainQueue(), previewThemePromise.get(), settingsPromise.get())
|> take(1)).start(next: { previewTheme, settings in
let saveThemeTemplateFile: (String, LocalFileMediaResource, @escaping () -> Void) -> Void = { title, resource, completion in
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: resource.fileId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/x-tgtheme-ios", size: nil, attributes: [.FileName(fileName: "\(title).tgios-theme")])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: resource.fileId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/x-tgtheme-ios", size: nil, attributes: [.FileName(fileName: "\(title).tgios-theme")], alternativeRepresentations: [])
let message = EnqueueMessage.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
let _ = enqueueMessages(account: context.account, peerId: context.account.peerId, messages: [message]).start()

View File

@@ -615,7 +615,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, ASScrollViewDelegate {
let waveformBase64 = "DAAOAAkACQAGAAwADwAMABAADQAPABsAGAALAA0AGAAfABoAHgATABgAGQAYABQADAAVABEAHwANAA0ACQAWABkACQAOAAwACQAfAAAAGQAVAAAAEwATAAAACAAfAAAAHAAAABwAHwAAABcAGQAAABQADgAAABQAHwAAAB8AHwAAAAwADwAAAB8AEwAAABoAFwAAAB8AFAAAAAAAHwAAAAAAHgAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAHwAAAAAAAAA="
let voiceAttributes: [TelegramMediaFileAttribute] = [.Audio(isVoice: true, duration: 23, title: nil, performer: nil, waveform: Data(base64Encoded: waveformBase64)!)]
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes)
let voiceMedia = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: LocalFileMediaResource(fileId: 0), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: 0, attributes: voiceAttributes, alternativeRepresentations: [])
let message6 = Message(stableId: 6, stableVersion: 0, id: MessageId(peerId: peerId, namespace: 0, id: 6), globallyUniqueId: nil, groupingKey: nil, groupInfo: nil, threadId: nil, timestamp: 66005, flags: [.Incoming], tags: [], globalTags: [], localTags: [], customTags: [], forwardInfo: nil, author: peers[peerId], text: "", attributes: [], media: [voiceMedia], peers: peers, associatedMessages: messages, associatedMessageIds: [], associatedMedia: [:], associatedThreadInfo: nil, associatedStories: [:])
sampleMessages.append(message6)

View File

@@ -279,7 +279,7 @@ public final class ShareProlongedLoadingContainerNode: ASDisplayNode, ShareConte
if let postbox, let mediaManager = environment.mediaManager, let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) {
let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black)
let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil)])
let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: [])
let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)

View File

@@ -144,7 +144,7 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe
let estimatedSize = TGMediaVideoConverter.estimatedSize(for: preset, duration: finalDuration, hasAudio: true)
let resource = LocalFileVideoMediaResource(randomId: Int64.random(in: Int64.min ... Int64.max), path: asset.url.path, adjustments: resourceAdjustments)
return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil, coverTime: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024)
return standaloneUploadedFile(postbox: postbox, network: network, peerId: peerId, text: "", source: .resource(.standalone(resource: resource)), mimeType: "video/mp4", attributes: [.Video(duration: finalDuration, size: PixelDimensions(width: Int32(finalDimensions.width), height: Int32(finalDimensions.height)), flags: flags, preloadSize: nil, coverTime: nil, videoCodec: nil)], hintFileIsLarge: estimatedSize > 10 * 1024 * 1024)
|> mapError { _ -> PreparedShareItemError in
return .generic
}
@@ -210,7 +210,7 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe
let mimeType: String
if converted {
mimeType = "video/mp4"
attributes = [.Video(duration: duration, size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height)), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil), .Animated, .FileName(fileName: "animation.mp4")]
attributes = [.Video(duration: duration, size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height)), flags: [.supportsStreaming], preloadSize: nil, coverTime: nil, videoCodec: nil), .Animated, .FileName(fileName: "animation.mp4")]
} else {
mimeType = "animation/gif"
attributes = [.ImageSize(size: PixelDimensions(width: Int32(dimensions.width), height: Int32(dimensions.height))), .Animated, .FileName(fileName: fileName ?? "animation.gif")]

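The share path above still passes videoCodec: nil. A hedged sketch of what a codec-aware attribute could look like once an encoder reports it (the "h264" value and the constant name are illustrative assumptions, not something this commit sets here):

let assumedCodecAwareVideoAttribute: TelegramMediaFileAttribute = .Video(
    duration: 1, // seconds
    size: PixelDimensions(width: 1280, height: 720),
    flags: [.supportsStreaming],
    preloadSize: nil,
    coverTime: nil,
    videoCodec: "h264" // illustrative; this share path currently leaves it nil
)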
View File

@@ -245,7 +245,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1744710921] = { return Api.DocumentAttribute.parse_documentAttributeHasStickers($0) }
dict[1815593308] = { return Api.DocumentAttribute.parse_documentAttributeImageSize($0) }
dict[1662637586] = { return Api.DocumentAttribute.parse_documentAttributeSticker($0) }
dict[389652397] = { return Api.DocumentAttribute.parse_documentAttributeVideo($0) }
dict[1137015880] = { return Api.DocumentAttribute.parse_documentAttributeVideo($0) }
dict[761606687] = { return Api.DraftMessage.parse_draftMessage($0) }
dict[453805082] = { return Api.DraftMessage.parse_draftMessageEmpty($0) }
dict[-1764723459] = { return Api.EmailVerification.parse_emailVerificationApple($0) }
@@ -500,6 +500,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1560655744] = { return Api.KeyboardButton.parse_keyboardButton($0) }
dict[-1344716869] = { return Api.KeyboardButton.parse_keyboardButtonBuy($0) }
dict[901503851] = { return Api.KeyboardButton.parse_keyboardButtonCallback($0) }
dict[1976723854] = { return Api.KeyboardButton.parse_keyboardButtonCopy($0) }
dict[1358175439] = { return Api.KeyboardButton.parse_keyboardButtonGame($0) }
dict[-59151553] = { return Api.KeyboardButton.parse_keyboardButtonRequestGeoLocation($0) }
dict[1406648280] = { return Api.KeyboardButton.parse_keyboardButtonRequestPeer($0) }
@@ -603,7 +604,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[1313731771] = { return Api.MessageFwdHeader.parse_messageFwdHeader($0) }
dict[1882335561] = { return Api.MessageMedia.parse_messageMediaContact($0) }
dict[1065280907] = { return Api.MessageMedia.parse_messageMediaDice($0) }
dict[1291114285] = { return Api.MessageMedia.parse_messageMediaDocument($0) }
dict[-581497899] = { return Api.MessageMedia.parse_messageMediaDocument($0) }
dict[1038967584] = { return Api.MessageMedia.parse_messageMediaEmpty($0) }
dict[-38694904] = { return Api.MessageMedia.parse_messageMediaGame($0) }
dict[1457575028] = { return Api.MessageMedia.parse_messageMediaGeo($0) }

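The entries above only swap or add constructor ids in the generated parser table; dispatch itself is unchanged. A minimal sketch of that dispatch, mirroring how the generated readers call it (the wrapper name is hypothetical):

func parseBoxedObject(_ reader: BufferReader) -> Any? {
    // The boxed constructor id read from the wire selects the registered parse function.
    guard let signature = reader.readInt32() else {
        return nil
    }
    return Api.parse(reader, signature: signature)
}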
View File

@@ -674,6 +674,7 @@ public extension Api {
case keyboardButton(text: String)
case keyboardButtonBuy(text: String)
case keyboardButtonCallback(flags: Int32, text: String, data: Buffer)
case keyboardButtonCopy(text: String, copyText: String)
case keyboardButtonGame(text: String)
case keyboardButtonRequestGeoLocation(text: String)
case keyboardButtonRequestPeer(text: String, buttonId: Int32, peerType: Api.RequestPeerType, maxQuantity: Int32)
@@ -735,6 +736,13 @@ public extension Api {
serializeString(text, buffer: buffer, boxed: false)
serializeBytes(data, buffer: buffer, boxed: false)
break
case .keyboardButtonCopy(let text, let copyText):
if boxed {
buffer.appendInt32(1976723854)
}
serializeString(text, buffer: buffer, boxed: false)
serializeString(copyText, buffer: buffer, boxed: false)
break
case .keyboardButtonGame(let text):
if boxed {
buffer.appendInt32(1358175439)
@@ -838,6 +846,8 @@ public extension Api {
return ("keyboardButtonBuy", [("text", text as Any)])
case .keyboardButtonCallback(let flags, let text, let data):
return ("keyboardButtonCallback", [("flags", flags as Any), ("text", text as Any), ("data", data as Any)])
case .keyboardButtonCopy(let text, let copyText):
return ("keyboardButtonCopy", [("text", text as Any), ("copyText", copyText as Any)])
case .keyboardButtonGame(let text):
return ("keyboardButtonGame", [("text", text as Any)])
case .keyboardButtonRequestGeoLocation(let text):
@@ -968,6 +978,20 @@ public extension Api {
return nil
}
}
public static func parse_keyboardButtonCopy(_ reader: BufferReader) -> KeyboardButton? {
var _1: String?
_1 = parseString(reader)
var _2: String?
_2 = parseString(reader)
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.KeyboardButton.keyboardButtonCopy(text: _1!, copyText: _2!)
}
else {
return nil
}
}
public static func parse_keyboardButtonGame(_ reader: BufferReader) -> KeyboardButton? {
var _1: String?
_1 = parseString(reader)

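A minimal round-trip sketch for the new keyboardButtonCopy constructor, assuming the Buffer/BufferReader behavior shown in the generated code above (the text values are placeholders):

let button = Api.KeyboardButton.keyboardButtonCopy(text: "Copy code", copyText: "123456")
let buffer = Buffer()
button.serialize(buffer, true) // boxed: writes constructor id 1976723854, then both strings
let reader = BufferReader(buffer)
let _ = reader.readInt32() // consume the boxed constructor id
let parsed = Api.KeyboardButton.parse_keyboardButtonCopy(reader) // expected to yield the same text/copyText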
View File

@@ -710,7 +710,7 @@ public extension Api {
indirect enum MessageMedia: TypeConstructorDescription {
case messageMediaContact(phoneNumber: String, firstName: String, lastName: String, vcard: String, userId: Int64)
case messageMediaDice(value: Int32, emoticon: String)
case messageMediaDocument(flags: Int32, document: Api.Document?, altDocument: Api.Document?, ttlSeconds: Int32?)
case messageMediaDocument(flags: Int32, document: Api.Document?, altDocuments: [Api.Document]?, ttlSeconds: Int32?)
case messageMediaEmpty
case messageMediaGame(game: Api.Game)
case messageMediaGeo(geo: Api.GeoPoint)
@@ -745,13 +745,17 @@ public extension Api {
serializeInt32(value, buffer: buffer, boxed: false)
serializeString(emoticon, buffer: buffer, boxed: false)
break
case .messageMediaDocument(let flags, let document, let altDocument, let ttlSeconds):
case .messageMediaDocument(let flags, let document, let altDocuments, let ttlSeconds):
if boxed {
buffer.appendInt32(1291114285)
buffer.appendInt32(-581497899)
}
serializeInt32(flags, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {document!.serialize(buffer, true)}
if Int(flags) & Int(1 << 5) != 0 {altDocument!.serialize(buffer, true)}
if Int(flags) & Int(1 << 5) != 0 {buffer.appendInt32(481674261)
buffer.appendInt32(Int32(altDocuments!.count))
for item in altDocuments! {
item.serialize(buffer, true)
}}
if Int(flags) & Int(1 << 2) != 0 {serializeInt32(ttlSeconds!, buffer: buffer, boxed: false)}
break
case .messageMediaEmpty:
@@ -905,8 +909,8 @@ public extension Api {
return ("messageMediaContact", [("phoneNumber", phoneNumber as Any), ("firstName", firstName as Any), ("lastName", lastName as Any), ("vcard", vcard as Any), ("userId", userId as Any)])
case .messageMediaDice(let value, let emoticon):
return ("messageMediaDice", [("value", value as Any), ("emoticon", emoticon as Any)])
case .messageMediaDocument(let flags, let document, let altDocument, let ttlSeconds):
return ("messageMediaDocument", [("flags", flags as Any), ("document", document as Any), ("altDocument", altDocument as Any), ("ttlSeconds", ttlSeconds as Any)])
case .messageMediaDocument(let flags, let document, let altDocuments, let ttlSeconds):
return ("messageMediaDocument", [("flags", flags as Any), ("document", document as Any), ("altDocuments", altDocuments as Any), ("ttlSeconds", ttlSeconds as Any)])
case .messageMediaEmpty:
return ("messageMediaEmpty", [])
case .messageMediaGame(let game):
@@ -982,9 +986,9 @@ public extension Api {
if Int(_1!) & Int(1 << 0) != 0 {if let signature = reader.readInt32() {
_2 = Api.parse(reader, signature: signature) as? Api.Document
} }
var _3: Api.Document?
if Int(_1!) & Int(1 << 5) != 0 {if let signature = reader.readInt32() {
_3 = Api.parse(reader, signature: signature) as? Api.Document
var _3: [Api.Document]?
if Int(_1!) & Int(1 << 5) != 0 {if let _ = reader.readInt32() {
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Document.self)
} }
var _4: Int32?
if Int(_1!) & Int(1 << 2) != 0 {_4 = reader.readInt32() }
@@ -993,7 +997,7 @@ public extension Api {
let _c3 = (Int(_1!) & Int(1 << 5) == 0) || _3 != nil
let _c4 = (Int(_1!) & Int(1 << 2) == 0) || _4 != nil
if _c1 && _c2 && _c3 && _c4 {
return Api.MessageMedia.messageMediaDocument(flags: _1!, document: _2, altDocument: _3, ttlSeconds: _4)
return Api.MessageMedia.messageMediaDocument(flags: _1!, document: _2, altDocuments: _3, ttlSeconds: _4)
}
else {
return nil

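altDocument becomes a vector, so a message can now carry several alternative renditions of the same video (presumably alternative qualities or encodings). A hedged selection sketch (the helper and the preferAlternative policy are illustrative, not this commit's logic):

func effectiveDocument(of media: Api.MessageMedia, preferAlternative: Bool) -> Api.Document? {
    guard case let .messageMediaDocument(_, document, altDocuments, _) = media else {
        return nil
    }
    // Fall back to the primary document when no alternative renditions are present.
    if preferAlternative, let alternative = altDocuments?.first {
        return alternative
    }
    return document
}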
View File

@@ -2608,12 +2608,13 @@ public extension Api.functions.channels {
}
}
public extension Api.functions.channels {
static func clickSponsoredMessage(channel: Api.InputChannel, randomId: Buffer) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Bool>) {
static func clickSponsoredMessage(flags: Int32, channel: Api.InputChannel, randomId: Buffer) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Bool>) {
let buffer = Buffer()
buffer.appendInt32(414170259)
buffer.appendInt32(21257589)
serializeInt32(flags, buffer: buffer, boxed: false)
channel.serialize(buffer, true)
serializeBytes(randomId, buffer: buffer, boxed: false)
return (FunctionDescription(name: "channels.clickSponsoredMessage", parameters: [("channel", String(describing: channel)), ("randomId", String(describing: randomId))]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Bool? in
return (FunctionDescription(name: "channels.clickSponsoredMessage", parameters: [("flags", String(describing: flags)), ("channel", String(describing: channel)), ("randomId", String(describing: randomId))]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Bool? in
let reader = BufferReader(buffer)
var result: Api.Bool?
if let signature = reader.readInt32() {

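A hedged call-site sketch for the updated request builder: a flags argument is now required, and 0 should correspond to the previous no-optional-fields behavior (the placeholder channel and random id below are for illustration only):

let (info, payload, responseParser) = Api.functions.channels.clickSponsoredMessage(
    flags: 0, // no optional flag bits set
    channel: .inputChannelEmpty, // placeholder
    randomId: Buffer() // placeholder for the sponsored message's random_id bytes
)
let _ = (info, payload, responseParser) // handed to the network layer elsewhere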
View File

@@ -1473,7 +1473,7 @@ public extension Api {
case documentAttributeHasStickers
case documentAttributeImageSize(w: Int32, h: Int32)
case documentAttributeSticker(flags: Int32, alt: String, stickerset: Api.InputStickerSet, maskCoords: Api.MaskCoords?)
case documentAttributeVideo(flags: Int32, duration: Double, w: Int32, h: Int32, preloadPrefixSize: Int32?, videoStartTs: Double?)
case documentAttributeVideo(flags: Int32, duration: Double, w: Int32, h: Int32, preloadPrefixSize: Int32?, videoStartTs: Double?, videoCodec: String?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
@@ -1529,9 +1529,9 @@ public extension Api {
stickerset.serialize(buffer, true)
if Int(flags) & Int(1 << 0) != 0 {maskCoords!.serialize(buffer, true)}
break
case .documentAttributeVideo(let flags, let duration, let w, let h, let preloadPrefixSize, let videoStartTs):
case .documentAttributeVideo(let flags, let duration, let w, let h, let preloadPrefixSize, let videoStartTs, let videoCodec):
if boxed {
buffer.appendInt32(389652397)
buffer.appendInt32(1137015880)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeDouble(duration, buffer: buffer, boxed: false)
@@ -1539,6 +1539,7 @@ public extension Api {
serializeInt32(h, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 2) != 0 {serializeInt32(preloadPrefixSize!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 4) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 5) != 0 {serializeString(videoCodec!, buffer: buffer, boxed: false)}
break
}
}
@@ -1559,8 +1560,8 @@ public extension Api {
return ("documentAttributeImageSize", [("w", w as Any), ("h", h as Any)])
case .documentAttributeSticker(let flags, let alt, let stickerset, let maskCoords):
return ("documentAttributeSticker", [("flags", flags as Any), ("alt", alt as Any), ("stickerset", stickerset as Any), ("maskCoords", maskCoords as Any)])
case .documentAttributeVideo(let flags, let duration, let w, let h, let preloadPrefixSize, let videoStartTs):
return ("documentAttributeVideo", [("flags", flags as Any), ("duration", duration as Any), ("w", w as Any), ("h", h as Any), ("preloadPrefixSize", preloadPrefixSize as Any), ("videoStartTs", videoStartTs as Any)])
case .documentAttributeVideo(let flags, let duration, let w, let h, let preloadPrefixSize, let videoStartTs, let videoCodec):
return ("documentAttributeVideo", [("flags", flags as Any), ("duration", duration as Any), ("w", w as Any), ("h", h as Any), ("preloadPrefixSize", preloadPrefixSize as Any), ("videoStartTs", videoStartTs as Any), ("videoCodec", videoCodec as Any)])
}
}
@@ -1674,14 +1675,17 @@ public extension Api {
if Int(_1!) & Int(1 << 2) != 0 {_5 = reader.readInt32() }
var _6: Double?
if Int(_1!) & Int(1 << 4) != 0 {_6 = reader.readDouble() }
var _7: String?
if Int(_1!) & Int(1 << 5) != 0 {_7 = parseString(reader) }
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = _4 != nil
let _c5 = (Int(_1!) & Int(1 << 2) == 0) || _5 != nil
let _c6 = (Int(_1!) & Int(1 << 4) == 0) || _6 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 {
return Api.DocumentAttribute.documentAttributeVideo(flags: _1!, duration: _2!, w: _3!, h: _4!, preloadPrefixSize: _5, videoStartTs: _6)
let _c7 = (Int(_1!) & Int(1 << 5) == 0) || _7 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 {
return Api.DocumentAttribute.documentAttributeVideo(flags: _1!, duration: _2!, w: _3!, h: _4!, preloadPrefixSize: _5, videoStartTs: _6, videoCodec: _7)
}
else {
return nil

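The parser above surfaces the new optional videoCodec string behind flag bit 5. A minimal extraction sketch (the helper name is hypothetical):

func videoCodecHint(in attributes: [Api.DocumentAttribute]) -> String? {
    for attribute in attributes {
        if case let .documentAttributeVideo(_, _, _, _, _, _, videoCodec) = attribute {
            return videoCodec // nil when the server did not send flag bit 5
        }
    }
    return nil
}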
View File

@@ -291,7 +291,7 @@ func rateCallAndSendLogs(engine: TelegramEngine, callId: CallId, starsCount: Int
let id = Int64.random(in: Int64.min ... Int64.max)
let name = "\(callId.id)_\(callId.accessHash).log.json"
let path = callLogsPath(account: engine.account) + "/" + name
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: name)])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: name)], alternativeRepresentations: [])
let message = EnqueueMessage.message(text: comment, attributes: [], inlineStickers: [:], mediaReference: .standalone(media: file), threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])
return rate
|> then(enqueueMessages(account: engine.account, peerId: peerId, messages: [message])

View File

@@ -730,6 +730,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
public var myAudioLevel: Signal<Float, NoError> {
return self.myAudioLevelPipe.signal()
}
private let myAudioLevelAndSpeakingPipe = ValuePipe<(Float, Bool)>()
public var myAudioLevelAndSpeaking: Signal<(Float, Bool), NoError> {
return self.myAudioLevelAndSpeakingPipe.signal()
}
private var myAudioLevelDisposable = MetaDisposable()
private var audioSessionControl: ManagedAudioSessionControl?
@@ -1957,6 +1961,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
let mappedLevel = myLevel * 1.5
strongSelf.myAudioLevelPipe.putNext(mappedLevel)
strongSelf.myAudioLevelAndSpeakingPipe.putNext((mappedLevel, myLevelHasVoice))
strongSelf.processMyAudioLevel(level: mappedLevel, hasVoice: myLevelHasVoice)
strongSelf.isSpeakingPromise.set(orignalMyLevelHasVoice)

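A minimal subscription sketch for the new combined pipe (the wrapper function is hypothetical; SwiftSignalKit's start(next:) returns the disposable the caller should retain):

func observeMyAudioLevel(call: PresentationGroupCall) -> Disposable {
    return call.myAudioLevelAndSpeaking.start(next: { level, isSpeaking in
        // `level` mirrors myAudioLevel; `isSpeaking` is the voice-activity flag pushed alongside it.
        let _ = (level, isSpeaking)
    })
}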
View File

@@ -201,7 +201,7 @@ final class VideoChatParticipantThumbnailComponent: Component {
text: .plain(NSAttributedString(string: EnginePeer(component.participant.peer).compactDisplayTitle, font: Font.semibold(13.0), textColor: .white))
)),
environment: {},
containerSize: CGSize(width: availableSize.width - 6.0 * 2.0 - 8.0, height: 100.0)
containerSize: CGSize(width: availableSize.width - 6.0 * 2.0 - 12.0, height: 100.0)
)
let titleFrame = CGRect(origin: CGPoint(x: 6.0, y: availableSize.height - 6.0 - titleSize.height), size: titleSize)
if let titleView = self.title.view {

View File

@@ -134,17 +134,20 @@ private final class BlobView: UIView {
final class VideoChatParticipantAvatarComponent: Component {
let call: PresentationGroupCall
let peer: EnginePeer
let myPeerId: EnginePeer.Id
let isSpeaking: Bool
let theme: PresentationTheme
init(
call: PresentationGroupCall,
peer: EnginePeer,
myPeerId: EnginePeer.Id,
isSpeaking: Bool,
theme: PresentationTheme
) {
self.call = call
self.peer = peer
self.myPeerId = myPeerId
self.isSpeaking = isSpeaking
self.theme = theme
}
@@ -159,6 +162,9 @@ final class VideoChatParticipantAvatarComponent: Component {
if lhs.isSpeaking != rhs.isSpeaking {
return false
}
if lhs.myPeerId != rhs.myPeerId {
return false
}
if lhs.theme !== rhs.theme {
return false
}
@@ -175,6 +181,7 @@ final class VideoChatParticipantAvatarComponent: Component {
private var wasSpeaking: Bool?
private var noAudioTimer: Foundation.Timer?
private var lastAudioLevelTimestamp: Double = 0.0
override init(frame: CGRect) {
super.init(frame: frame)
@@ -189,6 +196,31 @@ final class VideoChatParticipantAvatarComponent: Component {
self.noAudioTimer?.invalidate()
}
private func checkNoAudio() {
let timestamp = CFAbsoluteTimeGetCurrent()
if self.lastAudioLevelTimestamp + 1.0 < timestamp {
self.noAudioTimer?.invalidate()
self.noAudioTimer = nil
if let blobView = self.blobView {
let transition: ComponentTransition = .easeInOut(duration: 0.3)
transition.setAlpha(view: blobView, alpha: 0.0, completion: { [weak self, weak blobView] completed in
guard let self, let blobView, completed else {
return
}
if self.blobView === blobView {
self.blobView = nil
}
blobView.removeFromSuperview()
})
transition.setScale(layer: blobView.layer, scale: 0.5)
if let avatarNode = self.avatarNode {
transition.setScale(view: avatarNode.view, scale: 1.0)
}
}
}
}
func update(component: VideoChatParticipantAvatarComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
self.isUpdating = true
defer {
@@ -254,35 +286,52 @@ final class VideoChatParticipantAvatarComponent: Component {
let blobScale: CGFloat = 1.5
if self.audioLevelDisposable == nil {
let peerId = component.peer.id
struct Level {
var value: Float
var isSpeaking: Bool
}
self.audioLevelDisposable = (component.call.audioLevels
|> map { levels -> Level? in
for level in levels {
if level.0 == peerId {
return Level(value: level.2, isSpeaking: level.3)
let peerId = component.peer.id
let levelSignal: Signal<Level?, NoError>
if peerId == component.myPeerId {
levelSignal = component.call.myAudioLevelAndSpeaking
|> map { value, isSpeaking -> Level? in
if value == 0.0 {
return nil
} else {
return Level(value: value, isSpeaking: isSpeaking)
}
}
return nil
} else {
levelSignal = component.call.audioLevels
|> map { levels -> Level? in
for level in levels {
if level.0 == peerId {
return Level(value: level.2, isSpeaking: level.3)
}
}
return nil
}
}
self.audioLevelDisposable = (levelSignal
|> distinctUntilChanged(isEqual: { lhs, rhs in
if (lhs == nil) != (rhs == nil) {
return false
}
if lhs != nil {
return true
} else {
return false
} else {
return true
}
})
|> deliverOnMainQueue).startStrict(next: { [weak self] level in
guard let self, let component = self.component, let avatarNode = self.avatarNode else {
return
}
if let level {
if let level, level.value >= 0.1 {
self.lastAudioLevelTimestamp = CFAbsoluteTimeGetCurrent()
let blobView: BlobView
if let current = self.blobView {
blobView = current
@@ -316,6 +365,11 @@ final class VideoChatParticipantAvatarComponent: Component {
ComponentTransition.immediate.setTintColor(layer: blobView.blobsLayer, color: component.isSpeaking ? UIColor(rgb: 0x33C758) : component.theme.list.itemAccentColor)
}
if blobView.alpha == 0.0 {
let transition: ComponentTransition = .easeInOut(duration: 0.3)
transition.setAlpha(view: blobView, alpha: 1.0)
transition.setScale(view: blobView, scale: 1.0 / blobScale)
}
blobView.updateLevel(CGFloat(level.value), immediately: false)
if let noAudioTimer = self.noAudioTimer {
@@ -323,28 +377,19 @@ final class VideoChatParticipantAvatarComponent: Component {
noAudioTimer.invalidate()
}
} else {
if self.noAudioTimer == nil {
self.noAudioTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 0.4, repeats: false, block: { [weak self] _ in
guard let self else {
return
}
self.noAudioTimer?.invalidate()
self.noAudioTimer = nil
if let blobView = self.blobView {
self.blobView = nil
blobView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { [weak blobView] _ in
blobView?.removeFromSuperview()
})
blobView.layer.animateScale(from: 1.0 / blobScale, to: 0.5, duration: 0.3, removeOnCompletion: false)
let transition: ComponentTransition = .easeInOut(duration: 0.1)
if let avatarNode = self.avatarNode {
transition.setScale(view: avatarNode.view, scale: 1.0)
}
}
})
if let blobView = self.blobView {
blobView.updateLevel(0.0, immediately: false)
}
}
if self.noAudioTimer == nil {
self.noAudioTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 0.4, repeats: true, block: { [weak self] _ in
guard let self else {
return
}
self.checkNoAudio()
})
}
})
}

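A condensed sketch of the level routing introduced above (the free function is hypothetical; the component keeps this logic inline): the local participant's avatar listens to the combined pipe, while every other participant filters the shared audioLevels signal.

func participantLevelSignal(call: PresentationGroupCall, peerId: EnginePeer.Id, myPeerId: EnginePeer.Id) -> Signal<Float, NoError> {
    if peerId == myPeerId {
        return call.myAudioLevelAndSpeaking
        |> map { level, _ -> Float in
            return level
        }
    } else {
        return call.audioLevels
        |> map { levels -> Float in
            for level in levels {
                if level.0 == peerId {
                    return level.2
                }
            }
            return 0.0
        }
    }
}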
View File

@@ -307,6 +307,12 @@ final class VideoChatParticipantVideoComponent: Component {
if muteStatusView.superview == nil {
self.addSubview(muteStatusView)
muteStatusView.alpha = controlsAlpha
//TODO:release
muteStatusView.layer.shadowOpacity = 0.7
muteStatusView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
muteStatusView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
muteStatusView.layer.shadowRadius = 8.0
}
transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
@@ -320,7 +326,7 @@ final class VideoChatParticipantVideoComponent: Component {
text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white))
)),
environment: {},
containerSize: CGSize(width: availableSize.width - 8.0 * 2.0, height: 100.0)
containerSize: CGSize(width: availableSize.width - 8.0 * 2.0 - 4.0, height: 100.0)
)
let titleFrame: CGRect
if component.isExpanded {
@@ -333,6 +339,12 @@ final class VideoChatParticipantVideoComponent: Component {
titleView.layer.anchorPoint = CGPoint()
self.addSubview(titleView)
titleView.alpha = controlsAlpha
//TODO:release
titleView.layer.shadowOpacity = 0.7
titleView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
titleView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
titleView.layer.shadowRadius = 8.0
}
transition.setPosition(view: titleView, position: titleFrame.origin)
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)

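The same soft text shadow is configured twice above (mute icon and title). A hedged refactor sketch that keeps the values from this diff in one hypothetical helper:

func applyVideoTileControlShadow(to view: UIView) {
    // Matches the shadow applied to muteStatusView/titleView above.
    view.layer.shadowOpacity = 0.7
    view.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
    view.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
    view.layer.shadowRadius = 8.0
}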
View File

@@ -1111,6 +1111,7 @@ final class VideoChatParticipantsComponent: Component {
avatarComponent: AnyComponent(VideoChatParticipantAvatarComponent(
call: component.call,
peer: EnginePeer(participant.peer),
myPeerId: component.participants?.myPeerId ?? component.call.accountContext.account.peerId,
isSpeaking: component.speakingParticipants.contains(participant.peer.id),
theme: component.theme
)),

File diff suppressed because it is too large

View File

@@ -0,0 +1,343 @@
import Foundation
import UIKit
import Display
import TelegramCore
import SwiftSignalKit
import PeerInfoUI
import OverlayStatusController
import PresentationDataUtils
extension VideoChatScreenComponent.View {
func openInviteMembers() {
guard let component = self.component else {
return
}
var canInvite = true
var inviteIsLink = false
if case let .channel(peer) = self.peer {
if peer.flags.contains(.isGigagroup) {
if peer.flags.contains(.isCreator) || peer.adminRights != nil {
} else {
canInvite = false
}
}
if case .broadcast = peer.info, !(peer.addressName?.isEmpty ?? true) {
inviteIsLink = true
}
}
var inviteType: VideoChatParticipantsComponent.Participants.InviteType?
if canInvite {
if inviteIsLink {
inviteType = .shareLink
} else {
inviteType = .invite
}
}
guard let inviteType else {
return
}
switch inviteType {
case .invite:
let groupPeer = component.call.accountContext.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: component.call.peerId))
let _ = (groupPeer
|> deliverOnMainQueue).start(next: { [weak self] groupPeer in
guard let self, let component = self.component, let environment = self.environment, let groupPeer else {
return
}
let inviteLinks = self.inviteLinks
if case let .channel(groupPeer) = groupPeer {
var canInviteMembers = true
if case .broadcast = groupPeer.info, !(groupPeer.addressName?.isEmpty ?? true) {
canInviteMembers = false
}
if !canInviteMembers {
if let inviteLinks {
self.presentShare(inviteLinks)
}
return
}
}
var filters: [ChannelMembersSearchFilter] = []
if let members = self.members {
filters.append(.disable(Array(members.participants.map { $0.peer.id })))
}
if case let .channel(groupPeer) = groupPeer {
if !groupPeer.hasPermission(.inviteMembers) && inviteLinks?.listenerLink == nil {
filters.append(.excludeNonMembers)
}
} else if case let .legacyGroup(groupPeer) = groupPeer {
if groupPeer.hasBannedPermission(.banAddMembers) {
filters.append(.excludeNonMembers)
}
}
filters.append(.excludeBots)
var dismissController: (() -> Void)?
let controller = ChannelMembersSearchController(context: component.call.accountContext, peerId: groupPeer.id, forceTheme: environment.theme, mode: .inviteToCall, filters: filters, openPeer: { [weak self] peer, participant in
guard let self, let component = self.component, let environment = self.environment else {
dismissController?()
return
}
guard let callState = self.callState else {
return
}
let presentationData = component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: environment.theme)
if peer.id == callState.myPeerId {
return
}
if let participant {
dismissController?()
if component.call.invitePeer(participant.peer.id) {
let text: String
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
} else {
text = environment.strings.VoiceChat_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
}
self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: EnginePeer(participant.peer), title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
}
} else {
if case let .channel(groupPeer) = groupPeer, let listenerLink = inviteLinks?.listenerLink, !groupPeer.hasPermission(.inviteMembers) {
let text = environment.strings.VoiceChat_SendPublicLinkText(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder), EnginePeer(groupPeer).displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
environment.controller()?.present(textAlertController(context: component.call.accountContext, forceTheme: environment.theme, title: nil, text: text, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: environment.strings.VoiceChat_SendPublicLinkSend, action: { [weak self] in
dismissController?()
guard let self, let component = self.component else {
return
}
let _ = (enqueueMessages(account: component.call.accountContext.account, peerId: peer.id, messages: [.message(text: listenerLink, attributes: [], inlineStickers: [:], mediaReference: nil, threadId: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])])
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self, let environment = self.environment else {
return
}
self.presentUndoOverlay(content: .forward(savedMessages: false, text: environment.strings.UserInfo_LinkForwardTooltip_Chat_One(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string), action: { _ in return true })
})
})]), in: .window(.root))
} else {
let text: String
if case let .channel(groupPeer) = groupPeer, case .broadcast = groupPeer.info {
text = environment.strings.VoiceChat_InviteMemberToChannelFirstText(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder), EnginePeer(groupPeer).displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
} else {
text = environment.strings.VoiceChat_InviteMemberToGroupFirstText(peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder), groupPeer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
}
environment.controller()?.present(textAlertController(context: component.call.accountContext, forceTheme: environment.theme, title: nil, text: text, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: environment.strings.VoiceChat_InviteMemberToGroupFirstAdd, action: { [weak self] in
guard let self, let component = self.component, let environment = self.environment else {
return
}
if case let .channel(groupPeer) = groupPeer {
guard let selfController = environment.controller() else {
return
}
let inviteDisposable = self.inviteDisposable
var inviteSignal = component.call.accountContext.peerChannelMemberCategoriesContextsManager.addMembers(engine: component.call.accountContext.engine, peerId: groupPeer.id, memberIds: [peer.id])
var cancelImpl: (() -> Void)?
let progressSignal = Signal<Never, NoError> { [weak selfController] subscriber in
let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
cancelImpl?()
}))
selfController?.present(controller, in: .window(.root))
return ActionDisposable { [weak controller] in
Queue.mainQueue().async() {
controller?.dismiss()
}
}
}
|> runOn(Queue.mainQueue())
|> delay(0.15, queue: Queue.mainQueue())
let progressDisposable = progressSignal.start()
inviteSignal = inviteSignal
|> afterDisposed {
Queue.mainQueue().async {
progressDisposable.dispose()
}
}
cancelImpl = {
inviteDisposable.set(nil)
}
inviteDisposable.set((inviteSignal |> deliverOnMainQueue).start(error: { [weak self] error in
dismissController?()
guard let self, let component = self.component, let environment = self.environment else {
return
}
let text: String
switch error {
case .limitExceeded:
text = environment.strings.Channel_ErrorAddTooMuch
case .tooMuchJoined:
text = environment.strings.Invite_ChannelsTooMuch
case .generic:
text = environment.strings.Login_UnknownError
case .restricted:
text = environment.strings.Channel_ErrorAddBlocked
case .notMutualContact:
if case .broadcast = groupPeer.info {
text = environment.strings.Channel_AddUserLeftError
} else {
text = environment.strings.GroupInfo_AddUserLeftError
}
case .botDoesntSupportGroups:
text = environment.strings.Channel_BotDoesntSupportGroups
case .tooMuchBots:
text = environment.strings.Channel_TooMuchBots
case .bot:
text = environment.strings.Login_UnknownError
case .kicked:
text = environment.strings.Channel_AddUserKickedError
}
environment.controller()?.present(textAlertController(context: component.call.accountContext, forceTheme: environment.theme, title: nil, text: text, actions: [TextAlertAction(type: .defaultAction, title: environment.strings.Common_OK, action: {})]), in: .window(.root))
}, completed: { [weak self] in
guard let self, let component = self.component, let environment = self.environment else {
dismissController?()
return
}
dismissController?()
if component.call.invitePeer(peer.id) {
let text: String
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: presentationData.nameDisplayOrder)).string
} else {
text = environment.strings.VoiceChat_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: presentationData.nameDisplayOrder)).string
}
self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
}
}))
} else if case let .legacyGroup(groupPeer) = groupPeer {
guard let selfController = environment.controller() else {
return
}
let inviteDisposable = self.inviteDisposable
var inviteSignal = component.call.accountContext.engine.peers.addGroupMember(peerId: groupPeer.id, memberId: peer.id)
var cancelImpl: (() -> Void)?
let progressSignal = Signal<Never, NoError> { [weak selfController] subscriber in
let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
cancelImpl?()
}))
selfController?.present(controller, in: .window(.root))
return ActionDisposable { [weak controller] in
Queue.mainQueue().async() {
controller?.dismiss()
}
}
}
|> runOn(Queue.mainQueue())
|> delay(0.15, queue: Queue.mainQueue())
let progressDisposable = progressSignal.start()
inviteSignal = inviteSignal
|> afterDisposed {
Queue.mainQueue().async {
progressDisposable.dispose()
}
}
cancelImpl = {
inviteDisposable.set(nil)
}
inviteDisposable.set((inviteSignal |> deliverOnMainQueue).start(error: { [weak self] error in
dismissController?()
guard let self, let component = self.component, let environment = self.environment else {
return
}
let context = component.call.accountContext
switch error {
case .privacy:
let _ = (component.call.accountContext.account.postbox.loadedPeerWithId(peer.id)
|> deliverOnMainQueue).start(next: { [weak self] peer in
guard let self, let component = self.component, let environment = self.environment else {
return
}
environment.controller()?.present(textAlertController(context: component.call.accountContext, title: nil, text: environment.strings.Privacy_GroupsAndChannels_InviteToGroupError(EnginePeer(peer).compactDisplayTitle, EnginePeer(peer).compactDisplayTitle).string, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_OK, action: {})]), in: .window(.root))
})
case .notMutualContact:
environment.controller()?.present(textAlertController(context: context, title: nil, text: environment.strings.GroupInfo_AddUserLeftError, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_OK, action: {})]), in: .window(.root))
case .tooManyChannels:
environment.controller()?.present(textAlertController(context: context, title: nil, text: environment.strings.Invite_ChannelsTooMuch, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_OK, action: {})]), in: .window(.root))
case .groupFull, .generic:
environment.controller()?.present(textAlertController(context: context, forceTheme: environment.theme, title: nil, text: environment.strings.Login_UnknownError, actions: [TextAlertAction(type: .defaultAction, title: environment.strings.Common_OK, action: {})]), in: .window(.root))
}
}, completed: { [weak self] in
guard let self, let component = self.component, let environment = self.environment else {
dismissController?()
return
}
dismissController?()
if component.call.invitePeer(peer.id) {
let text: String
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: presentationData.nameDisplayOrder)).string
} else {
text = environment.strings.VoiceChat_InvitedPeerText(peer.displayTitle(strings: environment.strings, displayOrder: presentationData.nameDisplayOrder)).string
}
self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
}
}))
}
})]), in: .window(.root))
}
}
})
controller.copyInviteLink = { [weak self] in
dismissController?()
guard let self, let component = self.component else {
return
}
let callPeerId = component.call.peerId
let _ = (component.call.accountContext.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: callPeerId),
TelegramEngine.EngineData.Item.Peer.ExportedInvitation(id: callPeerId)
)
|> map { peer, exportedInvitation -> String? in
if let link = inviteLinks?.listenerLink {
return link
} else if let peer = peer, let addressName = peer.addressName, !addressName.isEmpty {
return "https://t.me/\(addressName)"
} else if let link = exportedInvitation?.link {
return link
} else {
return nil
}
}
|> deliverOnMainQueue).start(next: { [weak self] link in
guard let self, let environment = self.environment else {
return
}
if let link {
UIPasteboard.general.string = link
self.presentUndoOverlay(content: .linkCopied(text: environment.strings.VoiceChat_InviteLinkCopiedText), action: { _ in return false })
}
})
}
dismissController = { [weak controller] in
controller?.dismiss()
}
environment.controller()?.push(controller)
})
case .shareLink:
guard let inviteLinks = self.inviteLinks else {
return
}
self.presentShare(inviteLinks)
}
}
}

View File

@@ -0,0 +1,560 @@
import Foundation
import UIKit
import Display
import ContextUI
import TelegramCore
import SwiftSignalKit
import DeleteChatPeerActionSheetItem
import PeerListItemComponent
import LegacyComponents
import LegacyUI
import WebSearchUI
import MapResourceToAvatarSizes
import LegacyMediaPickerUI
import AvatarNode
import PresentationDataUtils
import AccountContext
extension VideoChatScreenComponent.View {
func openMoreMenu() {
guard let sourceView = self.navigationLeftButton.view else {
return
}
guard let component = self.component, let environment = self.environment, let controller = environment.controller() else {
return
}
guard let peer = self.peer else {
return
}
guard let callState = self.callState else {
return
}
let canManageCall = callState.canManageCall
var items: [ContextMenuItem] = []
if let displayAsPeers = self.displayAsPeers, displayAsPeers.count > 1 {
for peer in displayAsPeers {
if peer.peer.id == callState.myPeerId {
let avatarSize = CGSize(width: 28.0, height: 28.0)
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_DisplayAs, textLayout: .secondLineWithValue(EnginePeer(peer.peer).displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)), icon: { _ in nil }, iconSource: ContextMenuActionItemIconSource(size: avatarSize, signal: peerAvatarCompleteImage(account: component.call.accountContext.account, peer: EnginePeer(peer.peer), size: avatarSize)), action: { [weak self] c, _ in
guard let self else {
return
}
c?.pushItems(items: .single(ContextController.Items(content: .list(self.contextMenuDisplayAsItems()))))
})))
items.append(.separator)
break
}
}
}
if let (availableOutputs, currentOutput) = self.audioOutputState, availableOutputs.count > 1 {
var currentOutputTitle = ""
for output in availableOutputs {
if output == currentOutput {
let title: String
switch output {
case .builtin:
title = UIDevice.current.model
case .speaker:
title = environment.strings.Call_AudioRouteSpeaker
case .headphones:
title = environment.strings.Call_AudioRouteHeadphones
case let .port(port):
title = port.name
}
currentOutputTitle = title
break
}
}
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_ContextAudio, textLayout: .secondLineWithValue(currentOutputTitle), icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Audio"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] c, _ in
guard let self else {
return
}
c?.pushItems(items: .single(ContextController.Items(content: .list(self.contextMenuAudioItems()))))
})))
}
if canManageCall {
let text: String
if case let .channel(channel) = peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_EditTitle
} else {
text = environment.strings.VoiceChat_EditTitle
}
items.append(.action(ContextMenuActionItem(text: text, icon: { theme -> UIImage? in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Pencil"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
guard let self else {
return
}
self.openTitleEditing()
})))
var hasPermissions = true
if case let .channel(chatPeer) = peer {
if case .broadcast = chatPeer.info {
hasPermissions = false
} else if chatPeer.flags.contains(.isGigagroup) {
hasPermissions = false
}
}
if hasPermissions {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_EditPermissions, icon: { theme -> UIImage? in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Restrict"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] c, _ in
guard let self else {
return
}
c?.pushItems(items: .single(ContextController.Items(content: .list(self.contextMenuPermissionItems()))))
})))
}
}
if let inviteLinks = self.inviteLinks {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_Share, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Link"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
guard let self else {
return
}
self.presentShare(inviteLinks)
})))
}
//let isScheduled = strongSelf.isScheduled
//TODO:release
let isScheduled: Bool = !"".isEmpty
let canSpeak: Bool
if let muteState = callState.muteState {
canSpeak = muteState.canUnmute
} else {
canSpeak = true
}
if !isScheduled && canSpeak {
if #available(iOS 15.0, *) {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_MicrophoneModes, textColor: .primary, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Noise"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
f(.dismissWithoutContent)
AVCaptureDevice.showSystemUserInterface(.microphoneModes)
})))
}
}
if callState.isVideoEnabled && (callState.muteState?.canUnmute ?? true) {
if component.call.hasScreencast {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_StopScreenSharing, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/ShareScreen"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
guard let self, let component = self.component else {
return
}
component.call.disableScreencast()
})))
} else {
items.append(.custom(VoiceChatShareScreenContextItem(context: component.call.accountContext, text: environment.strings.VoiceChat_ShareScreen, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/ShareScreen"), color: theme.actionSheet.primaryTextColor)
}, action: { _, _ in }), false))
}
}
if canManageCall {
if let recordingStartTimestamp = callState.recordingStartTimestamp {
items.append(.custom(VoiceChatRecordingContextItem(timestamp: recordingStartTimestamp, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component, let environment = self.environment else {
return
}
let alertController = textAlertController(context: component.call.accountContext, forceTheme: environment.theme, title: nil, text: environment.strings.VoiceChat_StopRecordingTitle, actions: [TextAlertAction(type: .genericAction, title: environment.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: environment.strings.VoiceChat_StopRecordingStop, action: { [weak self] in
guard let self, let component = self.component, let environment = self.environment else {
return
}
component.call.setShouldBeRecording(false, title: nil, videoOrientation: nil)
Queue.mainQueue().after(0.88) {
HapticFeedback().success()
}
let text: String
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_RecordingSaved
} else {
text = environment.strings.VideoChat_RecordingSaved
}
self.presentUndoOverlay(content: .forward(savedMessages: true, text: text), action: { [weak self] value in
if case .info = value, let self, let component = self.component, let environment = self.environment, let navigationController = environment.controller()?.navigationController as? NavigationController {
let context = component.call.accountContext
environment.controller()?.dismiss(completion: { [weak navigationController] in
Queue.mainQueue().justDispatch {
let _ = (context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: context.account.peerId))
|> deliverOnMainQueue).start(next: { peer in
guard let peer, let navigationController else {
return
}
context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(peer), keepStack: .always, purposefulAction: {}, peekData: nil))
})
}
})
return true
}
return false
})
})])
environment.controller()?.present(alertController, in: .window(.root))
}), false))
} else {
let text: String
if case let .channel(channel) = peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_StartRecording
} else {
text = environment.strings.VoiceChat_StartRecording
}
if callState.scheduleTimestamp == nil {
items.append(.action(ContextMenuActionItem(text: text, icon: { theme -> UIImage? in
return generateStartRecordingIcon(color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component, let environment = self.environment, let peer = self.peer else {
return
}
let controller = VoiceChatRecordingSetupController(context: component.call.accountContext, peer: peer, completion: { [weak self] videoOrientation in
guard let self, let component = self.component, let environment = self.environment, let peer = self.peer else {
return
}
let title: String
let text: String
let placeholder: String
if let _ = videoOrientation {
placeholder = environment.strings.VoiceChat_RecordingTitlePlaceholderVideo
} else {
placeholder = environment.strings.VoiceChat_RecordingTitlePlaceholder
}
if case let .channel(channel) = peer, case .broadcast = channel.info {
title = environment.strings.LiveStream_StartRecordingTitle
if let _ = videoOrientation {
text = environment.strings.LiveStream_StartRecordingTextVideo
} else {
text = environment.strings.LiveStream_StartRecordingText
}
} else {
title = environment.strings.VoiceChat_StartRecordingTitle
if let _ = videoOrientation {
text = environment.strings.VoiceChat_StartRecordingTextVideo
} else {
text = environment.strings.VoiceChat_StartRecordingText
}
}
let controller = voiceChatTitleEditController(sharedContext: component.call.accountContext.sharedContext, account: component.call.account, forceTheme: environment.theme, title: title, text: text, placeholder: placeholder, value: nil, maxLength: 40, apply: { [weak self] title in
guard let self, let component = self.component, let environment = self.environment, let peer = self.peer, let title else {
return
}
component.call.setShouldBeRecording(true, title: title, videoOrientation: videoOrientation)
let text: String
if case let .channel(channel) = peer, case .broadcast = channel.info {
text = environment.strings.LiveStream_RecordingStarted
} else {
text = environment.strings.VoiceChat_RecordingStarted
}
self.presentUndoOverlay(content: .voiceChatRecording(text: text), action: { _ in return false })
component.call.playTone(.recordingStarted)
})
environment.controller()?.present(controller, in: .window(.root))
})
environment.controller()?.present(controller, in: .window(.root))
})))
}
}
}
if canManageCall {
let text: String
if case let .channel(channel) = peer, case .broadcast = channel.info {
text = isScheduled ? environment.strings.VoiceChat_CancelLiveStream : environment.strings.VoiceChat_EndLiveStream
} else {
text = isScheduled ? environment.strings.VoiceChat_CancelVoiceChat : environment.strings.VoiceChat_EndVoiceChat
}
items.append(.action(ContextMenuActionItem(text: text, textColor: .destructive, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Clear"), color: theme.actionSheet.destructiveActionTextColor)
}, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component, let environment = self.environment else {
return
}
let action: () -> Void = { [weak self] in
guard let self, let component = self.component else {
return
}
let _ = (component.call.leave(terminateIfPossible: true)
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(completed: { [weak self] in
guard let self, let environment = self.environment else {
return
}
environment.controller()?.dismiss()
})
}
let title: String
let text: String
if case let .channel(channel) = self.peer, case .broadcast = channel.info {
title = isScheduled ? environment.strings.LiveStream_CancelConfirmationTitle : environment.strings.LiveStream_EndConfirmationTitle
text = isScheduled ? environment.strings.LiveStream_CancelConfirmationText : environment.strings.LiveStream_EndConfirmationText
} else {
title = isScheduled ? environment.strings.VoiceChat_CancelConfirmationTitle : environment.strings.VoiceChat_EndConfirmationTitle
text = isScheduled ? environment.strings.VoiceChat_CancelConfirmationText : environment.strings.VoiceChat_EndConfirmationText
}
let alertController = textAlertController(context: component.call.accountContext, forceTheme: environment.theme, title: title, text: text, actions: [TextAlertAction(type: .defaultAction, title: environment.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: isScheduled ? environment.strings.VoiceChat_CancelConfirmationEnd : environment.strings.VoiceChat_EndConfirmationEnd, action: {
action()
})])
environment.controller()?.present(alertController, in: .window(.root))
})))
} else {
let leaveText: String
if case let .channel(channel) = peer, case .broadcast = channel.info {
leaveText = environment.strings.LiveStream_LeaveVoiceChat
} else {
leaveText = environment.strings.VoiceChat_LeaveVoiceChat
}
items.append(.action(ContextMenuActionItem(text: leaveText, textColor: .destructive, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Clear"), color: theme.actionSheet.destructiveActionTextColor)
}, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component else {
return
}
let _ = (component.call.leave(terminateIfPossible: false)
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(completed: { [weak self] in
guard let self, let environment = self.environment else {
return
}
environment.controller()?.dismiss()
})
})))
}
let presentationData = component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: environment.theme)
let contextController = ContextController(presentationData: presentationData, source: .reference(VoiceChatContextReferenceContentSource(controller: controller, sourceView: sourceView)), items: .single(ContextController.Items(content: .list(items))), gesture: nil)
controller.presentInGlobalOverlay(contextController)
}
private func contextMenuDisplayAsItems() -> [ContextMenuItem] {
guard let component = self.component, let environment = self.environment else {
return []
}
guard let callState = self.callState else {
return []
}
let myPeerId = callState.myPeerId
let avatarSize = CGSize(width: 28.0, height: 28.0)
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: environment.strings.Common_Back, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor)
}, iconPosition: .left, action: { (c, _) in
c?.popItems()
})))
items.append(.separator)
var isGroup = false
if let displayAsPeers = self.displayAsPeers {
for peer in displayAsPeers {
if peer.peer is TelegramGroup {
isGroup = true
break
} else if let peer = peer.peer as? TelegramChannel, case .group = peer.info {
isGroup = true
break
}
}
}
items.append(.custom(VoiceChatInfoContextItem(text: isGroup ? environment.strings.VoiceChat_DisplayAsInfoGroup : environment.strings.VoiceChat_DisplayAsInfo, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Accounts"), color: theme.actionSheet.primaryTextColor)
}), true))
if let displayAsPeers = self.displayAsPeers {
for peer in displayAsPeers {
var subtitle: String?
if peer.peer.id.namespace == Namespaces.Peer.CloudUser {
subtitle = environment.strings.VoiceChat_PersonalAccount
} else if let subscribers = peer.subscribers {
if let peer = peer.peer as? TelegramChannel, case .broadcast = peer.info {
subtitle = environment.strings.Conversation_StatusSubscribers(subscribers)
} else {
subtitle = environment.strings.Conversation_StatusMembers(subscribers)
}
}
let isSelected = peer.peer.id == myPeerId
let extendedAvatarSize = CGSize(width: 35.0, height: 35.0)
let theme = environment.theme
let avatarSignal = peerAvatarCompleteImage(account: component.call.accountContext.account, peer: EnginePeer(peer.peer), size: avatarSize)
|> map { image -> UIImage? in
if isSelected, let image = image {
return generateImage(extendedAvatarSize, rotatedContext: { size, context in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: 1.0, y: -1.0)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
context.draw(image.cgImage!, in: CGRect(x: (extendedAvatarSize.width - avatarSize.width) / 2.0, y: (extendedAvatarSize.height - avatarSize.height) / 2.0, width: avatarSize.width, height: avatarSize.height))
let lineWidth = 1.0 + UIScreenPixel
context.setLineWidth(lineWidth)
context.setStrokeColor(theme.actionSheet.controlAccentColor.cgColor)
context.strokeEllipse(in: bounds.insetBy(dx: lineWidth / 2.0, dy: lineWidth / 2.0))
})
} else {
return image
}
}
items.append(.action(ContextMenuActionItem(text: EnginePeer(peer.peer).displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder), textLayout: subtitle.flatMap { .secondLineWithValue($0) } ?? .singleLine, icon: { _ in nil }, iconSource: ContextMenuActionItemIconSource(size: isSelected ? extendedAvatarSize : avatarSize, signal: avatarSignal), action: { [weak self] _, f in
f(.default)
guard let self, let component = self.component else {
return
}
if peer.peer.id != myPeerId {
component.call.reconnect(as: peer.peer.id)
}
})))
if peer.peer.id.namespace == Namespaces.Peer.CloudUser {
items.append(.separator)
}
}
}
return items
}
private func contextMenuAudioItems() -> [ContextMenuItem] {
guard let environment = self.environment else {
return []
}
guard let (availableOutputs, currentOutput) = self.audioOutputState else {
return []
}
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: environment.strings.Common_Back, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor)
}, iconPosition: .left, action: { (c, _) in
c?.popItems()
})))
items.append(.separator)
for output in availableOutputs {
let title: String
switch output {
case .builtin:
title = UIDevice.current.model
case .speaker:
title = environment.strings.Call_AudioRouteSpeaker
case .headphones:
title = environment.strings.Call_AudioRouteHeadphones
case let .port(port):
title = port.name
}
items.append(.action(ContextMenuActionItem(text: title, icon: { theme in
if output == currentOutput {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.actionSheet.primaryTextColor)
} else {
return nil
}
}, action: { [weak self] _, f in
f(.default)
guard let self, let component = self.component else {
return
}
component.call.setCurrentAudioOutput(output)
})))
}
return items
}
private func contextMenuPermissionItems() -> [ContextMenuItem] {
guard let environment = self.environment, let callState = self.callState else {
return []
}
var items: [ContextMenuItem] = []
if callState.canManageCall, let defaultParticipantMuteState = callState.defaultParticipantMuteState {
let isMuted = defaultParticipantMuteState == .muted
items.append(.action(ContextMenuActionItem(text: environment.strings.Common_Back, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor)
}, iconPosition: .left, action: { (c, _) in
c?.popItems()
})))
items.append(.separator)
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_SpeakPermissionEveryone, icon: { theme in
if isMuted {
return nil
} else {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.actionSheet.primaryTextColor)
}
}, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component else {
return
}
component.call.updateDefaultParticipantsAreMuted(isMuted: false)
})))
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_SpeakPermissionAdmin, icon: { theme in
if !isMuted {
return nil
} else {
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.actionSheet.primaryTextColor)
}
}, action: { [weak self] _, f in
f(.dismissWithoutContent)
guard let self, let component = self.component else {
return
}
component.call.updateDefaultParticipantsAreMuted(isMuted: true)
})))
}
return items
}
}

View File

@ -0,0 +1,667 @@
import Foundation
import UIKit
import Display
import SwiftSignalKit
import AccountContext
import TelegramCore
import ContextUI
import DeleteChatPeerActionSheetItem
import UndoUI
import LegacyComponents
import WebSearchUI
import MapResourceToAvatarSizes
import LegacyUI
import LegacyMediaPickerUI
extension VideoChatScreenComponent.View {
func openParticipantContextMenu(id: EnginePeer.Id, sourceView: ContextExtractedContentContainingView, gesture: ContextGesture?) {
guard let component = self.component, let environment = self.environment else {
return
}
guard let members = self.members, let participant = members.participants.first(where: { $0.peer.id == id }) else {
return
}
let muteStatePromise = Promise<GroupCallParticipantsContext.Participant.MuteState?>(participant.muteState)
let itemsForEntry: (GroupCallParticipantsContext.Participant.MuteState?) -> [ContextMenuItem] = { [weak self] muteState in
guard let self, let component = self.component, let environment = self.environment else {
return []
}
guard let callState = self.callState else {
return []
}
var items: [ContextMenuItem] = []
var hasVolumeSlider = false
let peer = participant.peer
if let muteState = muteState, !muteState.canUnmute || muteState.mutedByYou {
} else {
if callState.canManageCall || callState.myPeerId != id {
hasVolumeSlider = true
let minValue: CGFloat
if callState.canManageCall && callState.adminIds.contains(peer.id) && muteState != nil {
minValue = 0.01
} else {
minValue = 0.0
}
items.append(.custom(VoiceChatVolumeContextItem(minValue: minValue, value: participant.volume.flatMap { CGFloat($0) / 10000.0 } ?? 1.0, valueChanged: { [weak self] newValue, finished in
guard let self, let component = self.component else {
return
}
if finished && newValue.isZero {
let updatedMuteState = component.call.updateMuteState(peerId: peer.id, isMuted: true)
muteStatePromise.set(.single(updatedMuteState))
} else {
component.call.setVolume(peerId: peer.id, volume: Int32(newValue * 10000), sync: finished)
}
}), true))
}
}
if callState.myPeerId == id && !hasVolumeSlider && ((participant.about?.isEmpty ?? true) || participant.peer.smallProfileImage == nil) {
items.append(.custom(VoiceChatInfoContextItem(text: environment.strings.VoiceChat_ImproveYourProfileText, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Tip"), color: theme.actionSheet.primaryTextColor)
}), true))
}
if peer.id == callState.myPeerId {
if participant.hasRaiseHand {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_CancelSpeakRequest, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/RevokeSpeak"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component else {
return
}
component.call.lowerHand()
f(.default)
})))
}
items.append(.action(ContextMenuActionItem(text: peer.smallProfileImage == nil ? environment.strings.VoiceChat_AddPhoto : environment.strings.VoiceChat_ChangePhoto, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Camera"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
Queue.mainQueue().after(0.1) {
guard let self else {
return
}
self.openAvatarForEditing(fromGallery: false, completion: {})
}
})))
items.append(.action(ContextMenuActionItem(text: (participant.about?.isEmpty ?? true) ? environment.strings.VoiceChat_AddBio : environment.strings.VoiceChat_EditBio, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Info"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
Queue.mainQueue().after(0.1) {
guard let self, let component = self.component, let environment = self.environment else {
return
}
let maxBioLength: Int
if peer.id.namespace == Namespaces.Peer.CloudUser {
maxBioLength = 70
} else {
maxBioLength = 100
}
let controller = voiceChatTitleEditController(sharedContext: component.call.accountContext.sharedContext, account: component.call.accountContext.account, forceTheme: environment.theme, title: environment.strings.VoiceChat_EditBioTitle, text: environment.strings.VoiceChat_EditBioText, placeholder: environment.strings.VoiceChat_EditBioPlaceholder, doneButtonTitle: environment.strings.VoiceChat_EditBioSave, value: participant.about, maxLength: maxBioLength, apply: { [weak self] bio in
guard let self, let component = self.component, let environment = self.environment, let bio else {
return
}
if peer.id.namespace == Namespaces.Peer.CloudUser {
let _ = (component.call.accountContext.engine.accountData.updateAbout(about: bio)
|> `catch` { _ -> Signal<Void, NoError> in
return .complete()
}).start()
} else {
let _ = (component.call.accountContext.engine.peers.updatePeerDescription(peerId: peer.id, description: bio)
|> `catch` { _ -> Signal<Void, NoError> in
return .complete()
}).start()
}
self.presentUndoOverlay(content: .info(title: nil, text: environment.strings.VoiceChat_EditBioSuccess, timeout: nil, customUndoText: nil), action: { _ in return false })
})
environment.controller()?.present(controller, in: .window(.root))
}
})))
if let peer = peer as? TelegramUser {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_ChangeName, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/ChangeName"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
f(.default)
Queue.mainQueue().after(0.1) {
guard let self, let component = self.component, let environment = self.environment else {
return
}
let controller = voiceChatUserNameController(sharedContext: component.call.accountContext.sharedContext, account: component.call.accountContext.account, forceTheme: environment.theme, title: environment.strings.VoiceChat_ChangeNameTitle, firstNamePlaceholder: environment.strings.UserInfo_FirstNamePlaceholder, lastNamePlaceholder: environment.strings.UserInfo_LastNamePlaceholder, doneButtonTitle: environment.strings.VoiceChat_EditBioSave, firstName: peer.firstName, lastName: peer.lastName, maxLength: 128, apply: { [weak self] firstAndLastName in
guard let self, let component = self.component, let environment = self.environment, let (firstName, lastName) = firstAndLastName else {
return
}
let _ = component.call.accountContext.engine.accountData.updateAccountPeerName(firstName: firstName, lastName: lastName).startStandalone()
self.presentUndoOverlay(content: .info(title: nil, text: environment.strings.VoiceChat_EditNameSuccess, timeout: nil, customUndoText: nil), action: { _ in return false })
})
environment.controller()?.present(controller, in: .window(.root))
}
})))
}
} else {
if (callState.canManageCall || callState.adminIds.contains(component.call.accountContext.account.peerId)) {
if callState.adminIds.contains(peer.id) {
if let _ = muteState {
} else {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_MutePeer, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Mute"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component else {
return
}
let _ = component.call.updateMuteState(peerId: peer.id, isMuted: true)
f(.default)
})))
}
} else {
if let muteState = muteState, !muteState.canUnmute {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_UnmutePeer, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: participant.hasRaiseHand ? "Call/Context Menu/AllowToSpeak" : "Call/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component, let environment = self.environment else {
return
}
let _ = component.call.updateMuteState(peerId: peer.id, isMuted: false)
f(.default)
self.presentUndoOverlay(content: .voiceChatCanSpeak(text: environment.strings.VoiceChat_UserCanNowSpeak(EnginePeer(participant.peer).displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string), action: { _ in return true })
})))
} else {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_MutePeer, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Mute"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component else {
return
}
let _ = component.call.updateMuteState(peerId: peer.id, isMuted: true)
f(.default)
})))
}
}
} else {
if let muteState = muteState, muteState.mutedByYou {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_UnmuteForMe, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Unmute"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component else {
return
}
let _ = component.call.updateMuteState(peerId: peer.id, isMuted: false)
f(.default)
})))
} else {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_MuteForMe, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Mute"), color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component else {
return
}
let _ = component.call.updateMuteState(peerId: peer.id, isMuted: true)
f(.default)
})))
}
}
let openTitle: String
let openIcon: UIImage?
if [Namespaces.Peer.CloudChannel, Namespaces.Peer.CloudGroup].contains(peer.id.namespace) {
if let peer = peer as? TelegramChannel, case .broadcast = peer.info {
openTitle = environment.strings.VoiceChat_OpenChannel
openIcon = UIImage(bundleImageName: "Chat/Context Menu/Channels")
} else {
openTitle = environment.strings.VoiceChat_OpenGroup
openIcon = UIImage(bundleImageName: "Chat/Context Menu/Groups")
}
} else {
openTitle = environment.strings.Conversation_ContextMenuSendMessage
openIcon = UIImage(bundleImageName: "Chat/Context Menu/Message")
}
items.append(.action(ContextMenuActionItem(text: openTitle, icon: { theme in
return generateTintedImage(image: openIcon, color: theme.actionSheet.primaryTextColor)
}, action: { [weak self] _, f in
guard let self, let component = self.component, let environment = self.environment else {
return
}
guard let controller = environment.controller() as? VideoChatScreenV2Impl, let navigationController = controller.parentNavigationController else {
return
}
let context = component.call.accountContext
environment.controller()?.dismiss(completion: { [weak navigationController] in
Queue.mainQueue().after(0.3) {
guard let navigationController else {
return
}
context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: context, chatLocation: .peer(EnginePeer(peer)), keepStack: .always, purposefulAction: {}, peekData: nil))
}
})
f(.dismissWithoutContent)
})))
if (callState.canManageCall && !callState.adminIds.contains(peer.id)), peer.id != component.call.peerId {
items.append(.action(ContextMenuActionItem(text: environment.strings.VoiceChat_RemovePeer, textColor: .destructive, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Clear"), color: theme.actionSheet.destructiveActionTextColor)
}, action: { [weak self] c, _ in
c?.dismiss(completion: {
guard let self, let component = self.component else {
return
}
let _ = (component.call.accountContext.account.postbox.loadedPeerWithId(component.call.peerId)
|> deliverOnMainQueue).start(next: { [weak self] chatPeer in
guard let self, let component = self.component, let environment = self.environment else {
return
}
let presentationData = component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: environment.theme)
let actionSheet = ActionSheetController(presentationData: presentationData)
var items: [ActionSheetItem] = []
let nameDisplayOrder = presentationData.nameDisplayOrder
items.append(DeleteChatPeerActionSheetItem(context: component.call.accountContext, peer: EnginePeer(peer), chatPeer: EnginePeer(chatPeer), action: .removeFromGroup, strings: environment.strings, nameDisplayOrder: nameDisplayOrder))
items.append(ActionSheetButtonItem(title: environment.strings.VoiceChat_RemovePeerRemove, color: .destructive, action: { [weak self, weak actionSheet] in
actionSheet?.dismissAnimated()
guard let self, let component = self.component, let environment = self.environment else {
return
}
let _ = component.call.accountContext.peerChannelMemberCategoriesContextsManager.updateMemberBannedRights(engine: component.call.accountContext.engine, peerId: component.call.peerId, memberId: peer.id, bannedRights: TelegramChatBannedRights(flags: [.banReadMessages], untilDate: Int32.max)).start()
component.call.removedPeer(peer.id)
self.presentUndoOverlay(content: .banned(text: environment.strings.VoiceChat_RemovedPeerText(EnginePeer(peer).displayTitle(strings: environment.strings, displayOrder: nameDisplayOrder)).string), action: { _ in return false })
}))
actionSheet.setItemGroups([
ActionSheetItemGroup(items: items),
ActionSheetItemGroup(items: [
ActionSheetButtonItem(title: environment.strings.Common_Cancel, color: .accent, font: .bold, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
})
])
])
environment.controller()?.present(actionSheet, in: .window(.root))
})
})
})))
}
}
return items
}
let items = muteStatePromise.get()
|> map { muteState -> [ContextMenuItem] in
return itemsForEntry(muteState)
}
let presentationData = component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: environment.theme)
let contextController = ContextController(
presentationData: presentationData,
source: .extracted(ParticipantExtractedContentSource(contentView: sourceView)),
items: items |> map { items in
return ContextController.Items(content: .list(items))
},
recognizer: nil,
gesture: gesture
)
environment.controller()?.forEachController({ controller in
if let controller = controller as? UndoOverlayController {
controller.dismiss()
}
return true
})
environment.controller()?.presentInGlobalOverlay(contextController)
}
private func openAvatarForEditing(fromGallery: Bool = false, completion: @escaping () -> Void = {}) {
guard let component = self.component else {
return
}
guard let callState = self.callState else {
return
}
let peerId = callState.myPeerId
let _ = (component.call.accountContext.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: peerId),
TelegramEngine.EngineData.Item.Configuration.SearchBots()
)
|> deliverOnMainQueue).start(next: { [weak self] peer, searchBotsConfiguration in
guard let self, let component = self.component, let environment = self.environment else {
return
}
guard let peer else {
return
}
let presentationData = component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: environment.theme)
let legacyController = LegacyController(presentation: .custom, theme: environment.theme)
legacyController.statusBar.statusBarStyle = .Ignore
let emptyController = LegacyEmptyController(context: legacyController.context)!
let navigationController = makeLegacyNavigationController(rootController: emptyController)
navigationController.setNavigationBarHidden(true, animated: false)
navigationController.navigationBar.transform = CGAffineTransform(translationX: -1000.0, y: 0.0)
legacyController.bind(controller: navigationController)
self.endEditing(true)
environment.controller()?.present(legacyController, in: .window(.root))
var hasPhotos = false
if !peer.profileImageRepresentations.isEmpty {
hasPhotos = true
}
let mixin = TGMediaAvatarMenuMixin(context: legacyController.context, parentController: emptyController, hasSearchButton: true, hasDeleteButton: hasPhotos && !fromGallery, hasViewButton: false, personalPhoto: peerId.namespace == Namespaces.Peer.CloudUser, isVideo: false, saveEditedPhotos: false, saveCapturedMedia: false, signup: false, forum: false, title: nil, isSuggesting: false)!
mixin.forceDark = true
mixin.stickersContext = LegacyPaintStickersContext(context: component.call.accountContext)
let _ = self.currentAvatarMixin.swap(mixin)
mixin.requestSearchController = { [weak self] assetsController in
guard let self, let component = self.component, let environment = self.environment else {
return
}
let controller = WebSearchController(context: component.call.accountContext, peer: peer, chatLocation: nil, configuration: searchBotsConfiguration, mode: .avatar(initialQuery: peer.id.namespace == Namespaces.Peer.CloudUser ? nil : peer.displayTitle(strings: environment.strings, displayOrder: presentationData.nameDisplayOrder), completion: { [weak self] result in
assetsController?.dismiss()
guard let self else {
return
}
self.updateProfilePhoto(result)
}))
controller.navigationPresentation = .modal
environment.controller()?.push(controller)
if fromGallery {
completion()
}
}
mixin.didFinishWithImage = { [weak self] image in
if let image = image {
completion()
self?.updateProfilePhoto(image)
}
}
mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in
if let image = image, let asset = asset {
completion()
self?.updateProfileVideo(image, asset: asset, adjustments: adjustments)
}
}
mixin.didFinishWithDelete = { [weak self] in
guard let self, let environment = self.environment else {
return
}
let proceed = { [weak self] in
guard let self, let component = self.component else {
return
}
let _ = self.currentAvatarMixin.swap(nil)
let postbox = component.call.accountContext.account.postbox
self.updateAvatarDisposable.set((component.call.accountContext.engine.peers.updatePeerPhoto(peerId: peerId, photo: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: postbox, resource: resource, representations: representations)
})
|> deliverOnMainQueue).start())
}
let actionSheet = ActionSheetController(presentationData: presentationData)
let items: [ActionSheetItem] = [
ActionSheetButtonItem(title: environment.strings.Settings_RemoveConfirmation, color: .destructive, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
proceed()
})
]
actionSheet.setItemGroups([
ActionSheetItemGroup(items: items),
ActionSheetItemGroup(items: [
ActionSheetButtonItem(title: presentationData.strings.Common_Cancel, color: .accent, font: .bold, action: { [weak actionSheet] in
actionSheet?.dismissAnimated()
})
])
])
environment.controller()?.present(actionSheet, in: .window(.root))
}
mixin.didDismiss = { [weak self, weak legacyController] in
guard let self else {
return
}
let _ = self.currentAvatarMixin.swap(nil)
legacyController?.dismiss()
}
let menuController = mixin.present()
if let menuController = menuController {
menuController.customRemoveFromParentViewController = { [weak legacyController] in
legacyController?.dismiss()
}
}
})
}
private func updateProfilePhoto(_ image: UIImage) {
guard let component = self.component else {
return
}
guard let callState = self.callState else {
return
}
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
let peerId = callState.myPeerId
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
component.call.account.postbox.mediaBox.storeResourceData(resource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)
self.currentUpdatingAvatar = (representation, 0.0)
let postbox = component.call.account.postbox
let signal = peerId.namespace == Namespaces.Peer.CloudUser ? component.call.accountContext.engine.accountData.updateAccountPhoto(resource: resource, videoResource: nil, videoStartTimestamp: nil, markup: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: postbox, resource: resource, representations: representations)
}) : component.call.accountContext.engine.peers.updatePeerPhoto(peerId: peerId, photo: component.call.accountContext.engine.peers.uploadedPeerPhoto(resource: resource), mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: postbox, resource: resource, representations: representations)
})
self.updateAvatarDisposable.set((signal
|> deliverOnMainQueue).start(next: { [weak self] result in
guard let self else {
return
}
switch result {
case .complete:
self.currentUpdatingAvatar = nil
self.state?.updated(transition: .spring(duration: 0.4))
case let .progress(value):
self.currentUpdatingAvatar = (representation, value)
}
}))
self.state?.updated(transition: .spring(duration: 0.4))
}
private func updateProfileVideo(_ image: UIImage, asset: Any?, adjustments: TGVideoEditAdjustments?) {
guard let component = self.component else {
return
}
guard let callState = self.callState else {
return
}
guard let data = image.jpegData(compressionQuality: 0.6) else {
return
}
let peerId = callState.myPeerId
let photoResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
component.call.accountContext.account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
let representation = TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 640, height: 640), resource: photoResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)
self.currentUpdatingAvatar = (representation, 0.0)
var videoStartTimestamp: Double? = nil
if let adjustments = adjustments, adjustments.videoStartValue > 0.0 {
videoStartTimestamp = adjustments.videoStartValue - adjustments.trimStartValue
}
let context = component.call.accountContext
let account = context.account
let signal = Signal<TelegramMediaResource, UploadPeerPhotoError> { [weak self] subscriber in
let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
return LegacyPaintEntityRenderer(postbox: account.postbox, adjustments: adjustments)
} else {
return nil
}
}
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let uploadInterface = LegacyLiveUploadInterface(context: context)
let signal: SSignal
if let url = asset as? URL, url.absoluteString.hasSuffix(".jpg"), let data = try? Data(contentsOf: url, options: [.mappedRead]), let image = UIImage(data: data), let entityRenderer = entityRenderer {
let durationSignal: SSignal = SSignal(generator: { subscriber in
let disposable = (entityRenderer.duration()).start(next: { duration in
subscriber.putNext(duration)
subscriber.putCompletion()
})
return SBlockDisposable(block: {
disposable.dispose()
})
})
signal = durationSignal.map(toSignal: { duration -> SSignal in
if let duration = duration as? Double {
return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: nil, entityRenderer: entityRenderer)!
} else {
return SSignal.single(nil)
}
})
} else if let asset = asset as? AVAsset {
signal = TGMediaVideoConverter.convert(asset, adjustments: adjustments, path: tempFile.path, watcher: uploadInterface, entityRenderer: entityRenderer)!
} else {
signal = SSignal.complete()
}
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
if let image = result.coverImage, let data = image.jpegData(compressionQuality: 0.7) {
account.postbox.mediaBox.storeResourceData(photoResource.id, data: data)
}
if let timestamp = videoStartTimestamp {
videoStartTimestamp = max(0.0, min(timestamp, result.duration - 0.05))
}
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
if let data = try? Data(contentsOf: result.fileURL) {
let resource: TelegramMediaResource
if let liveUploadData = result.liveUploadData as? LegacyLiveUploadInterfaceResult {
resource = LocalFileMediaResource(fileId: liveUploadData.id)
} else {
resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
}
account.postbox.mediaBox.storeResourceData(resource.id, data: data, synchronous: true)
subscriber.putNext(resource)
EngineTempBox.shared.dispose(tempFile)
}
}
subscriber.putCompletion()
} else if let progress = next as? NSNumber {
Queue.mainQueue().async { [weak self] in
guard let self else {
return
}
self.currentUpdatingAvatar = (representation, Float(truncating: progress) * 0.25)
self.state?.updated(transition: .spring(duration: 0.4))
}
}
}, error: { _ in
}, completed: nil)
let disposable = ActionDisposable {
signalDisposable?.dispose()
}
return ActionDisposable {
disposable.dispose()
}
}
self.updateAvatarDisposable.set((signal
|> mapToSignal { videoResource -> Signal<UpdatePeerPhotoStatus, UploadPeerPhotoError> in
if peerId.namespace == Namespaces.Peer.CloudUser {
return context.engine.accountData.updateAccountPhoto(resource: photoResource, videoResource: videoResource, videoStartTimestamp: videoStartTimestamp, markup: nil, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: account.postbox, resource: resource, representations: representations)
})
} else {
return context.engine.peers.updatePeerPhoto(peerId: peerId, photo: context.engine.peers.uploadedPeerPhoto(resource: photoResource), video: context.engine.peers.uploadedPeerVideo(resource: videoResource) |> map(Optional.init), videoStartTimestamp: videoStartTimestamp, mapResourceToAvatarSizes: { resource, representations in
return mapResourceToAvatarSizes(postbox: account.postbox, resource: resource, representations: representations)
})
}
}
|> deliverOnMainQueue).start(next: { [weak self] result in
guard let self else {
return
}
switch result {
case .complete:
self.currentUpdatingAvatar = nil
self.state?.updated(transition: .spring(duration: 0.4))
case let .progress(value):
self.currentUpdatingAvatar = (representation, 0.25 + value * 0.75)
self.state?.updated(transition: .spring(duration: 0.4))
}
}))
}
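// Note on the progress mapping above (sketch, not part of this commit): video avatar
// updates scale conversion progress into 0.0...0.25 and upload progress into
// 0.25...1.0, so the indicator advances monotonically across both phases.
// A hypothetical helper expressing the same mapping:
private func combinedAvatarUpdateProgress(conversionProgress: Float?, uploadProgress: Float?) -> Float {
    if let uploadProgress {
        return 0.25 + uploadProgress * 0.75
    }
    return (conversionProgress ?? 0.0) * 0.25
}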
}
private final class ParticipantExtractedContentSource: ContextExtractedContentSource {
let keepInPlace: Bool = false
let ignoreContentTouches: Bool = false
let blurBackground: Bool = true
private let contentView: ContextExtractedContentContainingView
init(contentView: ContextExtractedContentContainingView) {
self.contentView = contentView
}
func takeView() -> ContextControllerTakeViewInfo? {
return ContextControllerTakeViewInfo(containingItem: .view(self.contentView), contentAreaInScreenSpace: UIScreen.main.bounds)
}
func putBack() -> ContextControllerPutBackViewInfo? {
return ContextControllerPutBackViewInfo(contentAreaInScreenSpace: UIScreen.main.bounds)
}
}
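The volume slider in openParticipantContextMenu above works in the UI's 0.0...1.0 range, while participant volume is sent as an Int32 on a 0...10000 scale via setVolume(peerId:volume:sync:). A minimal sketch of that conversion; the helper names are hypothetical and not part of this commit:
func sliderValue(fromParticipantVolume volume: Int32?) -> CGFloat {
    // nil means "default volume", which the slider shows as 1.0 (100%).
    return volume.flatMap { CGFloat($0) / 10000.0 } ?? 1.0
}
func participantVolume(fromSliderValue value: CGFloat) -> Int32 {
    // Clamp to the slider range before scaling back to the wire format.
    return Int32((max(0.0, min(1.0, value)) * 10000.0).rounded())
}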

View File

@ -18,7 +18,7 @@ extension BotInfo {
switch apiBotInfo {
case let .botInfo(_, _, description, descriptionPhoto, descriptionDocument, apiCommands, apiMenuButton, privacyPolicyUrl):
let photo: TelegramMediaImage? = descriptionPhoto.flatMap(telegramMediaImageFromApiPhoto)
let video: TelegramMediaFile? = descriptionDocument.flatMap(telegramMediaFileFromApiDocument)
let video: TelegramMediaFile? = descriptionDocument.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
var commands: [BotCommand] = []
if let apiCommands = apiCommands {
commands = apiCommands.map { command in

View File

@ -598,7 +598,7 @@ extension ChatContextResult {
if let photo = photo, let parsedImage = telegramMediaImageFromApiPhoto(photo) {
image = parsedImage
}
if let document = document, let parsedFile = telegramMediaFileFromApiDocument(document) {
if let document = document, let parsedFile = telegramMediaFileFromApiDocument(document, altDocuments: []) {
file = parsedFile
}
self = .internalReference(ChatContextResult.InternalReference(queryId: queryId, id: id, type: type, title: title, description: description, image: image, file: file, message: ChatContextResultMessage(apiMessage: sendMessage)))

View File

@ -192,7 +192,7 @@ extension InstantPage {
}
}
for file in files {
if let file = telegramMediaFileFromApiDocument(file), let id = file.id {
if let file = telegramMediaFileFromApiDocument(file, altDocuments: []), let id = file.id {
media[id] = file
}
}

View File

@ -89,6 +89,9 @@ extension ReplyMarkupButton {
))
}
self.init(title: text, titleWhenForwarded: nil, action: .requestPeer(peerType: mappedPeerType, buttonId: buttonId, maxQuantity: maxQuantity))
case let .keyboardButtonCopy(text, _):
//TODO:release
self.init(title: text, titleWhenForwarded: nil, action: .text)
}
}
}

View File

@ -50,7 +50,7 @@ public func tagsForStoreMessage(incoming: Bool, attributes: [MessageAttribute],
var isAnimated = false
inner: for attribute in file.attributes {
switch attribute {
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
refinedTag = .voiceOrInstantVideo
} else {
@ -350,9 +350,9 @@ func textMediaAndExpirationTimerFromApiMedia(_ media: Api.MessageMedia?, _ peerI
case let .messageMediaGeoLive(_, geo, heading, period, proximityNotificationRadius):
let mediaMap = telegramMediaMapFromApiGeoPoint(geo, title: nil, address: nil, provider: nil, venueId: nil, venueType: nil, liveBroadcastingTimeout: period, liveProximityNotificationRadius: proximityNotificationRadius, heading: heading)
return (mediaMap, nil, nil, nil, nil)
case let .messageMediaDocument(flags, document, _, ttlSeconds):
case let .messageMediaDocument(flags, document, altDocuments, ttlSeconds):
if let document = document {
if let mediaFile = telegramMediaFileFromApiDocument(document) {
if let mediaFile = telegramMediaFileFromApiDocument(document, altDocuments: altDocuments) {
return (mediaFile, ttlSeconds, (flags & (1 << 3)) != 0, (flags & (1 << 4)) != 0, nil)
}
} else {

View File

@ -6,7 +6,7 @@ import TelegramApi
func dimensionsForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) -> PixelDimensions? {
for attribute in attributes {
switch attribute {
case let .Video(_, size, _, _, _):
case let .Video(_, size, _, _, _, _):
return size
case let .ImageSize(size):
return size
@ -20,7 +20,7 @@ func dimensionsForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) ->
func durationForFileAttributes(_ attributes: [TelegramMediaFileAttribute]) -> Double? {
for attribute in attributes {
switch attribute {
case let .Video(duration, _, _, _, _):
case let .Video(duration, _, _, _, _, _):
return duration
case let .Audio(_, duration, _, _, _):
return Double(duration)
@ -99,7 +99,7 @@ func telegramMediaFileAttributesFromApiAttributes(_ attributes: [Api.DocumentAtt
result.append(.ImageSize(size: PixelDimensions(width: w, height: h)))
case .documentAttributeAnimated:
result.append(.Animated)
case let .documentAttributeVideo(flags, duration, w, h, preloadSize, videoStart):
case let .documentAttributeVideo(flags, duration, w, h, preloadSize, videoStart, videoCodec):
var videoFlags = TelegramMediaVideoFlags()
if (flags & (1 << 0)) != 0 {
videoFlags.insert(.instantRoundVideo)
@ -110,7 +110,7 @@ func telegramMediaFileAttributesFromApiAttributes(_ attributes: [Api.DocumentAtt
if (flags & (1 << 3)) != 0 {
videoFlags.insert(.isSilent)
}
result.append(.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: preloadSize, coverTime: videoStart))
result.append(.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: preloadSize, coverTime: videoStart, videoCodec: videoCodec))
case let .documentAttributeAudio(flags, duration, title, performer, waveform):
let isVoice = (flags & (1 << 10)) != 0
let waveformBuffer: Data? = waveform?.makeData()
@ -158,7 +158,7 @@ func telegramMediaFileThumbnailRepresentationsFromApiSizes(datacenterId: Int32,
return (immediateThumbnailData, representations)
}
func telegramMediaFileFromApiDocument(_ document: Api.Document) -> TelegramMediaFile? {
func telegramMediaFileFromApiDocument(_ document: Api.Document, altDocuments: [Api.Document]?) -> TelegramMediaFile? {
switch document {
case let .document(_, id, accessHash, fileReference, _, mimeType, size, thumbs, videoThumbs, dcId, attributes):
var parsedAttributes = telegramMediaFileAttributesFromApiAttributes(attributes)
@ -182,8 +182,13 @@ func telegramMediaFileFromApiDocument(_ document: Api.Document) -> TelegramMedia
}
}
}
var alternativeRepresentations: [Media] = []
if let altDocuments {
alternativeRepresentations = altDocuments.compactMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
}
return TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: size, fileReference: fileReference.makeData(), fileName: fileNameFromFileAttributes(parsedAttributes)), previewRepresentations: previewRepresentations, videoThumbnails: videoThumbnails, immediateThumbnailData: immediateThumbnail, mimeType: mimeType, size: size, attributes: parsedAttributes)
return TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: size, fileReference: fileReference.makeData(), fileName: fileNameFromFileAttributes(parsedAttributes)), previewRepresentations: previewRepresentations, videoThumbnails: videoThumbnails, immediateThumbnailData: immediateThumbnail, mimeType: mimeType, size: size, attributes: parsedAttributes, alternativeRepresentations: alternativeRepresentations)
case .documentEmpty:
return nil
}
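With altDocuments parsed into alternativeRepresentations above, callers can offer different encodings of the same video, which is the basis for dynamic quality selection. A minimal sketch, assuming only what the parser establishes (alternativeRepresentations is a [Media] whose entries are TelegramMediaFile values); the helper name is hypothetical:
func smallestAlternativeRepresentation(of file: TelegramMediaFile) -> TelegramMediaFile? {
    // Prefer the variant with the smallest declared size; returns nil when the
    // server provided no altDocuments for this document.
    let candidates = file.alternativeRepresentations.compactMap { $0 as? TelegramMediaFile }
    return candidates.min(by: { ($0.size ?? .max) < ($1.size ?? .max) })
}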

View File

@ -9,7 +9,7 @@ extension TelegramMediaGame {
case let .game(_, id, accessHash, shortName, title, description, photo, document):
var file: TelegramMediaFile?
if let document = document {
file = telegramMediaFileFromApiDocument(document)
file = telegramMediaFileFromApiDocument(document, altDocuments: [])
}
self.init(gameId: id, accessHash: accessHash, name: shortName, title: title, description: description, image: telegramMediaImageFromApiPhoto(photo), file: file)
}

View File

@ -9,7 +9,7 @@ func telegramMediaWebpageAttributeFromApiWebpageAttribute(_ attribute: Api.WebPa
case let .webPageAttributeTheme(_, documents, settings):
var files: [TelegramMediaFile] = []
if let documents = documents {
files = documents.compactMap { telegramMediaFileFromApiDocument($0) }
files = documents.compactMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
}
return .theme(TelegraMediaWebpageThemeAttribute(files: files, settings: settings.flatMap { TelegramThemeSettings(apiThemeSettings: $0) }))
case let .webPageAttributeStickerSet(apiFlags, stickers):
@ -21,7 +21,7 @@ func telegramMediaWebpageAttributeFromApiWebpageAttribute(_ attribute: Api.WebPa
flags.insert(.isTemplate)
}
var files: [TelegramMediaFile] = []
files = stickers.compactMap { telegramMediaFileFromApiDocument($0) }
files = stickers.compactMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
return .stickerPack(TelegramMediaWebpageStickerPackAttribute(flags: flags, files: files))
case .webPageAttributeStory:
return nil
@ -50,7 +50,7 @@ func telegramMediaWebpageFromApiWebpage(_ webpage: Api.WebPage) -> TelegramMedia
}
var file: TelegramMediaFile?
if let document = document {
file = telegramMediaFileFromApiDocument(document)
file = telegramMediaFileFromApiDocument(document, altDocuments: [])
}
var story: TelegramMediaStory?
var webpageAttributes: [TelegramMediaWebpageAttribute] = []

View File

@ -8,7 +8,7 @@ extension TelegramTheme {
convenience init(apiTheme: Api.Theme) {
switch apiTheme {
case let .theme(flags, id, accessHash, slug, title, document, settings, emoticon, installCount):
self.init(id: id, accessHash: accessHash, slug: slug, emoticon: emoticon, title: title, file: document.flatMap { telegramMediaFileFromApiDocument($0) }, settings: settings?.compactMap(TelegramThemeSettings.init(apiThemeSettings:)), isCreator: (flags & 1 << 0) != 0, isDefault: (flags & 1 << 1) != 0, installCount: installCount)
self.init(id: id, accessHash: accessHash, slug: slug, emoticon: emoticon, title: title, file: document.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }, settings: settings?.compactMap(TelegramThemeSettings.init(apiThemeSettings:)), isCreator: (flags & 1 << 0) != 0, isDefault: (flags & 1 << 1) != 0, installCount: installCount)
}
}
}

View File

@ -67,7 +67,7 @@ extension TelegramWallpaper {
init(apiWallpaper: Api.WallPaper) {
switch apiWallpaper {
case let .wallPaper(id, flags, accessHash, slug, document, settings):
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
let wallpaperSettings: WallpaperSettings
if let settings = settings {
wallpaperSettings = WallpaperSettings(apiWallpaperSettings: settings)

View File

@ -13,12 +13,12 @@ extension MediaResourceReference {
}
}
final class TelegramCloudMediaResourceFetchInfo: MediaResourceFetchInfo {
let reference: MediaResourceReference
let preferBackgroundReferenceRevalidation: Bool
let continueInBackground: Bool
public final class TelegramCloudMediaResourceFetchInfo: MediaResourceFetchInfo {
public let reference: MediaResourceReference
public let preferBackgroundReferenceRevalidation: Bool
public let continueInBackground: Bool
init(reference: MediaResourceReference, preferBackgroundReferenceRevalidation: Bool, continueInBackground: Bool) {
public init(reference: MediaResourceReference, preferBackgroundReferenceRevalidation: Bool, continueInBackground: Bool) {
self.reference = reference
self.preferBackgroundReferenceRevalidation = preferBackgroundReferenceRevalidation
self.continueInBackground = continueInBackground
@ -493,7 +493,7 @@ final class MediaReferenceRevalidationContext {
return .fail(.generic)
}
for document in result {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
return .single(file)
}
}
@ -956,9 +956,12 @@ func revalidateMediaResourceReference(accountPeerId: PeerId, postbox: Postbox, n
}
if let updatedResource = findUpdatedMediaResource(media: media, previousMedia: nil, resource: resource) {
return .single(RevalidatedMediaResource(updatedResource: updatedResource, updatedReference: nil))
} else if let alternativeMedia = item.alternativeMedia, let updatedResource = findUpdatedMediaResource(media: alternativeMedia, previousMedia: nil, resource: resource) {
return .single(RevalidatedMediaResource(updatedResource: updatedResource, updatedReference: nil))
} else {
for alternativeMediaValue in item.alternativeMediaList {
if let updatedResource = findUpdatedMediaResource(media: alternativeMediaValue, previousMedia: nil, resource: resource) {
return .single(RevalidatedMediaResource(updatedResource: updatedResource, updatedReference: nil))
}
}
return .fail(.generic)
}
}
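Resource revalidation above now walks every entry in item.alternativeMediaList rather than a single alternativeMedia, so a stale reference belonging to any quality variant can still be refreshed. The same scan order, expressed as a generic helper (hypothetical, not part of this commit):
func firstMatch<T>(primary: Media, alternatives: [Media], probe: (Media) -> T?) -> T? {
    // Probe the primary media first, then each alternative representation in order.
    for media in [primary] + alternatives {
        if let result = probe(media) {
            return result
        }
    }
    return nil
}
// Usage mirroring the code above:
// firstMatch(primary: media, alternatives: item.alternativeMediaList) {
//     findUpdatedMediaResource(media: $0, previousMedia: nil, resource: resource)
// }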

View File

@ -205,7 +205,7 @@ func augmentMediaWithReference(_ mediaReference: AnyMediaReference) -> Media {
private func convertForwardedMediaForSecretChat(_ media: Media) -> Media {
if let file = media as? TelegramMediaFile {
return TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: file.partialReference, resource: file.resource, previewRepresentations: file.previewRepresentations, videoThumbnails: file.videoThumbnails, immediateThumbnailData: file.immediateThumbnailData, mimeType: file.mimeType, size: file.size, attributes: file.attributes)
return TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: file.partialReference, resource: file.resource, previewRepresentations: file.previewRepresentations, videoThumbnails: file.videoThumbnails, immediateThumbnailData: file.immediateThumbnailData, mimeType: file.mimeType, size: file.size, attributes: file.attributes, alternativeRepresentations: [])
} else if let image = media as? TelegramMediaImage {
return TelegramMediaImage(imageId: MediaId(namespace: Namespaces.Media.LocalImage, id: Int64.random(in: Int64.min ... Int64.max)), representations: image.representations, immediateThumbnailData: image.immediateThumbnailData, reference: image.reference, partialReference: image.partialReference, flags: [])
} else {

View File

@ -703,7 +703,7 @@ func inputDocumentAttributesFromFileAttributes(_ fileAttributes: [TelegramMediaF
attributes.append(.documentAttributeSticker(flags: flags, alt: displayText, stickerset: stickerSet, maskCoords: inputMaskCoords))
case .HasLinkedStickers:
attributes.append(.documentAttributeHasStickers)
case let .Video(duration, size, videoFlags, preloadSize, coverTime):
case let .Video(duration, size, videoFlags, preloadSize, coverTime, videoCodec):
var flags: Int32 = 0
if videoFlags.contains(.instantRoundVideo) {
flags |= (1 << 0)
@ -720,7 +720,10 @@ func inputDocumentAttributesFromFileAttributes(_ fileAttributes: [TelegramMediaF
if let coverTime = coverTime, coverTime > 0.0 {
flags |= (1 << 4)
}
attributes.append(.documentAttributeVideo(flags: flags, duration: duration, w: Int32(size.width), h: Int32(size.height), preloadPrefixSize: preloadSize, videoStartTs: coverTime))
if videoCodec != nil {
flags |= (1 << 5)
}
attributes.append(.documentAttributeVideo(flags: flags, duration: duration, w: Int32(size.width), h: Int32(size.height), preloadPrefixSize: preloadSize, videoStartTs: coverTime, videoCodec: videoCodec))
case let .Audio(isVoice, duration, title, performer, waveform):
var flags: Int32 = 0
if isVoice {
@ -790,7 +793,7 @@ public func statsCategoryForFileWithAttributes(_ attributes: [TelegramMediaFileA
} else {
return .audio
}
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(TelegramMediaVideoFlags.instantRoundVideo) {
return .voiceMessages
} else {
@ -1065,8 +1068,8 @@ private func uploadedMediaFileContent(network: Network, postbox: Postbox, auxili
|> mapError { _ -> PendingMessageUploadError in return .generic }
|> mapToSignal { result -> Signal<PendingMessageUploadedContentResult, PendingMessageUploadError> in
switch result {
case let .messageMediaDocument(_, document, _, _):
if let document = document, let mediaFile = telegramMediaFileFromApiDocument(document), let resource = mediaFile.resource as? CloudDocumentMediaResource, let fileReference = resource.fileReference {
case let .messageMediaDocument(_, document, altDocuments, _):
if let document = document, let mediaFile = telegramMediaFileFromApiDocument(document, altDocuments: altDocuments), let resource = mediaFile.resource as? CloudDocumentMediaResource, let fileReference = resource.fileReference {
var flags: Int32 = 0
var ttlSeconds: Int32?
if let autoclearMessageAttribute = autoclearMessageAttribute {
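The Video attribute now round-trips a videoCodec value next to preloadSize and coverTime, with bit 5 of the attribute flags signalling its presence on the wire. A minimal sketch of reading it back from a parsed file, assuming the codec travels as an optional string as in the API attribute; the helper is hypothetical:
func videoCodec(of file: TelegramMediaFile) -> String? {
    // Return the codec recorded in the file's Video attribute, if any.
    for attribute in file.attributes {
        if case let .Video(_, _, _, _, _, videoCodec) = attribute {
            return videoCodec
        }
    }
    return nil
}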

View File

@ -162,9 +162,9 @@ public func standaloneUploadedFile(postbox: Postbox, network: Network, peerId: P
|> mapError { _ -> StandaloneUploadMediaError in return .generic }
|> mapToSignal { media -> Signal<StandaloneUploadMediaEvent, StandaloneUploadMediaError> in
switch media {
case let .messageMediaDocument(_, document, _, _):
case let .messageMediaDocument(_, document, altDocuments, _):
if let document = document {
if let mediaFile = telegramMediaFileFromApiDocument(document) {
if let mediaFile = telegramMediaFileFromApiDocument(document, altDocuments: altDocuments) {
return .single(.result(.media(.standalone(media: mediaFile))))
}
}
@ -194,7 +194,7 @@ public func standaloneUploadedFile(postbox: Postbox, network: Network, peerId: P
|> mapToSignal { result -> Signal<StandaloneUploadMediaEvent, StandaloneUploadMediaError> in
switch result {
case let .encryptedFile(id, accessHash, size, dcId, _):
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: SecretFileMediaResource(fileId: id, accessHash: accessHash, containerSize: size, decryptedSize: size, datacenterId: Int(dcId), key: key), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: size, attributes: attributes)
let media = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: SecretFileMediaResource(fileId: id, accessHash: accessHash, containerSize: size, decryptedSize: size, datacenterId: Int(dcId), key: key), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: size, attributes: attributes, alternativeRepresentations: [])
return .single(.result(.media(.standalone(media: media))))
case .encryptedFileEmpty:

View File

@ -4762,7 +4762,7 @@ func replayFinalState(
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -4796,7 +4796,7 @@ func replayFinalState(
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -4980,7 +4980,7 @@ func replayFinalState(
}
for apiDocument in documents {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys

View File

@ -1196,7 +1196,7 @@ public final class AccountViewTracker {
switch result {
case let .stickerSet(_, _, _, documents)?:
for document in documents {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
if transaction.getMedia(file.fileId) != nil {
let _ = transaction.updateMedia(file.fileId, update: file)
}

View File

@@ -216,7 +216,7 @@ func managedSynchronizeAvailableMessageEffects(postbox: Postbox, network: Networ
case let .availableEffects(hash, effects, documents):
var files: [Int64: TelegramMediaFile] = [:]
for document in documents {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
files[file.fileId.id] = file
}
}

View File

@@ -22,7 +22,8 @@ private func generateStarsReactionFile(kind: Int, isAnimatedSticker: Bool) -> Te
immediateThumbnailData: nil,
mimeType: isAnimatedSticker ? "application/x-tgsticker" : "image/webp",
size: nil,
attributes: attributes
attributes: attributes,
alternativeRepresentations: []
)
}
@@ -261,23 +262,23 @@ private extension AvailableReactions.Reaction {
convenience init?(apiReaction: Api.AvailableReaction) {
switch apiReaction {
case let .availableReaction(flags, reaction, title, staticIcon, appearAnimation, selectAnimation, activateAnimation, effectAnimation, aroundAnimation, centerIcon):
guard let staticIconFile = telegramMediaFileFromApiDocument(staticIcon) else {
guard let staticIconFile = telegramMediaFileFromApiDocument(staticIcon, altDocuments: []) else {
return nil
}
guard let appearAnimationFile = telegramMediaFileFromApiDocument(appearAnimation) else {
guard let appearAnimationFile = telegramMediaFileFromApiDocument(appearAnimation, altDocuments: []) else {
return nil
}
guard let selectAnimationFile = telegramMediaFileFromApiDocument(selectAnimation) else {
guard let selectAnimationFile = telegramMediaFileFromApiDocument(selectAnimation, altDocuments: []) else {
return nil
}
guard let activateAnimationFile = telegramMediaFileFromApiDocument(activateAnimation) else {
guard let activateAnimationFile = telegramMediaFileFromApiDocument(activateAnimation, altDocuments: []) else {
return nil
}
guard let effectAnimationFile = telegramMediaFileFromApiDocument(effectAnimation) else {
guard let effectAnimationFile = telegramMediaFileFromApiDocument(effectAnimation, altDocuments: []) else {
return nil
}
let aroundAnimationFile = aroundAnimation.flatMap { telegramMediaFileFromApiDocument($0) }
let centerAnimationFile = centerIcon.flatMap { telegramMediaFileFromApiDocument($0) }
let aroundAnimationFile = aroundAnimation.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
let centerAnimationFile = centerIcon.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
let isEnabled = (flags & (1 << 0)) == 0
let isPremium = (flags & (1 << 2)) != 0
self.init(

View File

@@ -170,7 +170,7 @@ func resolveUnknownEmojiFiles<T>(postbox: Postbox, source: FetchMessageHistoryHo
for documentSet in documentSets {
if let documentSet = documentSet {
for document in documentSet {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
transaction.storeMediaIfNotPresent(media: file)
}
}

View File

@@ -65,7 +65,7 @@ private extension PremiumPromoConfiguration {
var videos: [String: TelegramMediaFile] = [:]
for (key, document) in zip(videoSections, videoFiles) {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
videos[key] = file
}
}

View File

@@ -53,7 +53,7 @@ func managedRecentStickers(postbox: Postbox, network: Network, forceFetch: Bool
case let .recentStickers(_, _, stickers, _):
var items: [OrderedItemListEntry] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
if let entry = CodableEntry(RecentMediaItem(file)) {
items.append(OrderedItemListEntry(id: RecentMediaItemId(id).rawValue, contents: entry))
}
@@ -76,7 +76,7 @@ func managedRecentGifs(postbox: Postbox, network: Network, forceFetch: Bool = fa
case let .savedGifs(_, gifs):
var items: [OrderedItemListEntry] = []
for gif in gifs {
if let file = telegramMediaFileFromApiDocument(gif), let id = file.id {
if let file = telegramMediaFileFromApiDocument(gif, altDocuments: []), let id = file.id {
if let entry = CodableEntry(RecentMediaItem(file)) {
items.append(OrderedItemListEntry(id: RecentMediaItemId(id).rawValue, contents: entry))
}
@@ -114,7 +114,7 @@ func managedSavedStickers(postbox: Postbox, network: Network, forceFetch: Bool =
var items: [OrderedItemListEntry] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
var stringRepresentations: [String] = []
if let representations = fileStringRepresentations[id] {
stringRepresentations = representations
@@ -141,7 +141,7 @@ func managedGreetingStickers(postbox: Postbox, network: Network) -> Signal<Void,
case let .stickers(_, stickers):
var items: [OrderedItemListEntry] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
if let entry = CodableEntry(RecentMediaItem(file)) {
items.append(OrderedItemListEntry(id: RecentMediaItemId(id).rawValue, contents: entry))
}
@@ -165,7 +165,7 @@ func managedPremiumStickers(postbox: Postbox, network: Network) -> Signal<Void,
case let .stickers(_, stickers):
var items: [OrderedItemListEntry] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
if let entry = CodableEntry(RecentMediaItem(file)) {
items.append(OrderedItemListEntry(id: RecentMediaItemId(id).rawValue, contents: entry))
}
@@ -189,7 +189,7 @@ func managedAllPremiumStickers(postbox: Postbox, network: Network) -> Signal<Voi
case let .stickers(_, stickers):
var items: [OrderedItemListEntry] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
if let entry = CodableEntry(RecentMediaItem(file)) {
items.append(OrderedItemListEntry(id: RecentMediaItemId(id).rawValue, contents: entry))
}

View File

@@ -553,7 +553,7 @@ private func decryptedAttributes46(_ attributes: [TelegramMediaFileAttribute], t
result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet))
case let .ImageSize(size):
result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height)))
case let .Video(duration, size, _, _, _):
case let .Video(duration, size, _, _, _, _):
result.append(.documentAttributeVideo(duration: Int32(duration), w: Int32(size.width), h: Int32(size.height)))
case let .Audio(isVoice, duration, title, performer, waveform):
var flags: Int32 = 0
@@ -612,7 +612,7 @@ private func decryptedAttributes73(_ attributes: [TelegramMediaFileAttribute], t
result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet))
case let .ImageSize(size):
result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height)))
case let .Video(duration, size, videoFlags, _, _):
case let .Video(duration, size, videoFlags, _, _, _):
var flags: Int32 = 0
if videoFlags.contains(.instantRoundVideo) {
flags |= 1 << 0
@@ -675,7 +675,7 @@ private func decryptedAttributes101(_ attributes: [TelegramMediaFileAttribute],
result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet))
case let .ImageSize(size):
result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height)))
case let .Video(duration, size, videoFlags, _, _):
case let .Video(duration, size, videoFlags, _, _, _):
var flags: Int32 = 0
if videoFlags.contains(.instantRoundVideo) {
flags |= 1 << 0
@@ -738,7 +738,7 @@ private func decryptedAttributes144(_ attributes: [TelegramMediaFileAttribute],
result.append(.documentAttributeSticker(alt: displayText, stickerset: stickerSet))
case let .ImageSize(size):
result.append(.documentAttributeImageSize(w: Int32(size.width), h: Int32(size.height)))
case let .Video(duration, size, videoFlags, _, _):
case let .Video(duration, size, videoFlags, _, _, _):
var flags: Int32 = 0
if videoFlags.contains(.instantRoundVideo) {
flags |= 1 << 0
@@ -1810,7 +1810,7 @@ private func sendMessage(auxiliaryMethods: AccountAuxiliaryMethods, postbox: Pos
if let fromMedia = currentMessage.media.first, let encryptedFile = encryptedFile, let file = file {
var toMedia: Media?
if let fromMedia = fromMedia as? TelegramMediaFile {
let updatedFile = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: encryptedFile.id), partialReference: nil, resource: SecretFileMediaResource(fileId: encryptedFile.id, accessHash: encryptedFile.accessHash, containerSize: encryptedFile.size, decryptedSize: file.size, datacenterId: Int(encryptedFile.datacenterId), key: file.key), previewRepresentations: fromMedia.previewRepresentations, videoThumbnails: fromMedia.videoThumbnails, immediateThumbnailData: fromMedia.immediateThumbnailData, mimeType: fromMedia.mimeType, size: fromMedia.size, attributes: fromMedia.attributes)
let updatedFile = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: encryptedFile.id), partialReference: nil, resource: SecretFileMediaResource(fileId: encryptedFile.id, accessHash: encryptedFile.accessHash, containerSize: encryptedFile.size, decryptedSize: file.size, datacenterId: Int(encryptedFile.datacenterId), key: file.key), previewRepresentations: fromMedia.previewRepresentations, videoThumbnails: fromMedia.videoThumbnails, immediateThumbnailData: fromMedia.immediateThumbnailData, mimeType: fromMedia.mimeType, size: fromMedia.size, attributes: fromMedia.attributes, alternativeRepresentations: fromMedia.alternativeRepresentations)
toMedia = updatedFile
updatedMedia = [updatedFile]
}
@@ -1947,7 +1947,8 @@ private func sendStandaloneMessage(auxiliaryMethods: AccountAuxiliaryMethods, po
immediateThumbnailData: file.immediateThumbnailData,
mimeType: file.mimeType,
size: file.size,
attributes: file.attributes
attributes: file.attributes,
alternativeRepresentations: file.alternativeRepresentations
)
updatedMedia.append(updatedFile)
} else if let image = item as? TelegramMediaImage, let encryptedFile = encryptedFile, let sourceFile = contents.file, let representation = image.representations.last {

View File

@@ -183,7 +183,7 @@ private func fetchStickerPack(network: Network, info: StickerPackCollectionInfo)
}
for apiDocument in documents {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys

View File

@@ -610,7 +610,7 @@ extension TelegramMediaFileAttribute {
}
self = .Sticker(displayText: alt, packReference: packReference, maskData: nil)
case let .documentAttributeVideo(duration, w, h):
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil)
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)
}
}
}
@@ -642,7 +642,7 @@ extension TelegramMediaFileAttribute {
if (flags & (1 << 0)) != 0 {
videoFlags.insert(.instantRoundVideo)
}
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil)
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil, videoCodec: nil)
}
}
}
@@ -674,7 +674,7 @@ extension TelegramMediaFileAttribute {
if (flags & (1 << 0)) != 0 {
videoFlags.insert(.instantRoundVideo)
}
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil)
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil, videoCodec: nil)
}
}
}
@@ -706,7 +706,7 @@ extension TelegramMediaFileAttribute {
if (flags & (1 << 0)) != 0 {
videoFlags.insert(.instantRoundVideo)
}
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil)
self = .Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: videoFlags, preloadSize: nil, coverTime: nil, videoCodec: nil)
}
}
}
@@ -793,7 +793,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
}
case let .decryptedMessageMediaAudio(duration, mimeType, size, key, iv):
if let file = file {
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)])
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)], alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaDocument(thumb, thumbW, thumbH, mimeType, size, key, iv, attributes, caption):
@@ -813,7 +813,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaVideo(thumb, thumbW, thumbH, duration, mimeType, w, h, size, key, iv, caption):
@@ -821,14 +821,14 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
text = caption
}
if let file = file {
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")]
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil), .FileName(fileName: "video.mov")]
var previewRepresentations: [TelegramMediaImageRepresentation] = []
if thumb.size != 0 {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaExternalDocument(id, accessHash, _, mimeType, size, thumb, dcId, attributes):
@@ -861,7 +861,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
default:
break
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
case let .decryptedMessageMediaWebPage(url):
parsedMedia.append(TelegramMediaWebpage(webpageId: MediaId(namespace: Namespaces.Media.LocalWebpage, id: Int64.random(in: Int64.min ... Int64.max)), content: .Pending(0, url)))
@@ -995,7 +995,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
}
case let .decryptedMessageMediaAudio(duration, mimeType, size, key, iv):
if let file = file {
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)])
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)], alternativeRepresentations: [])
parsedMedia.append(fileMedia)
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1016,12 +1016,12 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
loop: for attr in parsedAttributes {
switch attr {
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1040,14 +1040,14 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
text = caption
}
if let file = file {
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")]
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil), .FileName(fileName: "video.mov")]
var previewRepresentations: [TelegramMediaImageRepresentation] = []
if thumb.size != 0 {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaExternalDocument(id, accessHash, _, mimeType, size, thumb, dcId, attributes):
@@ -1080,7 +1080,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
default:
break
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
case let .decryptedMessageMediaWebPage(url):
parsedMedia.append(TelegramMediaWebpage(webpageId: MediaId(namespace: Namespaces.Media.LocalWebpage, id: Int64.random(in: Int64.min ... Int64.max)), content: .Pending(0, url)))
@@ -1274,7 +1274,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
}
case let .decryptedMessageMediaAudio(duration, mimeType, size, key, iv):
if let file = file {
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)])
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)], alternativeRepresentations: [])
parsedMedia.append(fileMedia)
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1295,12 +1295,12 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
loop: for attr in parsedAttributes {
switch attr {
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1319,14 +1319,14 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
text = caption
}
if let file = file {
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")]
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil), .FileName(fileName: "video.mov")]
var previewRepresentations: [TelegramMediaImageRepresentation] = []
if thumb.size != 0 {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaExternalDocument(id, accessHash, _, mimeType, size, thumb, dcId, attributes):
@@ -1359,7 +1359,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
default:
break
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
case let .decryptedMessageMediaWebPage(url):
parsedMedia.append(TelegramMediaWebpage(webpageId: MediaId(namespace: Namespaces.Media.LocalWebpage, id: Int64.random(in: Int64.min ... Int64.max)), content: .Pending(0, url)))
@@ -1475,7 +1475,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
}
case let .decryptedMessageMediaAudio(duration, mimeType, size, key, iv):
if let file = file {
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)])
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: [TelegramMediaFileAttribute.Audio(isVoice: true, duration: Int(duration), title: nil, performer: nil, waveform: nil)], alternativeRepresentations: [])
parsedMedia.append(fileMedia)
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1496,12 +1496,12 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: size), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: size), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
loop: for attr in parsedAttributes {
switch attr {
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
attributes.append(ConsumableContentMessageAttribute(consumed: false))
}
@@ -1520,14 +1520,14 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
text = caption
}
if let file = file {
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil), .FileName(fileName: "video.mov")]
let parsedAttributes: [TelegramMediaFileAttribute] = [.Video(duration: Double(duration), size: PixelDimensions(width: w, height: h), flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil), .FileName(fileName: "video.mov")]
var previewRepresentations: [TelegramMediaImageRepresentation] = []
if thumb.size != 0 {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: thumbW, height: thumbH), resource: resource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false))
resources.append((resource, thumb.makeData()))
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudSecretFile, id: file.id), partialReference: nil, resource: file.resource(key: SecretFileEncryptionKey(aesKey: key.makeData(), aesIv: iv.makeData()), decryptedSize: Int64(size)), previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
}
case let .decryptedMessageMediaExternalDocument(id, accessHash, _, mimeType, size, thumb, dcId, attributes):
@@ -1560,7 +1560,7 @@ private func parseMessage(peerId: PeerId, authorId: PeerId, tagLocalIndex: Int32
default:
break
}
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes)
let fileMedia = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.CloudFile, id: id), partialReference: nil, resource: CloudDocumentMediaResource(datacenterId: Int(dcId), fileId: id, accessHash: accessHash, size: Int64(size), fileReference: nil, fileName: nil), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: mimeType, size: Int64(size), attributes: parsedAttributes, alternativeRepresentations: [])
parsedMedia.append(fileMedia)
case let .decryptedMessageMediaWebPage(url):
parsedMedia.append(TelegramMediaWebpage(webpageId: MediaId(namespace: Namespaces.Media.LocalWebpage, id: Int64.random(in: Int64.min ... Int64.max)), content: .Pending(0, url)))
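
In the secret-chat parsing paths above, the decrypted payloads carry no codec hint and no alternative representations, so every constructed file uses videoCodec: nil and alternativeRepresentations: []. A small sketch (an assumption, not taken from the diff) of a helper capturing that default:

// Sketch of an assumed helper for the secret-chat paths, where the payload
// provides neither codec metadata nor alternative representations.
func secretChatVideoAttribute(duration: Double, width: Int32, height: Int32) -> TelegramMediaFileAttribute {
    return .Video(
        duration: duration,
        size: PixelDimensions(width: width, height: height),
        flags: [],
        preloadSize: nil,
        coverTime: nil,
        videoCodec: nil // secret-chat payloads carry no codec information
    )
}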

View File

@@ -210,7 +210,7 @@ public class BoxedMessage: NSObject {
public class Serialization: NSObject, MTSerialization {
public func currentLayer() -> UInt {
return 187
return 188
}
public func parseMessage(_ data: Data!) -> Any! {

View File

@@ -291,7 +291,7 @@ func parsePreviewStickerSet(_ set: Api.StickerSetCovered, namespace: ItemCollect
case let .stickerSetCovered(set, cover):
let info = StickerPackCollectionInfo(apiSet: set, namespace: namespace)
var items: [StickerPackItem] = []
if let file = telegramMediaFileFromApiDocument(cover), let id = file.id {
if let file = telegramMediaFileFromApiDocument(cover, altDocuments: []), let id = file.id {
items.append(StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: id.id), file: file, indexKeys: []))
}
return (info, items)
@@ -299,7 +299,7 @@ func parsePreviewStickerSet(_ set: Api.StickerSetCovered, namespace: ItemCollect
let info = StickerPackCollectionInfo(apiSet: set, namespace: namespace)
var items: [StickerPackItem] = []
for cover in covers {
if let file = telegramMediaFileFromApiDocument(cover), let id = file.id {
if let file = telegramMediaFileFromApiDocument(cover, altDocuments: []), let id = file.id {
items.append(StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: id.id), file: file, indexKeys: []))
}
}
@@ -339,7 +339,7 @@ func parsePreviewStickerSet(_ set: Api.StickerSetCovered, namespace: ItemCollect
let info = StickerPackCollectionInfo(apiSet: set, namespace: namespace)
var items: [StickerPackItem] = []
for document in documents {
if let file = telegramMediaFileFromApiDocument(document), let id = file.id {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys

View File

@@ -321,7 +321,7 @@ private final class StoryStatsPublicForwardsContextImpl {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
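
The story item model changes from a single optional alternativeMedia value to an alternativeMediaList array, so call sites switch from flatMap(EngineMedia.init) to map(EngineMedia.init). A sketch of the shape change (an assumed migration pattern, not code from this commit):

// Assumed migration shape: an old optional single value becomes a list.
let legacyAlternativeMedia: Media? = nil            // previous model: at most one alternative
let alternativeMediaList: [Media] = legacyAlternativeMedia.map { [$0] } ?? []
// The engine layer now maps over the whole list instead of flat-mapping one value:
let engineAlternatives = alternativeMediaList.map(EngineMedia.init)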

View File

@@ -788,6 +788,35 @@ public enum MediaReference<T: Media> {
}
}
public func withMedia(_ media: T) -> MediaReference<T> {
switch self {
case .standalone:
return .standalone(media: media)
case let .message(message, _):
return .message(message: message, media: media)
case let .webPage(webPage, _):
return .webPage(webPage: webPage, media: media)
case let .stickerPack(stickerPack, _):
return .stickerPack(stickerPack: stickerPack, media: media)
case .savedGif:
return .savedGif(media: media)
case .savedSticker:
return .savedSticker(media: media)
case .recentSticker:
return .recentSticker(media: media)
case let .avatarList(peer, _):
return .avatarList(peer: peer, media: media)
case let .attachBot(peer, _):
return .attachBot(peer: peer, media: media)
case .customEmoji:
return .customEmoji(media: media)
case let .story(peer, id, _):
return .story(peer: peer, id: id, media: media)
case let .starsTransaction(transaction, _):
return .starsTransaction(transaction: transaction, media: media)
}
}
public func resourceReference(_ resource: MediaResource) -> MediaResourceReference {
return .media(media: self.abstract, resource: resource)
}
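
The new withMedia(_:) helper rebuilds a MediaReference around a different media value while keeping the original reference context (message, story, sticker pack, and so on). A sketch of assumed usage for preferring an alternative video representation:

// Sketch under assumptions: FileMediaReference is MediaReference<TelegramMediaFile>,
// and the first alternative representation (if any) is the one preferred for streaming.
func preferringAlternativeRepresentation(_ fileReference: FileMediaReference) -> FileMediaReference {
    if let alternative = fileReference.media.alternativeRepresentations.first as? TelegramMediaFile {
        // Same message/story/etc. context, different concrete file.
        return fileReference.withMedia(alternative)
    }
    return fileReference
}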

View File

@@ -235,7 +235,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable {
case Sticker(displayText: String, packReference: StickerPackReference?, maskData: StickerMaskCoords?)
case ImageSize(size: PixelDimensions)
case Animated
case Video(duration: Double, size: PixelDimensions, flags: TelegramMediaVideoFlags, preloadSize: Int32?, coverTime: Double?)
case Video(duration: Double, size: PixelDimensions, flags: TelegramMediaVideoFlags, preloadSize: Int32?, coverTime: Double?, videoCodec: String?)
case Audio(isVoice: Bool, duration: Int, title: String?, performer: String?, waveform: Data?)
case HasLinkedStickers
case hintFileIsLarge
@@ -262,7 +262,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable {
duration = Double(decoder.decodeInt32ForKey("du", orElse: 0))
}
self = .Video(duration: duration, size: PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0)), flags: TelegramMediaVideoFlags(rawValue: decoder.decodeInt32ForKey("f", orElse: 0)), preloadSize: decoder.decodeOptionalInt32ForKey("prs"), coverTime: decoder.decodeOptionalDoubleForKey("ct"))
self = .Video(duration: duration, size: PixelDimensions(width: decoder.decodeInt32ForKey("w", orElse: 0), height: decoder.decodeInt32ForKey("h", orElse: 0)), flags: TelegramMediaVideoFlags(rawValue: decoder.decodeInt32ForKey("f", orElse: 0)), preloadSize: decoder.decodeOptionalInt32ForKey("prs"), coverTime: decoder.decodeOptionalDoubleForKey("ct"), videoCodec: decoder.decodeOptionalStringForKey("vc"))
case typeAudio:
let waveformBuffer = decoder.decodeBytesForKeyNoCopy("wf")
var waveform: Data?
@@ -309,7 +309,7 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable {
encoder.encodeInt32(Int32(size.height), forKey: "h")
case .Animated:
encoder.encodeInt32(typeAnimated, forKey: "t")
case let .Video(duration, size, flags, preloadSize, coverTime):
case let .Video(duration, size, flags, preloadSize, coverTime, videoCodec):
encoder.encodeInt32(typeVideo, forKey: "t")
encoder.encodeDouble(duration, forKey: "dur")
encoder.encodeInt32(Int32(size.width), forKey: "w")
@@ -325,6 +325,11 @@ public enum TelegramMediaFileAttribute: PostboxCoding, Equatable {
} else {
encoder.encodeNil(forKey: "ct")
}
if let videoCodec {
encoder.encodeString(videoCodec, forKey: "vc")
} else {
encoder.encodeNil(forKey: "vc")
}
case let .Audio(isVoice, duration, title, performer, waveform):
encoder.encodeInt32(typeAudio, forKey: "t")
encoder.encodeInt32(isVoice ? 1 : 0, forKey: "iv")
@@ -440,6 +445,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
public let mimeType: String
public let size: Int64?
public let attributes: [TelegramMediaFileAttribute]
public let alternativeRepresentations: [Media]
public let peerIds: [PeerId] = []
public var id: MediaId? {
@@ -459,7 +465,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
return result.isEmpty ? nil : result
}
public init(fileId: MediaId, partialReference: PartialMediaReference?, resource: TelegramMediaResource, previewRepresentations: [TelegramMediaImageRepresentation], videoThumbnails: [TelegramMediaFile.VideoThumbnail], immediateThumbnailData: Data?, mimeType: String, size: Int64?, attributes: [TelegramMediaFileAttribute]) {
public init(fileId: MediaId, partialReference: PartialMediaReference?, resource: TelegramMediaResource, previewRepresentations: [TelegramMediaImageRepresentation], videoThumbnails: [TelegramMediaFile.VideoThumbnail], immediateThumbnailData: Data?, mimeType: String, size: Int64?, attributes: [TelegramMediaFileAttribute], alternativeRepresentations: [Media]) {
self.fileId = fileId
self.partialReference = partialReference
self.resource = resource
@@ -469,6 +475,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
self.mimeType = mimeType
self.size = size
self.attributes = attributes
self.alternativeRepresentations = alternativeRepresentations
}
public init(decoder: PostboxDecoder) {
@@ -487,6 +494,13 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
self.size = nil
}
self.attributes = decoder.decodeObjectArrayForKey("at")
if let altMedia = try? decoder.decodeObjectArrayWithCustomDecoderForKey("arep", decoder: { d in
return d.decodeRootObject() as! Media
}) {
self.alternativeRepresentations = altMedia
} else {
self.alternativeRepresentations = []
}
}
public func encode(_ encoder: PostboxEncoder) {
@@ -513,6 +527,9 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
encoder.encodeNil(forKey: "s64")
}
encoder.encodeObjectArray(self.attributes, forKey: "at")
encoder.encodeObjectArrayWithEncoder(self.alternativeRepresentations, forKey: "arep", encoder: { v, e in
e.encodeRootObject(v)
})
}
public required init(from decoder: Decoder) throws {
@ -531,6 +548,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
self.mimeType = object.mimeType
self.size = object.size
self.attributes = object.attributes
self.alternativeRepresentations = object.alternativeRepresentations
}
public func encode(to encoder: Encoder) throws {
@@ -597,7 +615,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
public var isInstantVideo: Bool {
for attribute in self.attributes {
if case .Video(_, _, let flags, _, _) = attribute {
if case .Video(_, _, let flags, _, _, _) = attribute {
return flags.contains(.instantRoundVideo)
}
}
@@ -606,7 +624,7 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
public var preloadSize: Int32? {
for attribute in self.attributes {
if case .Video(_, _, _, let preloadSize, _) = attribute {
if case .Video(_, _, _, let preloadSize, _, _) = attribute {
return preloadSize
}
}
@@ -803,6 +821,10 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
return false
}
if !areMediaArraysEqual(self.alternativeRepresentations, other.alternativeRepresentations) {
return false
}
return true
}
@@ -849,27 +871,31 @@ public final class TelegramMediaFile: Media, Equatable, Codable {
return false
}
if !areMediaArraysSemanticallyEqual(self.alternativeRepresentations, other.alternativeRepresentations) {
return false
}
return true
}
public func withUpdatedPartialReference(_ partialReference: PartialMediaReference?) -> TelegramMediaFile {
return TelegramMediaFile(fileId: self.fileId, partialReference: partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes)
return TelegramMediaFile(fileId: self.fileId, partialReference: partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes, alternativeRepresentations: self.alternativeRepresentations)
}
public func withUpdatedResource(_ resource: TelegramMediaResource) -> TelegramMediaFile {
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes)
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes, alternativeRepresentations: self.alternativeRepresentations)
}
public func withUpdatedSize(_ size: Int64?) -> TelegramMediaFile {
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: size, attributes: self.attributes)
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: size, attributes: self.attributes, alternativeRepresentations: self.alternativeRepresentations)
}
public func withUpdatedPreviewRepresentations(_ previewRepresentations: [TelegramMediaImageRepresentation]) -> TelegramMediaFile {
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes)
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: self.attributes, alternativeRepresentations: self.alternativeRepresentations)
}
public func withUpdatedAttributes(_ attributes: [TelegramMediaFileAttribute]) -> TelegramMediaFile {
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: attributes)
return TelegramMediaFile(fileId: self.fileId, partialReference: self.partialReference, resource: self.resource, previewRepresentations: self.previewRepresentations, videoThumbnails: self.videoThumbnails, immediateThumbnailData: self.immediateThumbnailData, mimeType: self.mimeType, size: self.size, attributes: attributes, alternativeRepresentations: self.alternativeRepresentations)
}
}
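
The Video attribute now carries an optional videoCodec string, persisted under the "vc" key alongside the new alternativeRepresentations array. A minimal sketch (an assumption, mirroring the existing preloadSize accessor above) of a convenience getter for the codec hint:

// Assumed convenience accessor, following the pattern of the preloadSize and
// isInstantVideo helpers; returns the codec hint of the first Video attribute.
extension TelegramMediaFile {
    public var videoCodecHint: String? {
        for attribute in self.attributes {
            if case let .Video(_, _, _, _, _, videoCodec) = attribute {
                return videoCodec
            }
        }
        return nil
    }
}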

View File

@@ -179,7 +179,7 @@ public struct TelegramWallpaperNativeCodable: Codable {
public enum TelegramWallpaper: Equatable {
public static func emoticonWallpaper(emoticon: String) -> TelegramWallpaper {
return .file(File(id: -1, accessHash: -1, isCreator: false, isDefault: false, isPattern: false, isDark: false, slug: "", file: TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "", size: nil, attributes: []), settings: WallpaperSettings(emoticon: emoticon)))
return .file(File(id: -1, accessHash: -1, isCreator: false, isDefault: false, isPattern: false, isDark: false, slug: "", file: TelegramMediaFile(fileId: MediaId(namespace: 0, id: 0), partialReference: nil, resource: EmptyMediaResource(), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "", size: nil, attributes: [], alternativeRepresentations: []), settings: WallpaperSettings(emoticon: emoticon)))
}
public struct Gradient: Equatable {

View File

@@ -615,7 +615,7 @@ func _internal_markAdAction(account: Account, peerId: EnginePeer.Id, opaqueId: D
guard let inputChannel = inputChannel else {
return .complete()
}
return account.network.request(Api.functions.channels.clickSponsoredMessage(channel: inputChannel, randomId: Buffer(data: opaqueId)))
return account.network.request(Api.functions.channels.clickSponsoredMessage(flags: 0, channel: inputChannel, randomId: Buffer(data: opaqueId)))
|> `catch` { _ -> Signal<Api.Bool, NoError> in
return .single(.boolFalse)
}

View File

@@ -309,7 +309,7 @@ func managedSynchronizeAttachMenuBots(accountPeerId: PeerId, postbox: Postbox, n
for icon in botIcons {
switch icon {
case let .attachMenuBotIcon(_, name, icon, _):
if let iconName = AttachMenuBots.Bot.IconName(string: name), let icon = telegramMediaFileFromApiDocument(icon) {
if let iconName = AttachMenuBots.Bot.IconName(string: name), let icon = telegramMediaFileFromApiDocument(icon, altDocuments: []) {
icons[iconName] = icon
}
}
@@ -544,7 +544,7 @@ func _internal_getAttachMenuBot(accountPeerId: PeerId, postbox: Postbox, network
for icon in botIcons {
switch icon {
case let .attachMenuBotIcon(_, name, icon, _):
if let iconName = AttachMenuBots.Bot.IconName(string: name), let icon = telegramMediaFileFromApiDocument(icon) {
if let iconName = AttachMenuBots.Bot.IconName(string: name), let icon = telegramMediaFileFromApiDocument(icon, altDocuments: []) {
icons[iconName] = icon
}
}
@@ -755,7 +755,7 @@ func _internal_getBotApp(account: Account, reference: BotAppReference) -> Signal
if (botAppFlags & (1 << 2)) != 0 {
appFlags.insert(.hasSettings)
}
return .single(BotApp(id: id, accessHash: accessHash, shortName: shortName, title: title, description: description, photo: telegramMediaImageFromApiPhoto(photo), document: document.flatMap(telegramMediaFileFromApiDocument), hash: hash, flags: appFlags))
return .single(BotApp(id: id, accessHash: accessHash, shortName: shortName, title: title, description: description, photo: telegramMediaImageFromApiPhoto(photo), document: document.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }, hash: hash, flags: appFlags))
case .botAppNotModified:
return .complete()
}
@@ -770,7 +770,7 @@ extension BotApp {
convenience init?(apiBotApp: Api.BotApp) {
switch apiBotApp {
case let .botApp(_, id, accessHash, shortName, title, description, photo, document, hash):
self.init(id: id, accessHash: accessHash, shortName: shortName, title: title, description: description, photo: telegramMediaImageFromApiPhoto(photo), document: document.flatMap(telegramMediaFileFromApiDocument), hash: hash, flags: [])
self.init(id: id, accessHash: accessHash, shortName: shortName, title: title, description: description, photo: telegramMediaImageFromApiPhoto(photo), document: document.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }, hash: hash, flags: [])
case .botAppNotModified:
return nil
}

View File

@@ -530,7 +530,7 @@ public final class EngineStoryViewListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@@ -575,7 +575,7 @@ public final class EngineStoryViewListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@@ -615,7 +615,7 @@ public final class EngineStoryViewListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@@ -727,7 +727,7 @@ public final class EngineStoryViewListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,

View File

@@ -118,7 +118,7 @@ func _internal_outgoingMessageWithChatContextResult(to peerId: PeerId, threadId:
if let dimensions = externalReference.content?.dimensions {
fileAttributes.append(.ImageSize(size: dimensions))
if externalReference.type == "gif" {
fileAttributes.append(.Video(duration: externalReference.content?.duration ?? 0.0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil))
fileAttributes.append(.Video(duration: externalReference.content?.duration ?? 0.0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil))
}
}
@ -136,7 +136,7 @@ func _internal_outgoingMessageWithChatContextResult(to peerId: PeerId, threadId:
resource = EmptyMediaResource()
}
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: videoThumbnails, immediateThumbnailData: nil, mimeType: externalReference.content?.mimeType ?? "application/binary", size: nil, attributes: fileAttributes)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: videoThumbnails, immediateThumbnailData: nil, mimeType: externalReference.content?.mimeType ?? "application/binary", size: nil, attributes: fileAttributes, alternativeRepresentations: [])
return .message(text: caption, attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: file), threadId: threadId, replyToMessageId: replyToMessageId, replyToStoryId: replyToStoryId, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: [])
} else {
return .message(text: caption, attributes: attributes, inlineStickers: [:], mediaReference: nil, threadId: threadId, replyToMessageId: replyToMessageId, replyToStoryId: replyToStoryId, localGroupingKey: nil, correlationId: correlationId, bubbleUpEmojiOrStickersets: [])
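Two signature changes surface in this file: the .Video file attribute gains a trailing videoCodec: value, and TelegramMediaFile gains an alternativeRepresentations: parameter. A sketch of building a local video file under the new signatures (randomId, dimensions and resource are assumed to be in scope; values mirror the hunk above):

let fileAttributes: [TelegramMediaFileAttribute] = [
    .ImageSize(size: dimensions),
    // videoCodec is the new trailing value; nil here, matching the hunk above.
    .Video(duration: 0.0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)
]
let file = TelegramMediaFile(
    fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: randomId),
    partialReference: nil,
    resource: resource,
    previewRepresentations: [],
    videoThumbnails: [],
    immediateThumbnailData: nil,
    mimeType: "video/mp4",
    size: nil,
    attributes: fileAttributes,
    alternativeRepresentations: []   // no lower-quality variants exist for a locally built file
)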

View File

@ -742,7 +742,7 @@ extension TelegramBusinessIntro {
convenience init(apiBusinessIntro: Api.BusinessIntro) {
switch apiBusinessIntro {
case let .businessIntro(_, title, description, sticker):
self.init(title: title, text: description, stickerFile: sticker.flatMap(telegramMediaFileFromApiDocument))
self.init(title: title, text: description, stickerFile: sticker.flatMap { telegramMediaFileFromApiDocument($0, altDocuments: []) })
}
}

View File

@ -245,6 +245,7 @@ public enum Stories {
case expirationTimestamp
case media
case alternativeMedia
case alternativeMediaList
case mediaAreas
case text
case entities
@ -268,7 +269,7 @@ public enum Stories {
public let timestamp: Int32
public let expirationTimestamp: Int32
public let media: Media?
public let alternativeMedia: Media?
public let alternativeMediaList: [Media]
public let mediaAreas: [MediaArea]
public let text: String
public let entities: [MessageTextEntity]
@ -292,7 +293,7 @@ public enum Stories {
timestamp: Int32,
expirationTimestamp: Int32,
media: Media?,
alternativeMedia: Media?,
alternativeMediaList: [Media],
mediaAreas: [MediaArea],
text: String,
entities: [MessageTextEntity],
@ -315,7 +316,7 @@ public enum Stories {
self.timestamp = timestamp
self.expirationTimestamp = expirationTimestamp
self.media = media
self.alternativeMedia = alternativeMedia
self.alternativeMediaList = alternativeMediaList
self.mediaAreas = mediaAreas
self.text = text
self.entities = entities
@ -348,10 +349,18 @@ public enum Stories {
self.media = nil
}
if let alternativeMediaData = try container.decodeIfPresent(Data.self, forKey: .alternativeMedia) {
self.alternativeMedia = PostboxDecoder(buffer: MemoryBuffer(data: alternativeMediaData)).decodeRootObject() as? Media
if let alternativeMediaListData = try container.decodeIfPresent([Data].self, forKey: .alternativeMediaList) {
self.alternativeMediaList = alternativeMediaListData.compactMap { data -> Media? in
return PostboxDecoder(buffer: MemoryBuffer(data: data)).decodeRootObject() as? Media
}
} else if let alternativeMediaData = try container.decodeIfPresent(Data.self, forKey: .alternativeMedia) {
if let value = PostboxDecoder(buffer: MemoryBuffer(data: alternativeMediaData)).decodeRootObject() as? Media {
self.alternativeMediaList = [value]
} else {
self.alternativeMediaList = []
}
} else {
self.alternativeMedia = nil
self.alternativeMediaList = []
}
self.mediaAreas = try container.decodeIfPresent([MediaArea].self, forKey: .mediaAreas) ?? []
@ -388,12 +397,12 @@ public enum Stories {
try container.encode(mediaData, forKey: .media)
}
if let alternativeMedia = self.alternativeMedia {
let alternativeMediaListData = self.alternativeMediaList.map { alternativeMediaValue -> Data in
let encoder = PostboxEncoder()
encoder.encodeRootObject(alternativeMedia)
let alternativeMediaData = encoder.makeData()
try container.encode(alternativeMediaData, forKey: .alternativeMedia)
encoder.encodeRootObject(alternativeMediaValue)
return encoder.makeData()
}
try container.encode(alternativeMediaListData, forKey: .alternativeMediaList)
try container.encode(self.mediaAreas, forKey: .mediaAreas)
@ -436,14 +445,8 @@ public enum Stories {
}
}
if let lhsAlternativeMedia = lhs.alternativeMedia, let rhsAlternativeMedia = rhs.alternativeMedia {
if !lhsAlternativeMedia.isEqual(to: rhsAlternativeMedia) {
return false
}
} else {
if (lhs.alternativeMedia == nil) != (rhs.alternativeMedia == nil) {
return false
}
if !areMediaArraysEqual(lhs.alternativeMediaList, rhs.alternativeMediaList) {
return false
}
if lhs.mediaAreas != rhs.mediaAreas {
@ -871,8 +874,9 @@ private func prepareUploadStoryContent(account: Account, media: EngineStoryInput
mimeType: "video/mp4",
size: nil,
attributes: [
TelegramMediaFileAttribute.Video(duration: duration, size: dimensions, flags: .supportsStreaming, preloadSize: nil, coverTime: coverTime)
]
TelegramMediaFileAttribute.Video(duration: duration, size: dimensions, flags: .supportsStreaming, preloadSize: nil, coverTime: coverTime, videoCodec: nil)
],
alternativeRepresentations: []
)
return fileMedia
@ -1209,7 +1213,7 @@ func _internal_uploadStoryImpl(
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1617,7 +1621,7 @@ func _internal_editStoryPrivacy(account: Account, id: Int32, privacy: EngineStor
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1649,7 +1653,7 @@ func _internal_editStoryPrivacy(account: Account, id: Int32, privacy: EngineStor
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1846,7 +1850,7 @@ func _internal_updateStoriesArePinned(account: Account, peerId: PeerId, ids: [In
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1877,7 +1881,7 @@ func _internal_updateStoriesArePinned(account: Account, peerId: PeerId, ids: [In
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -2093,11 +2097,11 @@ extension Stories.StoredItem {
mergedForwardInfo = forwardFrom.flatMap(Stories.Item.ForwardInfo.init(apiForwardInfo:))
}
var parsedAlternativeMedia: Media?
var parsedAlternativeMedia: [Media] = []
switch media {
case let .messageMediaDocument(_, _, altDocument, _):
if let altDocument = altDocument {
parsedAlternativeMedia = telegramMediaFileFromApiDocument(altDocument)
case let .messageMediaDocument(_, _, altDocuments, _):
if let altDocuments {
parsedAlternativeMedia = altDocuments.compactMap { telegramMediaFileFromApiDocument($0, altDocuments: []) }
}
default:
break
@ -2108,7 +2112,7 @@ extension Stories.StoredItem {
timestamp: date,
expirationTimestamp: expireDate,
media: parsedMedia,
alternativeMedia: parsedAlternativeMedia,
alternativeMediaList: parsedAlternativeMedia,
mediaAreas: mediaAreas?.compactMap(mediaAreaFromApiMediaArea) ?? [],
text: caption ?? "",
entities: entities.flatMap { entities in return messageTextEntitiesFromApiEntities(entities) } ?? [],
@ -2173,7 +2177,7 @@ func _internal_getStoryById(accountPeerId: PeerId, postbox: Postbox, network: Ne
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -2656,7 +2660,7 @@ func _internal_setStoryReaction(account: Account, peerId: EnginePeer.Id, id: Int
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -2690,7 +2694,7 @@ func _internal_setStoryReaction(account: Account, peerId: EnginePeer.Id, id: Int
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
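The Codable changes earlier in this file keep previously stored records readable: decoding tries the new alternativeMediaList key first and falls back to the legacy single-value alternativeMedia key, promoting it to a one-element array. A condensed sketch of that fallback, with field and key names taken from the hunk above:

if let listData = try container.decodeIfPresent([Data].self, forKey: .alternativeMediaList) {
    self.alternativeMediaList = listData.compactMap { data in
        PostboxDecoder(buffer: MemoryBuffer(data: data)).decodeRootObject() as? Media
    }
} else if let legacyData = try container.decodeIfPresent(Data.self, forKey: .alternativeMedia) {
    // Legacy records serialized a single Media value; wrap it in a list.
    let value = PostboxDecoder(buffer: MemoryBuffer(data: legacyData)).decodeRootObject() as? Media
    self.alternativeMediaList = value.map { [$0] } ?? []
} else {
    self.alternativeMediaList = []
}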

View File

@ -67,7 +67,7 @@ public final class EngineStoryItem: Equatable {
public let timestamp: Int32
public let expirationTimestamp: Int32
public let media: EngineMedia
public let alternativeMedia: EngineMedia?
public let alternativeMediaList: [EngineMedia]
public let mediaAreas: [MediaArea]
public let text: String
public let entities: [MessageTextEntity]
@ -87,12 +87,12 @@ public final class EngineStoryItem: Equatable {
public let forwardInfo: ForwardInfo?
public let author: EnginePeer?
public init(id: Int32, timestamp: Int32, expirationTimestamp: Int32, media: EngineMedia, alternativeMedia: EngineMedia?, mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], views: Views?, privacy: EngineStoryPrivacy?, isPinned: Bool, isExpired: Bool, isPublic: Bool, isPending: Bool, isCloseFriends: Bool, isContacts: Bool, isSelectedContacts: Bool, isForwardingDisabled: Bool, isEdited: Bool, isMy: Bool, myReaction: MessageReaction.Reaction?, forwardInfo: ForwardInfo?, author: EnginePeer?) {
public init(id: Int32, timestamp: Int32, expirationTimestamp: Int32, media: EngineMedia, alternativeMediaList: [EngineMedia], mediaAreas: [MediaArea], text: String, entities: [MessageTextEntity], views: Views?, privacy: EngineStoryPrivacy?, isPinned: Bool, isExpired: Bool, isPublic: Bool, isPending: Bool, isCloseFriends: Bool, isContacts: Bool, isSelectedContacts: Bool, isForwardingDisabled: Bool, isEdited: Bool, isMy: Bool, myReaction: MessageReaction.Reaction?, forwardInfo: ForwardInfo?, author: EnginePeer?) {
self.id = id
self.timestamp = timestamp
self.expirationTimestamp = expirationTimestamp
self.media = media
self.alternativeMedia = alternativeMedia
self.alternativeMediaList = alternativeMediaList
self.mediaAreas = mediaAreas
self.text = text
self.entities = entities
@ -126,7 +126,7 @@ public final class EngineStoryItem: Equatable {
if lhs.media != rhs.media {
return false
}
if lhs.alternativeMedia != rhs.alternativeMedia {
if lhs.alternativeMediaList != rhs.alternativeMediaList {
return false
}
if lhs.mediaAreas != rhs.mediaAreas {
@ -205,7 +205,7 @@ public extension EngineStoryItem {
timestamp: self.timestamp,
expirationTimestamp: self.expirationTimestamp,
media: self.media._asMedia(),
alternativeMedia: self.alternativeMedia?._asMedia(),
alternativeMediaList: self.alternativeMediaList.map { $0._asMedia() },
mediaAreas: self.mediaAreas,
text: self.text,
entities: self.entities,
@ -670,7 +670,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -839,7 +839,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1013,7 +1013,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1062,7 +1062,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1113,7 +1113,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1170,7 +1170,7 @@ public final class PeerStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1416,7 +1416,7 @@ public final class SearchStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1565,7 +1565,7 @@ public final class SearchStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -1637,7 +1637,7 @@ public final class SearchStoryListContext: StoryListContext {
timestamp: item.storyItem.timestamp,
expirationTimestamp: item.storyItem.expirationTimestamp,
media: item.storyItem.media,
alternativeMedia: item.storyItem.alternativeMedia,
alternativeMediaList: item.storyItem.alternativeMediaList,
mediaAreas: item.storyItem.mediaAreas,
text: item.storyItem.text,
entities: item.storyItem.entities,
@ -1755,7 +1755,7 @@ public final class PeerExpiringStoryListContext {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: EngineMedia(media),
alternativeMedia: item.alternativeMedia.flatMap(EngineMedia.init),
alternativeMediaList: item.alternativeMediaList.map(EngineMedia.init),
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
@ -2211,7 +2211,7 @@ public final class BotPreviewStoryListContext: StoryListContext {
timestamp: 0,
expirationTimestamp: Int32.max,
media: EngineMedia(item.media),
alternativeMedia: nil,
alternativeMediaList: [],
mediaAreas: [],
text: "",
entities: [],
@ -2260,7 +2260,7 @@ public final class BotPreviewStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: Int32.max,
media: EngineMedia(item.media),
alternativeMedia: nil,
alternativeMediaList: [],
mediaAreas: [],
text: "",
entities: [],
@ -2371,7 +2371,7 @@ public final class BotPreviewStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: Int32.max,
media: EngineMedia(item.media),
alternativeMedia: nil,
alternativeMediaList: [],
mediaAreas: [],
text: "",
entities: [],
@ -2447,7 +2447,7 @@ public final class BotPreviewStoryListContext: StoryListContext {
timestamp: 0,
expirationTimestamp: Int32.max,
media: EngineMedia(item.media),
alternativeMedia: nil,
alternativeMediaList: [],
mediaAreas: [],
text: "",
entities: [],
@ -2510,7 +2510,7 @@ public final class BotPreviewStoryListContext: StoryListContext {
timestamp: item.timestamp,
expirationTimestamp: Int32.max,
media: EngineMedia(item.media),
alternativeMedia: nil,
alternativeMediaList: [],
mediaAreas: [],
text: "",
entities: [],

View File

@ -1235,8 +1235,8 @@ public extension TelegramEngine {
}
var selectedMedia: EngineMedia
if let alternativeMedia = itemAndPeer.item.alternativeMedia.flatMap(EngineMedia.init), (!preferHighQuality && !itemAndPeer.item.isMy) {
selectedMedia = alternativeMedia
if let alternativeMediaValue = itemAndPeer.item.alternativeMediaList.first.flatMap(EngineMedia.init), (!preferHighQuality && !itemAndPeer.item.isMy) {
selectedMedia = alternativeMediaValue
} else {
selectedMedia = EngineMedia(media)
}
@ -1277,7 +1277,7 @@ public extension TelegramEngine {
timestamp: item.timestamp,
expirationTimestamp: item.expirationTimestamp,
media: item.media,
alternativeMedia: item.alternativeMedia,
alternativeMediaList: item.alternativeMediaList,
mediaAreas: item.mediaAreas,
text: item.text,
entities: item.entities,
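The hunk above is the consumer side of the new list: unless high quality is preferred or the story belongs to the current user, the first alternative (presumably a reduced-quality encode) is streamed; otherwise the full-quality original is used. A sketch of that decision, assuming item exposes alternativeMediaList and isMy, media is the primary Media value, and preferHighQuality is a Bool:

let selectedMedia: EngineMedia
if let alternative = item.alternativeMediaList.first.flatMap(EngineMedia.init),
   !preferHighQuality && !item.isMy {
    // Save bandwidth by streaming the lower-quality representation.
    selectedMedia = alternative
} else {
    selectedMedia = EngineMedia(media)
}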

View File

@ -104,7 +104,7 @@ public final class NotificationSoundList: Equatable, Codable {
private extension NotificationSoundList.NotificationSound {
convenience init?(apiDocument: Api.Document) {
guard let file = telegramMediaFileFromApiDocument(apiDocument) else {
guard let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []) else {
return nil
}
self.init(file: file)
@ -313,7 +313,7 @@ func _internal_uploadNotificationSound(account: Account, title: String, data: Da
return .generic
}
|> mapToSignal { result -> Signal<NotificationSoundList.NotificationSound, UploadNotificationSoundError> in
guard let file = telegramMediaFileFromApiDocument(result) else {
guard let file = telegramMediaFileFromApiDocument(result, altDocuments: []) else {
return .fail(.generic)
}
return account.postbox.transaction { transaction -> NotificationSoundList.NotificationSound in

View File

@ -80,15 +80,15 @@ func _internal_uploadSticker(account: Account, peer: Peer, resource: MediaResour
var attributes: [Api.DocumentAttribute] = []
attributes.append(.documentAttributeSticker(flags: 0, alt: alt, stickerset: .inputStickerSetEmpty, maskCoords: nil))
if let duration {
attributes.append(.documentAttributeVideo(flags: 0, duration: duration, w: dimensions.width, h: dimensions.height, preloadPrefixSize: nil, videoStartTs: nil))
attributes.append(.documentAttributeVideo(flags: 0, duration: duration, w: dimensions.width, h: dimensions.height, preloadPrefixSize: nil, videoStartTs: nil, videoCodec: nil))
}
attributes.append(.documentAttributeImageSize(w: dimensions.width, h: dimensions.height))
return account.network.request(Api.functions.messages.uploadMedia(flags: 0, businessConnectionId: nil, peer: inputPeer, media: Api.InputMedia.inputMediaUploadedDocument(flags: flags, file: file, thumb: thumbnailFile, mimeType: mimeType, attributes: attributes, stickers: nil, ttlSeconds: nil)))
|> mapError { _ -> UploadStickerError in return .generic }
|> mapToSignal { media -> Signal<UploadStickerStatus, UploadStickerError> in
switch media {
case let .messageMediaDocument(_, document, _, _):
if let document = document, let file = telegramMediaFileFromApiDocument(document), let uploadedResource = file.resource as? CloudDocumentMediaResource {
case let .messageMediaDocument(_, document, altDocuments, _):
if let document = document, let file = telegramMediaFileFromApiDocument(document, altDocuments: altDocuments), let uploadedResource = file.resource as? CloudDocumentMediaResource {
account.postbox.mediaBox.copyResourceData(from: resource.id, to: uploadedResource.id, synchronous: true)
if let thumbnail, let previewRepresentation = file.previewRepresentations.first(where: { $0.dimensions == PixelDimensions(width: 320, height: 320) }) {
account.postbox.mediaBox.copyResourceData(from: thumbnail.id, to: previewRepresentation.resource.id, synchronous: true)
@ -144,7 +144,7 @@ public extension ImportSticker {
fileAttributes.append(.FileName(fileName: "sticker.webm"))
fileAttributes.append(.Animated)
fileAttributes.append(.Sticker(displayText: "", packReference: nil, maskData: nil))
fileAttributes.append(.Video(duration: self.duration ?? 3.0, size: self.dimensions, flags: [], preloadSize: nil, coverTime: nil))
fileAttributes.append(.Video(duration: self.duration ?? 3.0, size: self.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil))
} else if self.mimeType == "application/x-tgsticker" {
fileAttributes.append(.FileName(fileName: "sticker.tgs"))
fileAttributes.append(.Animated)
@ -159,7 +159,7 @@ public extension ImportSticker {
previewRepresentations.append(TelegramMediaImageRepresentation(dimensions: PixelDimensions(width: 320, height: 320), resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil))
}
return StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: 0), file: TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: self.mimeType, size: nil, attributes: fileAttributes), indexKeys: [])
return StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: 0), file: TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: resource, previewRepresentations: previewRepresentations, videoThumbnails: [], immediateThumbnailData: nil, mimeType: self.mimeType, size: nil, attributes: fileAttributes, alternativeRepresentations: []), indexKeys: [])
}
}
@ -560,7 +560,7 @@ func _internal_getMyStickerSets(account: Account) -> Signal<[(StickerPackCollect
}
let info = StickerPackCollectionInfo(apiSet: set, namespace: namespace)
var firstItem: StickerPackItem?
if let file = telegramMediaFileFromApiDocument(cover), let id = file.id {
if let file = telegramMediaFileFromApiDocument(cover, altDocuments: []), let id = file.id {
firstItem = StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: id.id), file: file, indexKeys: [])
}
infos.append((info, firstItem))
@ -579,7 +579,7 @@ func _internal_getMyStickerSets(account: Account) -> Signal<[(StickerPackCollect
let info = StickerPackCollectionInfo(apiSet: set, namespace: namespace)
var firstItem: StickerPackItem?
if let apiDocument = documents.first {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
firstItem = StickerPackItem(index: ItemCollectionItemIndex(index: 0, id: id.id), file: file, indexKeys: [])
}
}
@ -642,7 +642,7 @@ private func parseStickerSetInfoAndItems(apiStickerSet: Api.messages.StickerSet)
var items: [StickerPackItem] = []
for apiDocument in documents {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys
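On the wire-format side, Api.DocumentAttribute.documentAttributeVideo grows a matching videoCodec: argument. A sketch of building the attribute for an uploaded video sticker under the new signature, with duration and dimensions assumed to be in scope as in the hunk above:

var apiAttributes: [Api.DocumentAttribute] = []
apiAttributes.append(.documentAttributeVideo(
    flags: 0,
    duration: duration,
    w: dimensions.width,
    h: dimensions.height,
    preloadPrefixSize: nil,
    videoStartTs: nil,
    videoCodec: nil   // new parameter; nil when the uploader does not specify a codec
))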

View File

@ -100,7 +100,7 @@ func updatedRemoteStickerPack(postbox: Postbox, network: Network, reference: Sti
}
for apiDocument in documents {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys

View File

@ -320,7 +320,7 @@ func _internal_searchStickers(account: Account, query: [String], scope: SearchSt
var files: [TelegramMediaFile] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
files.append(file)
if !currentItemIds.contains(id) {
if file.isPremiumSticker {
@ -705,7 +705,7 @@ func _internal_searchStickers(account: Account, category: EmojiSearchCategories.
var files: [TelegramMediaFile] = []
for sticker in stickers {
if let file = telegramMediaFileFromApiDocument(sticker), let id = file.id {
if let file = telegramMediaFileFromApiDocument(sticker, altDocuments: []), let id = file.id {
files.append(file)
if !currentItemIds.contains(id) {
if file.isPremiumSticker {

View File

@ -117,7 +117,7 @@ func _internal_requestStickerSet(postbox: Postbox, network: Network, reference:
}
for apiDocument in documents {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
let fileIndexKeys: [MemoryBuffer]
if let indexKeys = indexKeysByFile[id] {
fileIndexKeys = indexKeys
@ -199,7 +199,7 @@ func _internal_installStickerSetInteractively(account: Account, info: StickerPac
var items:[StickerPackItem] = []
for apiDocument in apiDocuments {
if let file = telegramMediaFileFromApiDocument(apiDocument), let id = file.id {
if let file = telegramMediaFileFromApiDocument(apiDocument, altDocuments: []), let id = file.id {
items.append(StickerPackItem(index: ItemCollectionItemIndex(index: Int32(items.count), id: id.id), file: file, indexKeys: []))
}
}

View File

@ -361,7 +361,7 @@ public func _internal_resolveInlineStickers(postbox: Postbox, network: Network,
for result in documentSets {
if let result = result {
for document in result {
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
resultFiles[file.fileId.id] = file
transaction.storeMediaIfNotPresent(media: file)
}

View File

@ -258,7 +258,7 @@ private func uploadTheme(account: Account, resource: MediaResource, thumbnailDat
return account.network.request(Api.functions.account.uploadTheme(flags: flags, file: file, thumb: thumbnailFile, fileName: fileName, mimeType: mimeType))
|> mapError { _ in return UploadThemeError.generic }
|> mapToSignal { document -> Signal<UploadThemeResult, UploadThemeError> in
if let file = telegramMediaFileFromApiDocument(document) {
if let file = telegramMediaFileFromApiDocument(document, altDocuments: []) {
return .single(.complete(file))
} else {
return .fail(.generic)

View File

@ -112,7 +112,7 @@ public func parseMediaData(data: Data) -> Media? {
if let photo = object as? Api.Photo {
return telegramMediaImageFromApiPhoto(photo)
} else if let document = object as? Api.Document {
return telegramMediaFileFromApiDocument(document)
return telegramMediaFileFromApiDocument(document, altDocuments: [])
}
}
return nil

View File

@ -11,7 +11,7 @@ public enum MediaResourceStatsCategory {
case voiceMessages
}
final class TelegramMediaResourceFetchTag: MediaResourceFetchTag {
public final class TelegramMediaResourceFetchTag: MediaResourceFetchTag {
public let statsCategory: MediaResourceStatsCategory
public init(statsCategory: MediaResourceStatsCategory, userContentType: MediaResourceUserContentType?) {
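TelegramMediaResourceFetchTag is made public here; together with its already-public initializer, modules outside the core library can now tag their own fetches for traffic-stats accounting. A sketch of constructing one; the .video category is an assumption (only the .voiceMessages case is visible in this hunk), and userContentType stays optional as declared above:

let fetchTag = TelegramMediaResourceFetchTag(
    statsCategory: .video,   // assumed case; substitute whichever MediaResourceStatsCategory applies
    userContentType: nil
)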

View File

@ -599,7 +599,7 @@ public func _internal_parseMediaAttachment(data: Data) -> Media? {
if let photo = object as? Api.Photo {
return telegramMediaImageFromApiPhoto(photo)
} else if let file = object as? Api.Document {
return telegramMediaFileFromApiDocument(file)
return telegramMediaFileFromApiDocument(file, altDocuments: [])
} else {
return nil
}

View File

@ -1380,7 +1380,8 @@ public func defaultBuiltinWallpaper(data: BuiltinWallpaperData, colors: [UInt32]
attributes: [
.ImageSize(size: PixelDimensions(width: 1440, height: 2960)),
.FileName(fileName: "pattern.tgv")
]
],
alternativeRepresentations: []
),
settings: WallpaperSettings(colors: colors, intensity: intensity, rotation: rotation)
))

View File

@ -122,7 +122,7 @@ struct TelegramWallpaperStandardizedCodable: Codable {
}
if let slug = slug {
self.value = .file(TelegramWallpaper.File(id: 0, accessHash: 0, isCreator: false, isDefault: false, isPattern: !colors.isEmpty, isDark: false, slug: slug, file: TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: WallpaperDataResource(slug: slug), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "", size: nil, attributes: []), settings: WallpaperSettings(blur: blur, motion: motion, colors: colors.map { $0.argb }, intensity: intensity, rotation: rotation)))
self.value = .file(TelegramWallpaper.File(id: 0, accessHash: 0, isCreator: false, isDefault: false, isPattern: !colors.isEmpty, isDark: false, slug: slug, file: TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 0), partialReference: nil, resource: WallpaperDataResource(slug: slug), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "", size: nil, attributes: [], alternativeRepresentations: []), settings: WallpaperSettings(blur: blur, motion: motion, colors: colors.map { $0.argb }, intensity: intensity, rotation: rotation)))
} else if colors.count > 1 {
self.value = .gradient(TelegramWallpaper.Gradient(id: nil, colors: colors.map { $0.argb }, settings: WallpaperSettings(blur: blur, motion: motion, rotation: rotation)))
} else {

View File

@ -330,7 +330,7 @@ public func mediaContentKind(_ media: EngineMedia, message: EngineMessage? = nil
return .file(performer)
}
}
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if file.isAnimated {
result = .animation
} else {

View File

@ -235,7 +235,7 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
} else {
for attribute in file.attributes {
switch attribute {
case let .Video(_, _, flags, _, _):
case let .Video(_, _, flags, _, _, _):
if flags.contains(.instantRoundVideo) {
type = .round
} else {

View File

@ -172,7 +172,7 @@ private final class ChatContextResultPeekNode: ASDisplayNode, PeekControllerCont
imageDimensions = externalReference.content?.dimensions?.cgSize
if let content = externalReference.content, externalReference.type == "gif", let thumbnailResource = imageResource
, let dimensions = content.dimensions {
videoFileReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
videoFileReference = .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: content.resource, previewRepresentations: [TelegramMediaImageRepresentation(dimensions: dimensions, resource: thumbnailResource, progressiveSizes: [], immediateThumbnailData: nil, hasVideo: false, isPersonal: false)], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
imageResource = nil
}
case let .internalReference(internalReference):

View File

@ -272,7 +272,7 @@ public class ChatMessageActionBubbleContentNode: ChatMessageBubbleContentNode {
strongSelf.mediaBackgroundNode.image = backgroundImage
if let image = image, let video = image.videoRepresentations.last, let id = image.id?.id {
let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: image.representations, videoThumbnails: [], immediateThumbnailData: image.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil)]))
let videoFileReference = FileMediaReference.message(message: MessageReference(item.message), media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: image.representations, videoThumbnails: [], immediateThumbnailData: image.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil, coverTime: nil, videoCodec: nil)], alternativeRepresentations: []))
let videoContent = NativeVideoContent(id: .profileVideo(id, "action"), userLocation: .peer(item.message.id.peerId), fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, storeAfterDownload: nil)
if videoContent.id != strongSelf.videoContent?.id {
let mediaManager = item.context.sharedContext.mediaManager

View File

@ -3061,7 +3061,7 @@ public struct AnimatedEmojiSoundsConfiguration {
if let idString = dict["id"], let id = Int64(idString), let accessHashString = dict["access_hash"], let accessHash = Int64(accessHashString), let fileReference = Data(base64Encoded: fileReferenceString) {
let resource = CloudDocumentMediaResource(datacenterId: 1, fileId: id, accessHash: accessHash, size: nil, fileReference: fileReference, fileName: nil)
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: nil, attributes: [])
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: nil, attributes: [], alternativeRepresentations: [])
sounds[key] = file
}
}

View File

@ -650,7 +650,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
let messageTheme = arguments.incoming ? arguments.presentationData.theme.theme.chat.message.incoming : arguments.presentationData.theme.theme.chat.message.outgoing
let isInstantVideo = arguments.file.isInstantVideo
for attribute in arguments.file.attributes {
if case let .Video(videoDuration, _, flags, _, _) = attribute, flags.contains(.instantRoundVideo) {
if case let .Video(videoDuration, _, flags, _, _, _) = attribute, flags.contains(.instantRoundVideo) {
isAudio = true
isVoice = true
@ -1558,7 +1558,7 @@ public final class ChatMessageInteractiveFileNode: ASDisplayNode {
var isVoice = false
var audioDuration: Int32?
for attribute in file.attributes {
if case let .Video(duration, _, flags, _, _) = attribute, flags.contains(.instantRoundVideo) {
if case let .Video(duration, _, flags, _, _, _) = attribute, flags.contains(.instantRoundVideo) {
isAudio = true
isVoice = true
audioDuration = Int32(duration)
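Across these UI files the change is mechanical: every case let .Video(...) match gains one more wildcard for the new trailing videoCodec associated value, as in the hunks above. A sketch of a match that also binds it, assuming file is a TelegramMediaFile:

for attribute in file.attributes {
    if case let .Video(duration, size, flags, _, _, videoCodec) = attribute {
        // duration, size and flags behave exactly as before; videoCodec is the
        // new last associated value (passed as nil throughout the hunks above).
        _ = (duration, size, flags, videoCodec)
    }
}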

Some files were not shown because too many files have changed in this diff.