diff --git a/submodules/MediaPlayer/Sources/MediaPlayer.swift b/submodules/MediaPlayer/Sources/MediaPlayer.swift
index 1735ed7faa..f5aa6a8fe4 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayer.swift
@@ -116,6 +116,7 @@ private final class MediaPlayerContext {
     private var keepAudioSessionWhilePaused: Bool
    private var continuePlayingWithoutSoundOnLostAudioSession: Bool
     private let storeAfterDownload: (() -> Void)?
+    private let isAudioVideoMessage: Bool
     
     private var seekId: Int = 0
@@ -136,7 +137,7 @@ private final class MediaPlayerContext {
     private var stoppedAtEnd = false
     
-    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil) {
+    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
         assert(queue.isCurrent())
         
         self.queue = queue
@@ -159,6 +160,7 @@ private final class MediaPlayerContext {
         self.keepAudioSessionWhilePaused = keepAudioSessionWhilePaused
         self.continuePlayingWithoutSoundOnLostAudioSession = continuePlayingWithoutSoundOnLostAudioSession
         self.storeAfterDownload = storeAfterDownload
+        self.isAudioVideoMessage = isAudioVideoMessage
         
         self.videoRenderer = VideoPlayerProxy(queue: queue)
@@ -372,7 +374,7 @@ private final class MediaPlayerContext {
             self.audioRenderer = nil
             
             let queue = self.queue
-            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -451,7 +453,7 @@ private final class MediaPlayerContext {
         self.lastStatusUpdateTimestamp = nil
         if self.enableSound {
             let queue = self.queue
-            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -1067,10 +1069,10 @@ public final class MediaPlayer {
         }
     }
     
-    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil) {
+    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
         let audioLevelPipe = self.audioLevelPipe
         self.queue.async {
-            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload)
+            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
            self.contextRef = Unmanaged.passRetained(context)
         }
     }
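Note on the plumbing above: the new `isAudioVideoMessage` flag enters through `MediaPlayer`'s public initializer, where it defaults to `false` so existing call sites keep compiling, and is threaded into the private `MediaPlayerContext` (where it is required) and from there into both `MediaPlayerAudioRenderer` construction paths. A minimal sketch of how a caller opts in; every argument except `isAudioVideoMessage` is illustrative rather than taken from this diff:

```swift
// Hypothetical call site; `audioSessionManager`, `postbox`, and
// `resourceReference` are assumed to exist in the caller's scope.
let player = MediaPlayer(
    audioSessionManager: audioSessionManager,
    postbox: postbox,
    userLocation: .other,
    userContentType: .audio,
    resourceReference: resourceReference,
    streamable: .none,
    video: false,
    preferSoftwareDecoding: false,
    enableSound: true,
    fetchAutomatically: true,
    isAudioVideoMessage: true // new flag; defaults to false for all other callers
)
```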
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
index 974a14ea22..cc3ef112a9 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
@@ -212,6 +212,7 @@ private final class AudioPlayerRendererContext {
     let lowWaterSizeInSeconds: Int = 2
     
     let audioSession: MediaPlayerAudioSessionControl
+    let forAudioVideoMessage: Bool
     let useVoiceProcessingMode: Bool
     let controlTimebase: CMTimebase
     let updatedRate: () -> Void
@@ -225,6 +226,7 @@ private final class AudioPlayerRendererContext {
     var audioGraph: AUGraph?
     var timePitchAudioUnit: AudioComponentInstance?
     var mixerAudioUnit: AudioComponentInstance?
+    var equalizerAudioUnit: AudioComponentInstance?
     var outputAudioUnit: AudioComponentInstance?
     
     var bufferContextId: Int32!
@@ -242,14 +244,18 @@ private final class AudioPlayerRendererContext {
             if let audioSessionControl = self.audioSessionControl {
                 audioSessionControl.setOutputMode(self.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system)
             }
+            if let equalizerAudioUnit = self.equalizerAudioUnit, self.forAudioVideoMessage && !self.ambient {
+                AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, self.forceAudioToSpeaker ? 0.0 : 12.0, 0)
+            }
         }
     }
     
-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())
         
         self.audioSession = audioSession
+        self.forAudioVideoMessage = forAudioVideoMessage
         self.forceAudioToSpeaker = forceAudioToSpeaker
         self.baseRate = baseRate
         self.audioLevelPipe = audioLevelPipe
@@ -416,6 +422,15 @@ private final class AudioPlayerRendererContext {
             return
         }
         
+        var equalizerNode: AUNode = 0
+        var equalizerDescription = AudioComponentDescription()
+        equalizerDescription.componentType = kAudioUnitType_Effect
+        equalizerDescription.componentSubType = kAudioUnitSubType_NBandEQ
+        equalizerDescription.componentManufacturer = kAudioUnitManufacturer_Apple
+        guard AUGraphAddNode(audioGraph, &equalizerDescription, &equalizerNode) == noErr else {
+            return
+        }
+        
         var outputNode: AUNode = 0
         var outputDesc = AudioComponentDescription()
         outputDesc.componentType = kAudioUnitType_Output
@@ -443,7 +458,11 @@ private final class AudioPlayerRendererContext {
             return
         }
         
-        guard AUGraphConnectNodeInput(audioGraph, mixerNode, 0, outputNode, 0) == noErr else {
+        guard AUGraphConnectNodeInput(audioGraph, mixerNode, 0, equalizerNode, 0) == noErr else {
+            return
+        }
+        
+        guard AUGraphConnectNodeInput(audioGraph, equalizerNode, 0, outputNode, 0) == noErr else {
             return
         }
@@ -463,6 +482,15 @@ private final class AudioPlayerRendererContext {
             return
         }
         
+        var maybeEqualizerAudioUnit: AudioComponentInstance?
+        guard AUGraphNodeInfo(audioGraph, equalizerNode, &equalizerDescription, &maybeEqualizerAudioUnit) == noErr, let equalizerAudioUnit = maybeEqualizerAudioUnit else {
+            return
+        }
+        
+        if self.forAudioVideoMessage && !self.ambient {
+            AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, self.forceAudioToSpeaker ? 0.0 : 12.0, 0)
+        }
+        
         var maybeOutputAudioUnit: AudioComponentInstance?
         guard AUGraphNodeInfo(audioGraph, outputNode, &outputDesc, &maybeOutputAudioUnit) == noErr, let outputAudioUnit = maybeOutputAudioUnit else {
             return
@@ -476,6 +504,7 @@ private final class AudioPlayerRendererContext {
         AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
         AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
         AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+        AudioUnitSetProperty(equalizerAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
         
         var callbackStruct = AURenderCallbackStruct()
         callbackStruct.inputProc = rendererInputProc
@@ -494,6 +523,7 @@ private final class AudioPlayerRendererContext {
         AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
+        AudioUnitSetProperty(equalizerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         
         guard AUGraphInitialize(audioGraph) == noErr else {
@@ -505,6 +535,7 @@ private final class AudioPlayerRendererContext {
         self.audioGraph = audioGraph
         self.timePitchAudioUnit = timePitchAudioUnit
         self.mixerAudioUnit = mixerAudioUnit
+        self.equalizerAudioUnit = equalizerAudioUnit
         self.outputAudioUnit = outputAudioUnit
     }
@@ -616,9 +647,10 @@ private final class AudioPlayerRendererContext {
             }
             
             self.audioGraph = nil
-            self.mixerAudioUnit = nil
-            self.outputAudioUnit = nil
             self.timePitchAudioUnit = nil
+            self.mixerAudioUnit = nil
+            self.equalizerAudioUnit = nil
+            self.outputAudioUnit = nil
         }
     }
@@ -789,7 +821,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase
     
-    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
@@ -802,7 +834,7 @@ public final class MediaPlayerAudioRenderer {
         self.audioTimebase = audioTimebase!
         
         audioPlayerRendererQueue.async {
-            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
+            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
             self.contextRef = Unmanaged.passRetained(context)
         }
    }
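The core of the change is above: a `kAudioUnitSubType_NBandEQ` effect node is spliced into the existing `AUGraph` between the mixer node and the output node, and only its global gain parameter is used, as a +12 dB boost for voice and round-video messages on the quiet earpiece route (0 dB when output is forced to the loudspeaker, and no adjustment at all for ambient playback). The `forceAudioToSpeaker` observer re-applies the gain when the route changes mid-playback. A self-contained sketch of the same technique, assuming a graph that has already been opened with `AUGraphOpen`:

```swift
import AudioToolbox

// Sketch only: add an NBandEQ node to an already-opened AUGraph and drive
// its global gain (in dB). The real diff additionally guards every call and
// wires the node between the mixer and output units.
func makeGlobalGainEQ(in graph: AUGraph, gainDb: Float32) -> AudioUnit? {
    var description = AudioComponentDescription()
    description.componentType = kAudioUnitType_Effect
    description.componentSubType = kAudioUnitSubType_NBandEQ
    description.componentManufacturer = kAudioUnitManufacturer_Apple

    var node: AUNode = 0
    guard AUGraphAddNode(graph, &description, &node) == noErr else { return nil }

    // The node's AudioUnit instance is only retrievable once the graph is open.
    var maybeUnit: AudioUnit?
    guard AUGraphNodeInfo(graph, node, &description, &maybeUnit) == noErr, let unit = maybeUnit else {
        return nil
    }
    // kAUNBandEQParam_GlobalGain applies a uniform gain across all EQ bands.
    AudioUnitSetParameter(unit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, gainDb, 0)
    return unit
}
```

One design note visible in the diff: the EQ node is added to every renderer's graph, but its gain is left at the default 0 dB unless `forAudioVideoMessage && !ambient`, so the graph topology stays uniform and never needs rebuilding when the flag or the output route differs.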
diff --git a/submodules/TelegramUI/Sources/ChatMessageInteractiveInstantVideoNode.swift b/submodules/TelegramUI/Sources/ChatMessageInteractiveInstantVideoNode.swift
index 554564f166..afb9e176de 100644
--- a/submodules/TelegramUI/Sources/ChatMessageInteractiveInstantVideoNode.swift
+++ b/submodules/TelegramUI/Sources/ChatMessageInteractiveInstantVideoNode.swift
@@ -690,7 +690,7 @@ class ChatMessageInteractiveInstantVideoNode: ASDisplayNode {
                         }
                     }
                 }
-            }), content: NativeVideoContent(id: .message(item.message.stableId, telegramFile.fileId), userLocation: .peer(item.message.id.peerId), fileReference: .message(message: MessageReference(item.message), media: telegramFile), streamVideo: streamVideo ? .conservative : .none, enableSound: false, fetchAutomatically: false, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), priority: .embedded, autoplay: item.context.sharedContext.energyUsageSettings.autoplayVideo)
+            }), content: NativeVideoContent(id: .message(item.message.stableId, telegramFile.fileId), userLocation: .peer(item.message.id.peerId), fileReference: .message(message: MessageReference(item.message), media: telegramFile), streamVideo: streamVideo ? .conservative : .none, enableSound: false, fetchAutomatically: false, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), priority: .embedded, autoplay: item.context.sharedContext.energyUsageSettings.autoplayVideo)
             if let previousVideoNode = previousVideoNode {
                 videoNode.bounds = previousVideoNode.bounds
                 videoNode.position = previousVideoNode.position
diff --git a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
index c7887119d3..a522db9696 100644
--- a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
+++ b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
@@ -230,13 +230,13 @@ final class SharedMediaPlayer {
                     case .voice, .music:
                         switch playbackData.source {
                             case let .telegramFile(fileReference, _):
-                                strongSelf.playbackItem = .audio(MediaPlayer(audioSessionManager: strongSelf.audioSession, postbox: strongSelf.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: fileReference.resourceReference(fileReference.media.resource), streamable: playbackData.type == .music ? .conservative : .none, video: false, preferSoftwareDecoding: false, enableSound: true, baseRate: rateValue, fetchAutomatically: true, playAndRecord: controlPlaybackWithProximity))
+                                strongSelf.playbackItem = .audio(MediaPlayer(audioSessionManager: strongSelf.audioSession, postbox: strongSelf.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: fileReference.resourceReference(fileReference.media.resource), streamable: playbackData.type == .music ? .conservative : .none, video: false, preferSoftwareDecoding: false, enableSound: true, baseRate: rateValue, fetchAutomatically: true, playAndRecord: controlPlaybackWithProximity, isAudioVideoMessage: playbackData.type == .voice))
                         }
                     case .instantVideo:
                         if let mediaManager = strongSelf.mediaManager, let item = item as? MessageMediaPlaylistItem {
                             switch playbackData.source {
                                 case let .telegramFile(fileReference, _):
-                                    let videoNode = OverlayInstantVideoNode(postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in
+                                    let videoNode = OverlayInstantVideoNode(postbox: strongSelf.account.postbox, audioSession: strongSelf.audioSession, manager: mediaManager.universalVideoManager, content: NativeVideoContent(id: .message(item.message.stableId, fileReference.media.fileId), userLocation: .peer(item.message.id.peerId), fileReference: fileReference, enableSound: false, baseRate: rateValue, isAudioVideoMessage: true, captureProtected: item.message.isCopyProtected(), storeAfterDownload: nil), close: { [weak mediaManager] in
                                         mediaManager?.setPlaylist(nil, type: .voice, control: .playback(.pause))
                                     })
                                     strongSelf.playbackItem = .instantVideo(videoNode)
diff --git a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift
index 0238d59be0..e5a09a50bc 100644
--- a/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift
+++ b/submodules/TelegramUniversalVideoContent/Sources/NativeVideoContent.swift
@@ -46,11 +46,12 @@ public final class NativeVideoContent: UniversalVideoContent {
     let continuePlayingWithoutSoundOnLostAudioSession: Bool
     let placeholderColor: UIColor
     let tempFilePath: String?
+    let isAudioVideoMessage: Bool
     let captureProtected: Bool
     let hintDimensions: CGSize?
     let storeAfterDownload: (() -> Void)?
     
-    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?) {
+    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?) {
         self.id = id
         self.nativeId = id
         self.userLocation = userLocation
@@ -84,12 +85,13 @@ public final class NativeVideoContent: UniversalVideoContent {
         self.placeholderColor = placeholderColor
         self.tempFilePath = tempFilePath
         self.captureProtected = captureProtected
+        self.isAudioVideoMessage = isAudioVideoMessage
         self.hintDimensions = hintDimensions
         self.storeAfterDownload = storeAfterDownload
     }
     
     public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
-        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload)
+        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload)
     }
     
     public func isEqual(to other: UniversalVideoContent) -> Bool {
@@ -114,6 +116,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
     private let loopVideo: Bool
     private let baseRate: Double
     private let audioSessionManager: ManagedAudioSession
+    private let isAudioVideoMessage: Bool
     private let captureProtected: Bool
     
     private let player: MediaPlayer
@@ -164,7 +167,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
     private var shouldPlay: Bool = false
     
-    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil) {
+    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil) {
         self.postbox = postbox
         self.userLocation = userLocation
         self.fileReference = fileReference
@@ -173,11 +176,12 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
         self.loopVideo = loopVideo
         self.baseRate = baseRate
         self.audioSessionManager = audioSessionManager
+        self.isAudioVideoMessage = isAudioVideoMessage
         self.captureProtected = captureProtected
         
         self.imageNode = TransformImageNode()
         
-        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload)
+        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
         
         var actionAtEndImpl: (() -> Void)?
         if enableSound && !loopVideo {
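Taken together, the call-site changes mark voice notes (`playbackData.type == .voice` in `SharedMediaPlayer`) and round video messages (both the in-chat instant-video node and the overlay node) as `isAudioVideoMessage`, while music and ordinary videos keep the default `false`. The effective gain rule, restated as a pure function for clarity (a hypothetical helper; the diff inlines this expression at both places it is needed):

```swift
// Hypothetical restatement of the gain logic from MediaPlayerAudioRenderer.
// Returns nil when the equalizer's global gain should be left at its default.
func equalizerGlobalGain(forAudioVideoMessage: Bool, ambient: Bool, forceAudioToSpeaker: Bool) -> Float32? {
    guard forAudioVideoMessage && !ambient else { return nil }
    // Boost quiet earpiece playback by 12 dB; the loudspeaker route needs none.
    return forceAudioToSpeaker ? 0.0 : 12.0
}
```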