diff --git a/submodules/MediaPlayer/Sources/MediaPlayer.swift b/submodules/MediaPlayer/Sources/MediaPlayer.swift
index 750330d180..559f4b44a7 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayer.swift
@@ -123,6 +123,7 @@ private final class MediaPlayerContext {
     fileprivate let videoRenderer: VideoPlayerProxy
     
     private var tickTimer: SwiftSignalKit.Timer?
+    private var fadeTimer: SwiftSignalKit.Timer?
     
     private var lastStatusUpdateTimestamp: Double?
     private let playerStatus: Promise<MediaPlayerStatus>
@@ -224,6 +225,7 @@ private final class MediaPlayerContext {
     deinit {
         assert(self.queue.isCurrent())
         
+        self.fadeTimer?.invalidate()
         self.tickTimer?.invalidate()
         
         if case let .seeking(_, _, _, disposable, _, _) = self.state {
@@ -383,7 +385,7 @@ private final class MediaPlayerContext {
                     if strongSelf.continuePlayingWithoutSoundOnLostAudioSession {
                         strongSelf.continuePlayingWithoutSound()
                     } else {
-                        strongSelf.pause(lostAudioSession: true)
+                        strongSelf.pause(lostAudioSession: true, faded: false)
                     }
                 } else {
                     strongSelf.seek(timestamp: 0.0, action: .play)
@@ -440,7 +442,7 @@ private final class MediaPlayerContext {
         }
     }
     
-    fileprivate func play() {
+    fileprivate func play(faded: Bool = false) {
         assert(self.queue.isCurrent())
         
         switch self.state {
@@ -461,7 +463,7 @@ private final class MediaPlayerContext {
                     if strongSelf.continuePlayingWithoutSoundOnLostAudioSession {
                         strongSelf.continuePlayingWithoutSound()
                     } else {
-                        strongSelf.pause(lostAudioSession: true)
+                        strongSelf.pause(lostAudioSession: true, faded: false)
                     }
                 } else {
                     strongSelf.seek(timestamp: 0.0, action: .play)
@@ -477,6 +479,26 @@ private final class MediaPlayerContext {
             self.state = .seeking(frameSource: frameSource, timestamp: timestamp, seekState: seekState, disposable: disposable, action: .play, enableSound: enableSound)
             self.lastStatusUpdateTimestamp = nil
         case let .paused(loadedState):
+            if faded {
+                self.fadeTimer?.invalidate()
+                
+                var volume: Double = 0.0
+                let fadeTimer = SwiftSignalKit.Timer(timeout: 0.025, repeat: true, completion: { [weak self] in
+                    if let strongSelf = self {
+                        volume += 0.1
+                        if volume < 1.0 {
+                            strongSelf.audioRenderer?.renderer.setVolume(volume)
+                        } else {
+                            strongSelf.audioRenderer?.renderer.setVolume(1.0)
+                            strongSelf.fadeTimer?.invalidate()
+                            strongSelf.fadeTimer = nil
+                        }
+                    }
+                }, queue: self.queue)
+                self.fadeTimer = fadeTimer
+                fadeTimer.start()
+            }
+            
             if loadedState.lostAudioSession {
                 let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
                 self.seek(timestamp: timestamp, action: .play)
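Review note: the fade-in is a linear ramp driven by a repeating 0.025 s timer that raises the volume in 0.1 increments, so a full fade takes roughly 0.25 s (10 ticks × 25 ms). The closure captures `volume` by reference, which is what lets the repeating timer accumulate state across ticks. Below is a minimal standalone sketch of the same ramp, using `DispatchSourceTimer` in place of `SwiftSignalKit.Timer`; the helper name and default parameters are illustrative, not part of this diff.

```swift
import Foundation

// Sketch only: a generic linear volume ramp matching the diff's timing
// (0.025 s per tick, 0.1 per step => ~0.25 s for a full 0-to-1 fade).
func makeFadeTimer(from start: Double, to target: Double,
                   step: Double = 0.1, interval: TimeInterval = 0.025,
                   queue: DispatchQueue = .main,
                   apply: @escaping (Double) -> Void,
                   completion: @escaping () -> Void) -> DispatchSourceTimer {
    var volume = start
    let direction: Double = target >= start ? 1.0 : -1.0
    let timer = DispatchSource.makeTimerSource(queue: queue)
    timer.schedule(deadline: .now() + interval, repeating: interval)
    timer.setEventHandler {
        volume += direction * step
        // Stop once the ramp reaches or crosses the target, clamping to it.
        if (direction > 0 && volume >= target) || (direction < 0 && volume <= target) {
            apply(target)
            timer.cancel()
            completion()
        } else {
            apply(volume)
        }
    }
    timer.resume()
    return timer
}
```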
@@ -632,7 +654,7 @@ private final class MediaPlayerContext {
         }
     }
     
-    fileprivate func pause(lostAudioSession: Bool) {
+    fileprivate func pause(lostAudioSession: Bool, faded: Bool = false) {
         assert(self.queue.isCurrent())
         
         switch self.state {
@@ -651,31 +673,52 @@ private final class MediaPlayerContext {
             }
             self.state = .paused(loadedState)
             self.lastStatusUpdateTimestamp = nil
+            
+            if faded {
+                self.fadeTimer?.invalidate()
+                
+                var volume: Double = 1.0
+                let fadeTimer = SwiftSignalKit.Timer(timeout: 0.025, repeat: true, completion: { [weak self] in
+                    if let strongSelf = self {
+                        volume -= 0.1
+                        if volume > 0 {
+                            strongSelf.audioRenderer?.renderer.setVolume(volume)
+                        } else {
+                            strongSelf.fadeTimer?.invalidate()
+                            strongSelf.fadeTimer = nil
+                            strongSelf.tick()
+                        }
+                    }
+                }, queue: self.queue)
+                self.fadeTimer = fadeTimer
+                fadeTimer.start()
+            }
+            
             self.tick()
         }
     }
     
-    fileprivate func togglePlayPause() {
+    fileprivate func togglePlayPause(faded: Bool) {
         assert(self.queue.isCurrent())
         
         switch self.state {
         case .empty:
-            self.play()
+            self.play(faded: false)
         case let .seeking(_, _, _, _, action, _):
             switch action {
             case .play:
-                self.pause(lostAudioSession: false)
+                self.pause(lostAudioSession: false, faded: faded)
            case .pause:
-                self.play()
+                self.play(faded: faded)
             }
         case .paused:
             if !self.enableSound {
                 self.playOnceWithSound(playAndRecord: false, seek: .none)
             } else {
-                self.play()
+                self.play(faded: faded)
             }
         case .playing:
-            self.pause(lostAudioSession: false)
+            self.pause(lostAudioSession: false, faded: faded)
         }
     }
     
@@ -787,7 +830,13 @@ private final class MediaPlayerContext {
         var bufferingProgress: Float?
         
         if let worstStatus = worstStatus, case let .full(fullUntil) = worstStatus, fullUntil.isFinite {
+            var playing = false
             if case .playing = self.state {
+                playing = true
+            } else if self.fadeTimer != nil {
+                playing = true
+            }
+            if playing {
                 rate = self.baseRate
                 
                 let nextTickDelay = max(0.0, fullUntil - timestamp) / self.baseRate
@@ -805,7 +854,13 @@ private final class MediaPlayerContext {
                 rate = 0.0
                 performActionAtEndNow = true
             } else {
+                var playing = false
                 if case .playing = self.state {
+                    playing = true
+                } else if self.fadeTimer != nil {
+                    playing = true
+                }
+                if playing {
                     rate = self.baseRate
                     
                     let tickTimer = SwiftSignalKit.Timer(timeout: nextTickDelay, repeat: false, completion: { [weak self] in
@@ -871,13 +926,18 @@ private final class MediaPlayerContext {
         var statusTimestamp = CACurrentMediaTime()
         let playbackStatus: MediaPlayerPlaybackStatus
         var isPlaying = false
+        var isPaused = false
         if case .playing = self.state {
             isPlaying = true
+        } else if case .paused = self.state {
+            isPaused = true
         }
         if let bufferingProgress = bufferingProgress {
             playbackStatus = .buffering(initial: false, whilePlaying: isPlaying, progress: Float(bufferingProgress), display: true)
         } else if !rate.isZero {
-            if reportRate.isZero {
+            if isPaused && self.fadeTimer != nil {
+                playbackStatus = .paused
+            } else if reportRate.isZero {
                 //playbackStatus = .buffering(initial: false, whilePlaying: true)
                 playbackStatus = .playing
                 statusTimestamp = 0.0
@@ -1076,10 +1136,10 @@ public final class MediaPlayer {
         }
     }
     
-    public func togglePlayPause() {
+    public func togglePlayPause(faded: Bool = false) {
         self.queue.async {
             if let context = self.contextRef?.takeUnretainedValue() {
-                context.togglePlayPause()
+                context.togglePlayPause(faded: faded)
             }
         }
     }
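Review note: the `tick()` and status changes above encode one invariant. During a fade-out the internal state has already flipped to `.paused`, but the renderer must keep running until the ramp drains, so `fadeTimer != nil` counts as "playing" when computing the rate; meanwhile the reported `playbackStatus` becomes `.paused` immediately, so the UI updates without waiting for the ramp. A reduced sketch of that predicate, with stand-in types rather than the diff's real declarations:

```swift
// Stand-in types; the real player uses MediaPlayerContext's state enum.
enum PlayerState { case empty, seeking, paused, playing }

// Keep the render clock running while either actually playing or still
// draining a fade-out, even though the public status already says paused.
func shouldKeepRendering(state: PlayerState, fadeTimerActive: Bool) -> Bool {
    if case .playing = state { return true }
    return fadeTimerActive
}
```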
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
index 0af3ee7a13..614e4d7316 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
@@ -230,6 +230,7 @@ private final class AudioPlayerRendererContext {
     
     var audioGraph: AUGraph?
     var timePitchAudioUnit: AudioComponentInstance?
+    var mixerAudioUnit: AudioComponentInstance?
     var outputAudioUnit: AudioComponentInstance?
     
     var bufferContextId: Int32!
@@ -314,6 +315,12 @@ private final class AudioPlayerRendererContext {
         }
     }
     
+    fileprivate func setVolume(_ volume: Double) {
+        if let mixerAudioUnit = self.mixerAudioUnit {
+            AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume), 0)
+        }
+    }
+    
     fileprivate func setRate(_ rate: Double) {
         assert(audioPlayerRendererQueue.isCurrent())
         
@@ -406,6 +413,15 @@ private final class AudioPlayerRendererContext {
             return
         }
         
+        var mixerNode: AUNode = 0
+        var mixerDescription = AudioComponentDescription()
+        mixerDescription.componentType = kAudioUnitType_Mixer
+        mixerDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer
+        mixerDescription.componentManufacturer = kAudioUnitManufacturer_Apple
+        guard AUGraphAddNode(audioGraph, &mixerDescription, &mixerNode) == noErr else {
+            return
+        }
+        
         var outputNode: AUNode = 0
         var outputDesc = AudioComponentDescription()
         outputDesc.componentType = kAudioUnitType_Output
@@ -429,7 +445,11 @@ private final class AudioPlayerRendererContext {
             return
         }
         
-        guard AUGraphConnectNodeInput(audioGraph, timePitchNode, 0, outputNode, 0) == noErr else {
+        guard AUGraphConnectNodeInput(audioGraph, timePitchNode, 0, mixerNode, 0) == noErr else {
+            return
+        }
+        
+        guard AUGraphConnectNodeInput(audioGraph, mixerNode, 0, outputNode, 0) == noErr else {
             return
         }
         
@@ -444,6 +464,11 @@ private final class AudioPlayerRendererContext {
         }
         AudioUnitSetParameter(timePitchAudioUnit, kTimePitchParam_Rate, kAudioUnitScope_Global, 0, Float32(self.baseRate), 0)
        
+        var maybeMixerAudioUnit: AudioComponentInstance?
+        guard AUGraphNodeInfo(audioGraph, mixerNode, &mixerDescription, &maybeMixerAudioUnit) == noErr, let mixerAudioUnit = maybeMixerAudioUnit else {
+            return
+        }
+        
        var maybeOutputAudioUnit: AudioComponentInstance?
        guard AUGraphNodeInfo(audioGraph, outputNode, &outputDesc, &maybeOutputAudioUnit) == noErr, let outputAudioUnit = maybeOutputAudioUnit else {
            return
        }
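Review note: the renderer gains its volume control by splicing a multichannel mixer between the time-pitch unit and the output unit; `kMultiChannelMixerParam_Volume` on input bus 0 then acts as a master gain for the whole chain. A condensed sketch of just that wiring, assuming the graph has already been created and opened as in the surrounding code (the helper name is illustrative; AUGraph is a deprecated API, but it is what this codebase uses):

```swift
import AudioToolbox

// Sketch: insert timePitch -> mixer -> output and return the mixer unit so
// its kMultiChannelMixerParam_Volume parameter can be driven for fades.
func insertMixer(in graph: AUGraph, after timePitchNode: AUNode, before outputNode: AUNode) -> AudioUnit? {
    var desc = AudioComponentDescription(
        componentType: kAudioUnitType_Mixer,
        componentSubType: kAudioUnitSubType_MultiChannelMixer,
        componentManufacturer: kAudioUnitManufacturer_Apple,
        componentFlags: 0,
        componentFlagsMask: 0)
    var mixerNode: AUNode = 0
    guard AUGraphAddNode(graph, &desc, &mixerNode) == noErr,
          AUGraphConnectNodeInput(graph, timePitchNode, 0, mixerNode, 0) == noErr,
          AUGraphConnectNodeInput(graph, mixerNode, 0, outputNode, 0) == noErr else {
        return nil
    }
    var mixerUnit: AudioUnit?
    guard AUGraphNodeInfo(graph, mixerNode, nil, &mixerUnit) == noErr else {
        return nil
    }
    return mixerUnit
}
```

Note that the diff also points the stream-format and maximum-frames-per-slice properties at the new unit, keeping the mixer's input consistent with the rest of the converter chain.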
@@ -456,7 +481,7 @@ private final class AudioPlayerRendererContext {
        
         var streamFormat = AudioStreamBasicDescription()
         AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
         AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
-        AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+        AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
         
         var callbackStruct = AURenderCallbackStruct()
         callbackStruct.inputProc = rendererInputProc
@@ -474,8 +499,9 @@ private final class AudioPlayerRendererContext {
         var maximumFramesPerSlice: UInt32 = 4096
         AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
+        AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
         AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
-        
+
         guard AUGraphInitialize(audioGraph) == noErr else {
             return
         }
@@ -484,6 +510,7 @@ private final class AudioPlayerRendererContext {
         
         self.audioGraph = audioGraph
         self.timePitchAudioUnit = timePitchAudioUnit
+        self.mixerAudioUnit = mixerAudioUnit
         self.outputAudioUnit = outputAudioUnit
     }
@@ -820,6 +847,15 @@ public final class MediaPlayerAudioRenderer {
         }
     }
     
+    public func setVolume(_ volume: Double) {
+        audioPlayerRendererQueue.async {
+            if let contextRef = self.contextRef {
+                let context = contextRef.takeUnretainedValue()
+                context.setVolume(volume)
+            }
+        }
+    }
+    
     public func beginRequestingFrames(queue: DispatchQueue, takeFrame: @escaping () -> MediaTrackFrameResult) {
         audioPlayerRendererQueue.async {
             if let contextRef = self.contextRef {
diff --git a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
index 95d95ab9f4..0b18602b0d 100644
--- a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
+++ b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift
@@ -77,7 +77,7 @@ private enum SharedMediaPlaybackItem: Equatable {
     func togglePlayPause() {
         switch self {
         case let .audio(player):
-            player.togglePlayPause()
+            player.togglePlayPause(faded: true)
         case let .instantVideo(node):
             node.togglePlayPause()
         }
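Review note: the public `MediaPlayerAudioRenderer.setVolume(_:)` follows the file's existing pattern of hopping to `audioPlayerRendererQueue`, unwrapping `contextRef`, and forwarding to the context, so volume changes are serialized with every other renderer mutation and callers never touch the audio unit directly. And with `SharedMediaPlayer` passing `faded: true` only in the `.audio` case, music playback gets the fade while instant video keeps its abrupt toggle. A hypothetical call site for the new API (the `duck` helper and its levels are illustrative, not from the diff):

```swift
import Foundation

// Hypothetical: duck an active renderer while some other sound plays, then
// restore it. setVolume is asynchronous and applied on the renderer queue;
// 0.0...1.0 is the linear gain range of kMultiChannelMixerParam_Volume.
func duck(_ renderer: MediaPlayerAudioRenderer, to level: Double = 0.2, for duration: TimeInterval = 1.0) {
    renderer.setVolume(level)
    DispatchQueue.main.asyncAfter(deadline: .now() + duration) {
        renderer.setVolume(1.0)
    }
}
```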