Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Reorder audio playback initialization, add play(mixWithOthers:) mode
commit e316f0521b
parent bc23589a5f
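In short, ManagedAudioSessionType.play gains an associated mixWithOthers flag, the flag is threaded from MediaPlayer, NativeVideoContent and the story player down to AudioPlayerRendererContext, and the renderer now acquires its audio session in a dedicated acquireAudioSession() step rather than inside startAudioUnit(). A minimal sketch of the new shape, using illustrative names rather than the full production signatures:

// Sketch only: the session type gains an associated value; record/call cases elided.
enum ManagedAudioSessionTypeSketch: Equatable {
    case ambient
    case play(mixWithOthers: Bool)      // previously just `case play`
    case playWithPossiblePortOverride
}

// Existing call sites keep the old, non-mixing behaviour by default...
let defaultPlayback: ManagedAudioSessionTypeSketch = .play(mixWithOthers: false)
// ...and opt in where other audio should keep playing (notification sounds, stories).
let mixedPlayback: ManagedAudioSessionTypeSketch = .play(mixWithOthers: true)

In this commit only the notification-sound player and the story player opt into mixing; every other call site passes false or relies on the new default.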
@@ -188,7 +188,7 @@ private final class LegacyComponentsGlobalsProviderImpl: NSObject, LegacyCompone
                 convertedType = .recordWithOthers
             }
         default:
-            convertedType = .play
+            convertedType = .play(mixWithOthers: false)
         }
         let disposable = legacyContext.sharedContext.mediaManager.audioSession.push(audioSessionType: convertedType, once: true, activate: { _ in
             activated?()
@@ -125,6 +125,7 @@ private final class MediaPlayerContext {
     private let fetchAutomatically: Bool
     private var playAndRecord: Bool
     private var ambient: Bool
+    private var mixWithOthers: Bool
     private var keepAudioSessionWhilePaused: Bool
     private var continuePlayingWithoutSoundOnLostAudioSession: Bool
     private let storeAfterDownload: (() -> Void)?
@@ -149,7 +150,7 @@ private final class MediaPlayerContext {

     private var stoppedAtEnd = false

-    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
+    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
         assert(queue.isCurrent())

         self.queue = queue
@@ -169,6 +170,7 @@ private final class MediaPlayerContext {
         self.fetchAutomatically = fetchAutomatically
         self.playAndRecord = playAndRecord
         self.ambient = ambient
+        self.mixWithOthers = mixWithOthers
         self.keepAudioSessionWhilePaused = keepAudioSessionWhilePaused
         self.continuePlayingWithoutSoundOnLostAudioSession = continuePlayingWithoutSoundOnLostAudioSession
         self.storeAfterDownload = storeAfterDownload
@@ -402,7 +404,7 @@ private final class MediaPlayerContext {
             self.audioRenderer = nil

             let queue = self.queue
-            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -481,7 +483,7 @@ private final class MediaPlayerContext {
         self.lastStatusUpdateTimestamp = nil
         if self.enableSound {
             let queue = self.queue
-            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -1136,10 +1138,10 @@ public final class MediaPlayer {
         }
     }

-    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
+    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
         let audioLevelPipe = self.audioLevelPipe
         self.queue.async {
-            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
+            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
             self.contextRef = Unmanaged.passRetained(context)
         }
     }
@@ -238,6 +238,7 @@ private final class AudioPlayerRendererContext {
     var audioSessionControl: ManagedAudioSessionControl?
     let playAndRecord: Bool
     let ambient: Bool
+    let mixWithOthers: Bool
     var forceAudioToSpeaker: Bool {
         didSet {
             if self.forceAudioToSpeaker != oldValue {
@@ -251,7 +252,7 @@ private final class AudioPlayerRendererContext {
        }
    }

-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())

         self.audioSession = audioSession
@@ -267,6 +268,7 @@ private final class AudioPlayerRendererContext {
         self.playAndRecord = playAndRecord
         self.useVoiceProcessingMode = useVoiceProcessingMode
         self.ambient = ambient
+        self.mixWithOthers = mixWithOthers

         self.audioStreamDescription = audioRendererNativeStreamDescription()

@@ -370,7 +372,7 @@ private final class AudioPlayerRendererContext {

         if self.paused {
             self.paused = false
-            self.startAudioUnit()
+            self.acquireAudioSession()
         }
     }

@@ -384,6 +386,69 @@ private final class AudioPlayerRendererContext {
         }
     }

+    private func acquireAudioSession() {
+        switch self.audioSession {
+        case let .manager(manager):
+            self.audioSessionDisposable.set(manager.push(audioSessionType: self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play(mixWithOthers: self.mixWithOthers)), outputMode: self.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system, once: self.ambient, manualActivate: { [weak self] control in
+                audioPlayerRendererQueue.async {
+                    if let strongSelf = self {
+                        strongSelf.audioSessionControl = control
+                        if !strongSelf.paused {
+                            control.setup()
+                            control.setOutputMode(strongSelf.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system)
+                            control.activate({ _ in
+                                audioPlayerRendererQueue.async {
+                                    if let strongSelf = self, !strongSelf.paused {
+                                        strongSelf.audioSessionAcquired()
+                                    }
+                                }
+                            })
+                        }
+                    }
+                }
+            }, deactivate: { [weak self] temporary in
+                return Signal { subscriber in
+                    audioPlayerRendererQueue.async {
+                        if let strongSelf = self {
+                            strongSelf.audioSessionControl = nil
+                            if !temporary {
+                                strongSelf.audioPaused()
+                                strongSelf.stop()
+                            }
+                            subscriber.putCompletion()
+                        }
+                    }
+
+                    return EmptyDisposable
+                }
+            }, headsetConnectionStatusChanged: { [weak self] value in
+                audioPlayerRendererQueue.async {
+                    if let strongSelf = self, !value {
+                        strongSelf.audioPaused()
+                    }
+                }
+            }))
+        case let .custom(request):
+            self.audioSessionDisposable.set(request(MediaPlayerAudioSessionCustomControl(activate: { [weak self] in
+                audioPlayerRendererQueue.async {
+                    if let strongSelf = self {
+                        if !strongSelf.paused {
+                            strongSelf.audioSessionAcquired()
+                        }
+                    }
+                }
+            }, deactivate: { [weak self] in
+                audioPlayerRendererQueue.async {
+                    if let strongSelf = self {
+                        strongSelf.audioSessionControl = nil
+                        strongSelf.audioPaused()
+                        strongSelf.stop()
+                    }
+                }
+            })))
+        }
+    }
+
     private func startAudioUnit() {
         assert(audioPlayerRendererQueue.isCurrent())
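The ternary inside the push call above encodes the renderer's priority: ambient beats playAndRecord, and plain playback now carries the mixWithOthers flag. Spelled out as a standalone sketch, reusing the illustrative enum from the sketch near the top of this page (not the production API):

// Sketch of the session-type selection performed by acquireAudioSession() above.
func selectSessionType(ambient: Bool, playAndRecord: Bool, mixWithOthers: Bool) -> ManagedAudioSessionTypeSketch {
    if ambient {
        return .ambient
    } else if playAndRecord {
        return .playWithPossiblePortOverride
    } else {
        return .play(mixWithOthers: mixWithOthers)
    }
}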
@@ -538,72 +603,13 @@ private final class AudioPlayerRendererContext {
             self.equalizerAudioUnit = equalizerAudioUnit
             self.outputAudioUnit = outputAudioUnit
         }

-        switch self.audioSession {
-        case let .manager(manager):
-            self.audioSessionDisposable.set(manager.push(audioSessionType: self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play), outputMode: self.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system, once: self.ambient, manualActivate: { [weak self] control in
-                audioPlayerRendererQueue.async {
-                    if let strongSelf = self {
-                        strongSelf.audioSessionControl = control
-                        if !strongSelf.paused {
-                            control.setup()
-                            control.setOutputMode(strongSelf.forceAudioToSpeaker ? .speakerIfNoHeadphones : .system)
-                            control.activate({ _ in
-                                audioPlayerRendererQueue.async {
-                                    if let strongSelf = self, !strongSelf.paused {
-                                        strongSelf.audioSessionAcquired()
-                                    }
-                                }
-                            })
-                        }
-                    }
-                }
-            }, deactivate: { [weak self] temporary in
-                return Signal { subscriber in
-                    audioPlayerRendererQueue.async {
-                        if let strongSelf = self {
-                            strongSelf.audioSessionControl = nil
-                            if !temporary {
-                                strongSelf.audioPaused()
-                                strongSelf.stop()
-                            }
-                            subscriber.putCompletion()
-                        }
-                    }
-
-                    return EmptyDisposable
-                }
-            }, headsetConnectionStatusChanged: { [weak self] value in
-                audioPlayerRendererQueue.async {
-                    if let strongSelf = self, !value {
-                        strongSelf.audioPaused()
-                    }
-                }
-            }))
-        case let .custom(request):
-            self.audioSessionDisposable.set(request(MediaPlayerAudioSessionCustomControl(activate: { [weak self] in
-                audioPlayerRendererQueue.async {
-                    if let strongSelf = self {
-                        if !strongSelf.paused {
-                            strongSelf.audioSessionAcquired()
-                        }
-                    }
-                }
-            }, deactivate: { [weak self] in
-                audioPlayerRendererQueue.async {
-                    if let strongSelf = self {
-                        strongSelf.audioSessionControl = nil
-                        strongSelf.audioPaused()
-                        strongSelf.stop()
-                    }
-                }
-            })))
-        }
     }

     private func audioSessionAcquired() {
         assert(audioPlayerRendererQueue.isCurrent())

+        self.startAudioUnit()

         if let audioGraph = self.audioGraph {
             let startTime = CFAbsoluteTimeGetCurrent()
@@ -821,7 +827,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase

-    public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
@@ -834,7 +840,7 @@ public final class MediaPlayerAudioRenderer {
         self.audioTimebase = audioTimebase!

         audioPlayerRendererQueue.async {
-            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
+            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
             self.contextRef = Unmanaged.passRetained(context)
         }
     }
@@ -319,7 +319,7 @@ public func playSound(context: AccountContext, notificationSoundList: Notificati
     return Signal { subscriber in
         var currentPlayer: AudioPlayerWrapper?
         var deactivateImpl: (() -> Void)?
-        let session = context.sharedContext.mediaManager.audioSession.push(audioSessionType: .play, activate: { _ in
+        let session = context.sharedContext.mediaManager.audioSession.push(audioSessionType: .play(mixWithOthers: true), activate: { _ in
             Queue.mainQueue().async {
                 let filePath = fileNameForNotificationSound(account: context.account, notificationSoundList: notificationSoundList, sound: sound, defaultSound: defaultSound)
@@ -16,7 +16,7 @@ func managedAudioSessionLog(_ what: @autoclosure () -> String) {

 public enum ManagedAudioSessionType: Equatable {
     case ambient
-    case play
+    case play(mixWithOthers: Bool)
     case playWithPossiblePortOverride
     case record(speaker: Bool, withOthers: Bool)
     case voiceCall
@@ -766,23 +766,25 @@ public final class ManagedAudioSession: NSObject {
         managedAudioSessionLog("ManagedAudioSession setting category for \(type) (native: \(nativeCategory)) activateNow: \(activateNow)")
         var options: AVAudioSession.CategoryOptions = []
         switch type {
-        case .play:
-            break
+        case let .play(mixWithOthers):
+            if mixWithOthers {
+                options.insert(.mixWithOthers)
+            }
         case .ambient:
             options.insert(.mixWithOthers)
         case .playWithPossiblePortOverride:
             if case .playAndRecord = nativeCategory {
                 options.insert(.allowBluetoothA2DP)
             }
         case .voiceCall, .videoCall:
             options.insert(.allowBluetooth)
             options.insert(.allowBluetoothA2DP)
             options.insert(.mixWithOthers)
         case .record:
             options.insert(.allowBluetooth)
         case .recordWithOthers:
             options.insert(.allowBluetoothA2DP)
             options.insert(.mixWithOthers)
         }
         managedAudioSessionLog("ManagedAudioSession setting category and options")
         let mode: AVAudioSession.Mode
@@ -796,11 +798,24 @@ public final class ManagedAudioSession: NSObject {
         default:
             mode = .default
         }
+
+        switch type {
+        case .play(mixWithOthers: true), .ambient:
+            try AVAudioSession.sharedInstance().setActive(false)
+        default:
+            break
+        }
+
         try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
         try AVAudioSession.sharedInstance().setMode(mode)
         if AVAudioSession.sharedInstance().categoryOptions != options {
-            managedAudioSessionLog("ManagedAudioSession resetting options")
-            try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+            switch type {
+            case .voiceCall, .videoCall, .recordWithOthers:
+                managedAudioSessionLog("ManagedAudioSession resetting options")
+                try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
+            default:
+                break
+            }
         }
     } catch let error {
         managedAudioSessionLog("ManagedAudioSession setup error \(error)")
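At the AVAudioSession level the flag simply adds the .mixWithOthers category option, and the hunk above also deactivates the session before reconfiguring for mixable or ambient playback, presumably so that the category switch does not interrupt audio already playing in other apps. A minimal sketch of that behaviour, assuming a plain .playback category (the real code derives the native category, options and mode from the session type):

import AVFoundation

// Sketch of how a mixable playback session ends up configured (assumes .playback).
func configurePlaybackSession(mixWithOthers: Bool) throws {
    let session = AVAudioSession.sharedInstance()
    var options: AVAudioSession.CategoryOptions = []
    if mixWithOthers {
        // Other apps' audio keeps playing underneath ours instead of being interrupted.
        options.insert(.mixWithOthers)
        // Mirrors the hunk above: drop the active session first so the mixable
        // configuration is applied without taking audio focus from other apps.
        try session.setActive(false)
    }
    try session.setCategory(.playback, options: options)
    try session.setMode(.default)
}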
@@ -898,7 +898,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             self.audioOutputStatePromise.set(.single(([], .speaker)))
         }

-        self.audioSessionDisposable = audioSession.push(audioSessionType: self.isStream ? .play : .voiceCall, activateImmediately: true, manualActivate: { [weak self] control in
+        self.audioSessionDisposable = audioSession.push(audioSessionType: self.isStream ? .play(mixWithOthers: false) : .voiceCall, activateImmediately: true, manualActivate: { [weak self] control in
             Queue.mainQueue().async {
                 if let strongSelf = self {
                     strongSelf.updateSessionState(internalState: strongSelf.internalState, audioSessionControl: control)
@@ -138,6 +138,7 @@ final class StoryItemContentComponent: Component {
                 loopVideo: true,
                 enableSound: true,
                 beginWithAmbientSound: environment.sharedState.useAmbientMode,
+                mixWithOthers: true,
                 useLargeThumbnail: false,
                 autoFetchFullSizeThumbnail: false,
                 tempFilePath: nil,
@@ -2236,7 +2236,7 @@ public final class StoryItemSetContainerComponent: Component {
                 }
             }

-            if component.slice.item.storyItem.isCloseFriends && component.slice.peer.id != component.context.account.peerId {
+            if component.slice.item.storyItem.isCloseFriends {
                 let closeFriendIcon: ComponentView<Empty>
                 var closeFriendIconTransition = transition
                 if let current = self.closeFriendIcon {
@@ -2262,10 +2262,16 @@ public final class StoryItemSetContainerComponent: Component {
                     guard let closeFriendIconView = self.closeFriendIcon?.view else {
                         return
                     }
+                    let tooltipText: String
+                    if component.slice.peer.id == component.context.account.peerId {
+                        tooltipText = "Only people from your close friends list will see this story."
+                    } else {
+                        tooltipText = "You are seeing this story because you have\nbeen added to \(component.slice.peer.compactDisplayTitle)'s list of close friends."
+                    }
                     let tooltipScreen = TooltipScreen(
                         account: component.context.account,
                         sharedContext: component.context.sharedContext,
-                        text: .plain(text: "You are seeing this story because you have\nbeen added to \(component.slice.peer.compactDisplayTitle)'s list of close friends."), style: .default, location: TooltipScreen.Location.point(closeFriendIconView.convert(closeFriendIconView.bounds, to: self).offsetBy(dx: 1.0, dy: 6.0), .top), displayDuration: .manual, shouldDismissOnTouch: { _, _ in
+                        text: .plain(text: tooltipText), style: .default, location: TooltipScreen.Location.point(closeFriendIconView.convert(closeFriendIconView.bounds, to: self).offsetBy(dx: 1.0, dy: 6.0), .top), displayDuration: .manual, shouldDismissOnTouch: { _, _ in
                             return .dismiss(consume: true)
                         }
                     )
@@ -214,7 +214,7 @@ final class ManagedAudioRecorderContext {
             }
             return ActionDisposable {
             }
-        }), playAndRecord: true, ambient: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
+        }), playAndRecord: true, ambient: false, mixWithOthers: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
         }, audioPaused: {})
         self.toneRenderer = toneRenderer
@@ -37,6 +37,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     public let loopVideo: Bool
     public let enableSound: Bool
     public let beginWithAmbientSound: Bool
+    public let mixWithOthers: Bool
     public let baseRate: Double
     let fetchAutomatically: Bool
     let onlyFullSizeThumbnail: Bool
@@ -54,7 +55,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     let displayImage: Bool
     let hasSentFramesToDisplay: (() -> Void)?

-    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
+    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
         self.id = id
         self.nativeId = id
         self.userLocation = userLocation
@@ -78,6 +79,7 @@ public final class NativeVideoContent: UniversalVideoContent {
         self.loopVideo = loopVideo
         self.enableSound = enableSound
         self.beginWithAmbientSound = beginWithAmbientSound
+        self.mixWithOthers = mixWithOthers
         self.baseRate = baseRate
         self.fetchAutomatically = fetchAutomatically
         self.onlyFullSizeThumbnail = onlyFullSizeThumbnail
@@ -97,7 +99,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     }

     public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
-        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
+        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
     }

     public func isEqual(to other: UniversalVideoContent) -> Bool {
@@ -120,6 +122,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
     private let fileReference: FileMediaReference
     private let enableSound: Bool
     private let beginWithAmbientSound: Bool
+    private let mixWithOthers: Bool
     private let loopVideo: Bool
     private let baseRate: Double
     private let audioSessionManager: ManagedAudioSession
@@ -177,13 +180,14 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent

     private let hasSentFramesToDisplay: (() -> Void)?

-    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
+    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
         self.postbox = postbox
         self.userLocation = userLocation
         self.fileReference = fileReference
         self.placeholderColor = placeholderColor
         self.enableSound = enableSound
         self.beginWithAmbientSound = beginWithAmbientSound
+        self.mixWithOthers = mixWithOthers
         self.loopVideo = loopVideo
         self.baseRate = baseRate
         self.audioSessionManager = audioSessionManager
@@ -194,7 +198,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent

         self.imageNode = TransformImageNode()

-        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, ambient: beginWithAmbientSound, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
+        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: MediaResourceUserContentType(file: fileReference.media), resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)

         var actionAtEndImpl: (() -> Void)?
         if enableSound && !loopVideo {
@@ -388,7 +388,7 @@ private final class PlatformVideoContentNode: ASDisplayNode, UniversalVideoConte
             self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
         }
         if !self.hasAudioSession {
-            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play, activate: { [weak self] _ in
+            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                 self?.hasAudioSession = true
                 self?.player.play()
             }, deactivate: { [weak self] _ in
@@ -224,7 +224,7 @@ private final class SystemVideoContentNode: ASDisplayNode, UniversalVideoContent
             self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: self.approximateDuration, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
         }
         if !self.hasAudioSession {
-            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play, activate: { [weak self] _ in
+            self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
                 self?.hasAudioSession = true
                 self?.player.play()
             }, deactivate: { [weak self] _ in