This commit is contained in:
Ali 2023-07-21 00:16:56 +04:00
parent d16d5b8bb8
commit 6e17762083
23 changed files with 348 additions and 331 deletions

View File

@ -1171,6 +1171,7 @@ private final class NotificationServiceHandler {
}*/
if let storyId {
content.category = "st"
action = .pollStories(peerId: peerId, content: content, storyId: storyId)
} else {
action = .poll(peerId: peerId, content: content, messageId: messageIdValue)

View File

@ -9714,3 +9714,5 @@ Sorry for the inconvenience.";
"AutoDownloadSettings.Stories" = "Stories";
"MediaEditor.Draft" = "Draft";
"Notification.LockScreenStoryPlaceholder" = "New Story";

View File

@ -23,6 +23,7 @@ public protocol UniversalVideoContentNode: AnyObject {
func setSoundEnabled(_ value: Bool)
func seek(_ timestamp: Double)
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
func setSoundMuted(soundMuted: Bool)
func continueWithOverridingAmbientMode(isAmbient: Bool)
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool)
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
@ -284,6 +285,14 @@ public final class UniversalVideoNode: ASDisplayNode {
})
}
public func setSoundMuted(soundMuted: Bool) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {
contentNode.setSoundMuted(soundMuted: soundMuted)
}
})
}
public func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {

View File

@ -1856,6 +1856,9 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
}
})
if self.previewing {
self.storiesReady.set(.single(true))
} else {
self.storySubscriptionsDisposable = (self.context.engine.messages.storySubscriptions(isHidden: self.location == .chatList(groupId: .archive))
|> deliverOnMainQueue).start(next: { [weak self] rawStorySubscriptions in
guard let self else {
@ -1967,6 +1970,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
}
}
}
}
fileprivate func maybeDisplayStoryTooltip() {
let content = self.updateHeaderContent()

View File

@ -2169,7 +2169,7 @@ final class ChatListControllerNode: ASDisplayNode, UIGestureRecognizerDelegate {
return
}
if let storySubscriptions = self.controller?.orderedStorySubscriptions {
if let controller = self.controller, let storySubscriptions = controller.orderedStorySubscriptions, shouldDisplayStoriesInChatListHeader(storySubscriptions: storySubscriptions, isHidden: controller.location == .chatList(groupId: .archive)) {
let _ = storySubscriptions
self.tempAllowAvatarExpansion = true

View File

@ -124,6 +124,7 @@ private final class MediaPlayerContext {
private var baseRate: Double
private let fetchAutomatically: Bool
private var playAndRecord: Bool
private var soundMuted: Bool
private var ambient: Bool
private var mixWithOthers: Bool
private var keepAudioSessionWhilePaused: Bool
@ -150,7 +151,7 @@ private final class MediaPlayerContext {
private var stoppedAtEnd = false
init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
assert(queue.isCurrent())
self.queue = queue
@ -169,6 +170,7 @@ private final class MediaPlayerContext {
self.baseRate = baseRate
self.fetchAutomatically = fetchAutomatically
self.playAndRecord = playAndRecord
self.soundMuted = soundMuted
self.ambient = ambient
self.mixWithOthers = mixWithOthers
self.keepAudioSessionWhilePaused = keepAudioSessionWhilePaused
@ -404,7 +406,7 @@ private final class MediaPlayerContext {
self.audioRenderer = nil
let queue = self.queue
renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, soundMuted: self.soundMuted, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
queue.async {
if let strongSelf = self {
strongSelf.tick()
@ -483,7 +485,7 @@ private final class MediaPlayerContext {
self.lastStatusUpdateTimestamp = nil
if self.enableSound {
let queue = self.queue
let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, soundMuted: self.soundMuted, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
queue.async {
if let strongSelf = self {
strongSelf.tick()
@ -601,43 +603,15 @@ private final class MediaPlayerContext {
self.stoppedAtEnd = false
}
fileprivate func setSoundMuted(soundMuted: Bool) {
self.soundMuted = soundMuted
self.audioRenderer?.renderer.setSoundMuted(soundMuted: soundMuted)
}
fileprivate func continueWithOverridingAmbientMode(isAmbient: Bool) {
if !isAmbient {
self.ambient = false
var loadedState: MediaPlayerLoadedState?
switch self.state {
case .empty:
break
case let .playing(currentLoadedState):
loadedState = currentLoadedState
case let .paused(currentLoadedState):
loadedState = currentLoadedState
case .seeking:
break
}
if let loadedState = loadedState {
let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
self.seek(timestamp: timestamp, action: .play)
}
} else {
self.ambient = true
var loadedState: MediaPlayerLoadedState?
switch self.state {
case .empty:
break
case let .playing(currentLoadedState):
loadedState = currentLoadedState
case let .paused(currentLoadedState):
loadedState = currentLoadedState
case .seeking:
break
}
if let loadedState = loadedState {
let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
self.seek(timestamp: timestamp, action: .play)
}
if self.ambient != isAmbient {
self.ambient = isAmbient
self.audioRenderer?.renderer.reconfigureAudio(ambient: self.ambient)
}
}
@ -1154,10 +1128,10 @@ public final class MediaPlayer {
}
}
public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, soundMuted: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
let audioLevelPipe = self.audioLevelPipe
self.queue.async {
let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
self.contextRef = Unmanaged.passRetained(context)
}
}
@ -1185,6 +1159,14 @@ public final class MediaPlayer {
}
}
public func setSoundMuted(soundMuted: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setSoundMuted(soundMuted: soundMuted)
}
}
}
public func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {

View File

@ -237,7 +237,9 @@ private final class AudioPlayerRendererContext {
let audioSessionDisposable = MetaDisposable()
var audioSessionControl: ManagedAudioSessionControl?
let playAndRecord: Bool
let ambient: Bool
var soundMuted: Bool
var ambient: Bool
var volume: Double = 1.0
let mixWithOthers: Bool
var forceAudioToSpeaker: Bool {
didSet {
@ -252,7 +254,7 @@ private final class AudioPlayerRendererContext {
}
}
init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
assert(audioPlayerRendererQueue.isCurrent())
self.audioSession = audioSession
@ -267,6 +269,7 @@ private final class AudioPlayerRendererContext {
self.playAndRecord = playAndRecord
self.useVoiceProcessingMode = useVoiceProcessingMode
self.soundMuted = soundMuted
self.ambient = ambient
self.mixWithOthers = mixWithOthers
@ -318,8 +321,10 @@ private final class AudioPlayerRendererContext {
}
fileprivate func setVolume(_ volume: Double) {
self.volume = volume
if let mixerAudioUnit = self.mixerAudioUnit {
AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume), 0)
AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume) * (self.soundMuted ? 0.0 : 1.0), 0)
}
}
@ -345,6 +350,36 @@ private final class AudioPlayerRendererContext {
}
}
fileprivate func setSoundMuted(soundMuted: Bool) {
self.soundMuted = soundMuted
if let mixerAudioUnit = self.mixerAudioUnit {
AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(self.volume) * (self.soundMuted ? 0.0 : 1.0), 0)
}
}
fileprivate func reconfigureAudio(ambient: Bool) {
self.ambient = ambient
if let audioGraph = self.audioGraph {
var isRunning: DarwinBoolean = false
AUGraphIsRunning(audioGraph, &isRunning)
if isRunning.boolValue {
AUGraphStop(audioGraph)
}
}
self.audioSessionControl?.setType(self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play(mixWithOthers: self.mixWithOthers)), completion: { [weak self] in
audioPlayerRendererQueue.async {
guard let self else {
return
}
if let audioGraph = self.audioGraph {
AUGraphStart(audioGraph)
}
}
})
}
fileprivate func flushBuffers(at timestamp: CMTime, completion: () -> Void) {
assert(audioPlayerRendererQueue.isCurrent())
@ -554,6 +589,8 @@ private final class AudioPlayerRendererContext {
if self.forAudioVideoMessage && !self.ambient {
AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, self.forceAudioToSpeaker ? 0.0 : 12.0, 0)
} else if self.soundMuted {
AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, 0.0, 0)
}
var maybeOutputAudioUnit: AudioComponentInstance?
@ -591,6 +628,8 @@ private final class AudioPlayerRendererContext {
AudioUnitSetProperty(equalizerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(self.volume) * (self.soundMuted ? 0.0 : 1.0), 0)
guard AUGraphInitialize(audioGraph) == noErr else {
return
}
@ -827,7 +866,7 @@ public final class MediaPlayerAudioRenderer {
private let audioClock: CMClock
public let audioTimebase: CMTimebase
public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
var audioClock: CMClock?
CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
if audioClock == nil {
@ -840,7 +879,7 @@ public final class MediaPlayerAudioRenderer {
self.audioTimebase = audioTimebase!
audioPlayerRendererQueue.async {
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
self.contextRef = Unmanaged.passRetained(context)
}
}
@ -870,6 +909,24 @@ public final class MediaPlayerAudioRenderer {
}
}
public func setSoundMuted(soundMuted: Bool) {
audioPlayerRendererQueue.async {
if let contextRef = self.contextRef {
let context = contextRef.takeUnretainedValue()
context.setSoundMuted(soundMuted: soundMuted)
}
}
}
public func reconfigureAudio(ambient: Bool) {
audioPlayerRendererQueue.async {
if let contextRef = self.contextRef {
let context = contextRef.takeUnretainedValue()
context.reconfigureAudio(ambient: ambient)
}
}
}
public func setRate(_ rate: Double) {
audioPlayerRendererQueue.async {
if let contextRef = self.contextRef {

View File

@ -120,7 +120,7 @@ public enum AudioSessionOutputMode: Equatable {
private final class HolderRecord {
let id: Int32
let audioSessionType: ManagedAudioSessionType
var audioSessionType: ManagedAudioSessionType
let control: ManagedAudioSessionControl
let activate: (ManagedAudioSessionControl) -> Void
let deactivate: (Bool) -> Signal<Void, NoError>
@ -161,12 +161,14 @@ public class ManagedAudioSessionControl {
private let activateImpl: (ManagedAudioSessionControlActivate) -> Void
private let setupAndActivateImpl: (Bool, ManagedAudioSessionControlActivate) -> Void
private let setOutputModeImpl: (AudioSessionOutputMode) -> Void
private let setTypeImpl: (ManagedAudioSessionType, @escaping () -> Void) -> Void
fileprivate init(setupImpl: @escaping (Bool) -> Void, activateImpl: @escaping (ManagedAudioSessionControlActivate) -> Void, setOutputModeImpl: @escaping (AudioSessionOutputMode) -> Void, setupAndActivateImpl: @escaping (Bool, ManagedAudioSessionControlActivate) -> Void) {
fileprivate init(setupImpl: @escaping (Bool) -> Void, activateImpl: @escaping (ManagedAudioSessionControlActivate) -> Void, setOutputModeImpl: @escaping (AudioSessionOutputMode) -> Void, setupAndActivateImpl: @escaping (Bool, ManagedAudioSessionControlActivate) -> Void, setTypeImpl: @escaping (ManagedAudioSessionType, @escaping () -> Void) -> Void) {
self.setupImpl = setupImpl
self.activateImpl = activateImpl
self.setOutputModeImpl = setOutputModeImpl
self.setupAndActivateImpl = setupAndActivateImpl
self.setTypeImpl = setTypeImpl
}
public func setup(synchronous: Bool = false) {
@ -184,6 +186,10 @@ public class ManagedAudioSessionControl {
public func setOutputMode(_ mode: AudioSessionOutputMode) {
self.setOutputModeImpl(mode)
}
public func setType(_ audioSessionType: ManagedAudioSessionType, completion: @escaping () -> Void) {
self.setTypeImpl(audioSessionType, completion)
}
}
public final class ManagedAudioSession: NSObject {
@ -548,6 +554,24 @@ public final class ManagedAudioSession: NSObject {
queue.async(f)
}
}
}, setTypeImpl: { [weak self] audioSessionType, completion in
queue.async {
if let strongSelf = self {
for holder in strongSelf.holders {
if holder.id == id {
if holder.audioSessionType != audioSessionType {
holder.audioSessionType = audioSessionType
}
if holder.active {
strongSelf.updateAudioSessionType(audioSessionType)
}
}
}
}
completion()
}
}), activate: { [weak self] state in
manualActivate(state)
queue.async {
@ -801,7 +825,11 @@ public final class ManagedAudioSession: NSObject {
switch type {
case .play(mixWithOthers: true), .ambient:
do {
try AVAudioSession.sharedInstance().setActive(false)
} catch let error {
managedAudioSessionLog("ManagedAudioSession setActive error \(error)")
}
default:
break
}
@ -1004,6 +1032,12 @@ public final class ManagedAudioSession: NSObject {
}
}
private func updateAudioSessionType(_ audioSessionType: ManagedAudioSessionType) {
if let (_, outputMode) = self.currentTypeAndOutputMode {
self.setup(type: audioSessionType, outputMode: outputMode, activateNow: true)
}
}
private func updateOutputMode(_ outputMode: AudioSessionOutputMode) {
if let (type, _) = self.currentTypeAndOutputMode {
self.setup(type: type, outputMode: outputMode, activateNow: true)

View File

@ -724,7 +724,7 @@ private func apiInputPrivacyRules(privacy: EngineStoryPrivacy, transaction: Tran
privacyRules = [.inputPrivacyValueAllowCloseFriends]
case .nobody:
if privacy.additionallyIncludePeers.isEmpty {
privacyRules = [.inputPrivacyValueDisallowAll]
privacyRules = [.inputPrivacyValueAllowUsers(users: [.inputUserSelf])]
} else {
privacyRules = []
}

View File

@ -171,7 +171,7 @@ public extension StoryContainerScreen {
|> take(1)
|> mapToSignal { state -> Signal<StoryContentContextState, NoError> in
if let slice = state.slice {
#if DEBUG && true
#if DEBUG && false
if "".isEmpty {
return .single(state)
|> delay(4.0, queue: .mainQueue())

View File

@ -153,7 +153,8 @@ final class StoryItemContentComponent: Component {
imageReference: nil,
streamVideo: .story,
loopVideo: true,
enableSound: component.audioMode != .off,
enableSound: true,
soundMuted: component.audioMode == .off,
beginWithAmbientSound: component.audioMode == .ambient,
mixWithOthers: true,
useLargeThumbnail: false,
@ -255,7 +256,7 @@ final class StoryItemContentComponent: Component {
override func leaveAmbientMode() {
if let videoNode = self.videoNode {
self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
videoNode.setSoundEnabled(true)
videoNode.setSoundMuted(soundMuted: false)
videoNode.continueWithOverridingAmbientMode(isAmbient: false)
}
}
@ -266,7 +267,7 @@ final class StoryItemContentComponent: Component {
if ambient {
videoNode.continueWithOverridingAmbientMode(isAmbient: true)
} else {
videoNode.setSoundEnabled(false)
videoNode.setSoundMuted(soundMuted: true)
}
}
}

View File

@ -25,8 +25,6 @@ final class StoryItemImageView: UIView {
private(set) var isContentLoaded: Bool = false
var didLoadContents: (() -> Void)?
private var isCaptureProtected: Bool = false
override init(frame: CGRect) {
self.contentView = UIImageView()
self.contentView.contentMode = .scaleAspectFill
@ -44,8 +42,8 @@ final class StoryItemImageView: UIView {
self.disposable?.dispose()
}
private func updateImage(image: UIImage) {
if self.isCaptureProtected {
private func updateImage(image: UIImage, isCaptureProtected: Bool) {
if isCaptureProtected {
let captureProtectedContentLayer: CaptureProtectedContentLayer
if let current = self.captureProtectedContentLayer {
captureProtectedContentLayer = current
@ -71,8 +69,6 @@ final class StoryItemImageView: UIView {
}
func update(context: AccountContext, strings: PresentationStrings, peer: EnginePeer, storyId: Int32, media: EngineMedia, size: CGSize, isCaptureProtected: Bool, attemptSynchronous: Bool, transition: Transition) {
self.isCaptureProtected = isCaptureProtected
self.backgroundColor = isCaptureProtected ? UIColor(rgb: 0x181818) : nil
var dimensions: CGSize?
@ -90,14 +86,28 @@ final class StoryItemImageView: UIView {
dimensions = representation.dimensions.cgSize
if isMediaUpdated {
if isCaptureProtected {
if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
self.updateImage(image: image, isCaptureProtected: false)
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.1, execute: { [weak self] in
guard let self else {
return
}
self.contentView.image = nil
})
}
}
}
if attemptSynchronous, let path = context.account.postbox.mediaBox.completedResourcePath(id: representation.resource.id, pathExtension: nil) {
if #available(iOS 15.0, *) {
if let image = UIImage(contentsOfFile: path)?.preparingForDisplay() {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
} else {
if let image = UIImage(contentsOfFile: path)?.precomposed() {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
}
self.isContentLoaded = true
@ -105,7 +115,7 @@ final class StoryItemImageView: UIView {
} else {
if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
}
@ -137,7 +147,7 @@ final class StoryItemImageView: UIView {
return
}
if let image {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
self.isContentLoaded = true
self.didLoadContents?()
}
@ -149,16 +159,30 @@ final class StoryItemImageView: UIView {
dimensions = file.dimensions?.cgSize
if isMediaUpdated {
if isCaptureProtected {
if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
self.updateImage(image: image, isCaptureProtected: false)
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.1, execute: { [weak self] in
guard let self else {
return
}
self.contentView.image = nil
})
}
}
}
let cachedPath = context.account.postbox.mediaBox.cachedRepresentationCompletePath(file.resource.id, representation: CachedVideoFirstFrameRepresentation())
if attemptSynchronous, FileManager.default.fileExists(atPath: cachedPath) {
if #available(iOS 15.0, *) {
if let image = UIImage(contentsOfFile: cachedPath)?.preparingForDisplay() {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
} else {
if let image = UIImage(contentsOfFile: cachedPath)?.precomposed() {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
}
self.isContentLoaded = true
@ -166,7 +190,7 @@ final class StoryItemImageView: UIView {
} else {
if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
}
}
@ -195,7 +219,7 @@ final class StoryItemImageView: UIView {
return
}
if let image {
self.updateImage(image: image)
self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
self.isContentLoaded = true
self.didLoadContents?()
}
@ -217,7 +241,7 @@ final class StoryItemImageView: UIView {
}
}
if self.isCaptureProtected {
if isCaptureProtected {
let captureProtectedInfo: ComponentView<Empty>
var captureProtectedInfoTransition = transition
if let current = self.captureProtectedInfo {

View File

@ -363,6 +363,8 @@ final class StoryItemSetContainerSendMessage {
controller.present(tooltipScreen, in: .current)
self.tooltipScreen = tooltipScreen
view.updateIsProgressPaused()
HapticFeedback().success()
}
func presentSendMessageOptions(view: StoryItemSetContainerComponent.View, sourceView: UIView, gesture: ContextGesture?) {

View File

@ -2392,7 +2392,6 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
}))
}
@available(iOS 10.0, *)
func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: @escaping () -> Void) {
let _ = (accountIdFromNotification(response.notification, sharedContext: self.sharedContextPromise.get())
|> deliverOnMainQueue).start(next: { accountId in
@ -2493,11 +2492,11 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
return settings.displayNameOnLockscreen
}
|> deliverOnMainQueue).start(next: { displayNames in
self.registerForNotifications(replyString: presentationData.strings.Notification_Reply, messagePlaceholderString: presentationData.strings.Conversation_InputTextPlaceholder, hiddenContentString: presentationData.strings.Watch_MessageView_Title, hiddenReactionContentString: presentationData.strings.Notification_LockScreenReactionPlaceholder, includeNames: displayNames, authorize: authorize, completion: completion)
self.registerForNotifications(replyString: presentationData.strings.Notification_Reply, messagePlaceholderString: presentationData.strings.Conversation_InputTextPlaceholder, hiddenContentString: presentationData.strings.Watch_MessageView_Title, hiddenReactionContentString: presentationData.strings.Notification_LockScreenReactionPlaceholder, hiddenStoryContentString: presentationData.strings.Notification_LockScreenStoryPlaceholder, includeNames: displayNames, authorize: authorize, completion: completion)
})
}
private func registerForNotifications(replyString: String, messagePlaceholderString: String, hiddenContentString: String, hiddenReactionContentString: String, includeNames: Bool, authorize: Bool = true, completion: @escaping (Bool) -> Void = { _ in }) {
private func registerForNotifications(replyString: String, messagePlaceholderString: String, hiddenContentString: String, hiddenReactionContentString: String, hiddenStoryContentString: String, includeNames: Bool, authorize: Bool = true, completion: @escaping (Bool) -> Void = { _ in }) {
let notificationCenter = UNUserNotificationCenter.current()
Logger.shared.log("App \(self.episodeId)", "register for notifications: get settings (authorize: \(authorize))")
notificationCenter.getNotificationSettings(completionHandler: { settings in
@ -2527,8 +2526,8 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
let groupRepliableMediaMessageCategory: UNNotificationCategory
let channelMessageCategory: UNNotificationCategory
let reactionMessageCategory: UNNotificationCategory
let storyCategory: UNNotificationCategory
if #available(iOS 11.0, *) {
var options: UNNotificationCategoryOptions = []
if includeNames {
options.insert(.hiddenPreviewsShowTitle)
@ -2547,17 +2546,7 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
groupRepliableMediaMessageCategory = UNNotificationCategory(identifier: "gm", actions: [reply], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
channelMessageCategory = UNNotificationCategory(identifier: "c", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
reactionMessageCategory = UNNotificationCategory(identifier: "t", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenReactionContentString, options: options)
} else {
let carPlayOptions: UNNotificationCategoryOptions = [.allowInCarPlay]
unknownMessageCategory = UNNotificationCategory(identifier: "unknown", actions: [], intentIdentifiers: [], options: [])
repliableMessageCategory = UNNotificationCategory(identifier: "r", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], options: carPlayOptions)
repliableMediaMessageCategory = UNNotificationCategory(identifier: "m", actions: [reply], intentIdentifiers: [], options: [])
groupRepliableMessageCategory = UNNotificationCategory(identifier: "gr", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], options: [])
groupRepliableMediaMessageCategory = UNNotificationCategory(identifier: "gm", actions: [reply], intentIdentifiers: [], options: [])
channelMessageCategory = UNNotificationCategory(identifier: "c", actions: [], intentIdentifiers: [], options: [])
reactionMessageCategory = UNNotificationCategory(identifier: "t", actions: [], intentIdentifiers: [], options: [])
}
storyCategory = UNNotificationCategory(identifier: "st", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenStoryContentString, options: options)
UNUserNotificationCenter.current().setNotificationCategories([
unknownMessageCategory,
@ -2566,7 +2555,8 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
channelMessageCategory,
reactionMessageCategory,
groupRepliableMessageCategory,
groupRepliableMediaMessageCategory
groupRepliableMediaMessageCategory,
storyCategory
])
Logger.shared.log("App \(self.episodeId)", "register for notifications: invoke registerForRemoteNotifications")

View File

@ -4524,7 +4524,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return
}
if let story = message.associatedStories[storyId], story.data.isEmpty {
self.present(UndoOverlayController(presentationData: self.presentationData, content: .info(title: nil, text: self.presentationData.strings.Story_TooltipExpired, timeout: nil), elevatedLayout: false, action: { _ in return true }), in: .current)
self.present(UndoOverlayController(presentationData: self.presentationData, content: .universal(animation: "story_expired", scale: 0.066, colors: [:], title: nil, text: self.presentationData.strings.Story_TooltipExpired, customUndoText: nil, timeout: nil), elevatedLayout: false, action: { _ in return true }), in: .current)
return
}
@ -19170,6 +19170,10 @@ func canAddMessageReactions(message: Message) -> Bool {
for media in message.media {
if let _ = media as? TelegramMediaAction {
return false
} else if let story = media as? TelegramMediaStory {
if story.isMention {
return false
}
}
}
return true

View File

@ -151,6 +151,9 @@ private func canEditMessage(accountPeerId: PeerId, limitsConfiguration: EngineCo
} else if let _ = media as? TelegramMediaInvoice {
hasUneditableAttributes = true
break
} else if let _ = media as? TelegramMediaStory {
hasUneditableAttributes = true
break
}
}
@ -562,6 +565,10 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
}
} else if let dice = media as? TelegramMediaDice {
diceEmoji = dice.emoji
} else if let story = media as? TelegramMediaStory {
if story.isMention {
isAction = true
}
}
}
}
@ -626,6 +633,8 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
if let story = media as? TelegramMediaStory {
if let story = message.associatedStories[story.storyId], story.data.isEmpty {
canPin = false
} else if story.isMention {
canPin = false
}
}
}
@ -875,136 +884,6 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
}
}
}
if context.sharedContext.immediateExperimentalUISettings.enableReactionOverrides {
for media in message.media {
if let file = media as? TelegramMediaFile, file.isAnimatedSticker {
actions.append(.action(ContextMenuActionItem(text: "Set as Reaction Effect", icon: { _ in
return nil
}, action: { c, _ in
let subItems: Signal<ContextController.Items, NoError> = context.engine.stickers.availableReactions()
|> map { reactions -> ContextController.Items in
var subActions: [ContextMenuItem] = []
if let reactions = reactions {
for reaction in reactions.reactions {
if !reaction.isEnabled || !reaction.isPremium {
continue
}
guard case let .builtin(emojiValue) = reaction.value else {
continue
}
subActions.append(.action(ContextMenuActionItem(text: emojiValue, icon: { _ in
return nil
}, action: { _, f in
let _ = updateExperimentalUISettingsInteractively(accountManager: context.sharedContext.accountManager, { settings in
var settings = settings
var currentItems: [ExperimentalUISettings.AccountReactionOverrides.Item]
if let value = settings.accountReactionEffectOverrides.first(where: { $0.accountId == context.account.id.int64 }) {
currentItems = value.items
} else {
currentItems = []
}
currentItems.removeAll(where: { $0.key == reaction.value })
currentItems.append(ExperimentalUISettings.AccountReactionOverrides.Item(
key: reaction.value,
messageId: message.id,
mediaId: file.fileId
))
settings.accountReactionEffectOverrides.removeAll(where: { $0.accountId == context.account.id.int64 })
settings.accountReactionEffectOverrides.append(ExperimentalUISettings.AccountReactionOverrides(accountId: context.account.id.int64, items: currentItems))
return settings
}).start()
f(.default)
})))
}
}
return ContextController.Items(content: .list(subActions), disablePositionLock: true, tip: nil)
}
c.pushItems(items: subItems)
})))
actions.append(.action(ContextMenuActionItem(text: "Set as Sticker Effect", icon: { _ in
return nil
}, action: { c, _ in
let stickersKey: PostboxViewKey = .orderedItemList(id: Namespaces.OrderedItemList.CloudPremiumStickers)
let subItems: Signal<ContextController.Items, NoError> = context.account.postbox.combinedView(keys: [stickersKey])
|> map { views -> [String] in
if let view = views.views[stickersKey] as? OrderedItemListView, !view.items.isEmpty {
return view.items.compactMap { item -> String? in
guard let mediaItem = item.contents.get(RecentMediaItem.self) else {
return nil
}
let file = mediaItem.media
for attribute in file.attributes {
switch attribute {
case let .Sticker(text, _, _):
return text
default:
break
}
}
return nil
}
} else {
return []
}
}
|> map { stickerNames -> ContextController.Items in
var subActions: [ContextMenuItem] = []
for stickerName in stickerNames {
subActions.append(.action(ContextMenuActionItem(text: stickerName, icon: { _ in
return nil
}, action: { _, f in
let _ = updateExperimentalUISettingsInteractively(accountManager: context.sharedContext.accountManager, { settings in
var settings = settings
var currentItems: [ExperimentalUISettings.AccountReactionOverrides.Item]
if let value = settings.accountStickerEffectOverrides.first(where: { $0.accountId == context.account.id.int64 }) {
currentItems = value.items
} else {
currentItems = []
}
currentItems.removeAll(where: { $0.key == MessageReaction.Reaction.builtin(stickerName) })
currentItems.append(ExperimentalUISettings.AccountReactionOverrides.Item(
key: .builtin(stickerName),
messageId: message.id,
mediaId: file.fileId
))
settings.accountStickerEffectOverrides.removeAll(where: { $0.accountId == context.account.id.int64 })
settings.accountStickerEffectOverrides.append(ExperimentalUISettings.AccountReactionOverrides(accountId: context.account.id.int64, items: currentItems))
return settings
}).start()
f(.default)
})))
}
return ContextController.Items(content: .list(subActions), disablePositionLock: true, tip: nil)
}
c.pushItems(items: subItems)
})))
actions.append(.separator)
break
}
}
}
}
var isDownloading = false
@ -1951,6 +1830,8 @@ func chatAvailableMessageActionsImpl(engine: TelegramEngine, accountPeerId: Peer
} else if let story = media as? TelegramMediaStory {
if let story = message.associatedStories[story.storyId], story.data.isEmpty {
isShareProtected = true
} else if story.isMention {
isShareProtected = true
}
}
}

View File

@ -214,7 +214,7 @@ final class ManagedAudioRecorderContext {
}
return ActionDisposable {
}
}), playAndRecord: true, ambient: false, mixWithOthers: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
}), playAndRecord: true, soundMuted: false, ambient: false, mixWithOthers: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
}, audioPaused: {})
self.toneRenderer = toneRenderer

View File

@ -115,6 +115,9 @@ final class OverlayInstantVideoNode: OverlayMediaItemNode {
self.videoNode.playOnceWithSound(playAndRecord: playAndRecord)
}
// No-op: this content node has no runtime mute toggle; the method exists only
// to satisfy the UniversalVideoContentNode protocol requirement.
func setSoundMuted(soundMuted: Bool) {
}
// No-op: ambient-mode override is not implemented by this content node;
// present only to satisfy the UniversalVideoContentNode protocol.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}

View File

@ -2140,6 +2140,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
private var expiringStoryListState: PeerExpiringStoryListContext.State?
private var expiringStoryListDisposable: Disposable?
private let storiesReady = ValuePromise<Bool>(true, ignoreRepeated: true)
private let _ready = Promise<Bool>()
var ready: Promise<Bool> {
return self._ready
@ -3863,6 +3865,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
self?.translationState = translationState
})
} else if peerId.namespace == Namespaces.Peer.CloudUser {
self.storiesReady.set(false)
let expiringStoryList = PeerExpiringStoryListContext(account: context.account, peerId: peerId)
self.expiringStoryList = expiringStoryList
self.expiringStoryListDisposable = (combineLatest(queue: .mainQueue(),
@ -3897,6 +3900,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
}, state.items.count, state.hasUnseen, state.hasUnseenCloseFriends)
}
self.storiesReady.set(true)
self.requestLayout(animated: false)
if self.headerNode.avatarListNode.openStories == nil {
@ -9515,10 +9520,11 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
let avatarReady = self.headerNode.avatarListNode.isReady.get()
let combinedSignal = combineLatest(queue: .mainQueue(),
avatarReady,
self.storiesReady.get(),
self.paneContainerNode.isReady.get()
)
|> map { lhs, rhs in
return lhs && rhs
|> map { a, b, c in
return a && b && c
}
self._ready.set(combinedSignal
|> filter { $0 }

View File

@ -36,6 +36,7 @@ public final class NativeVideoContent: UniversalVideoContent {
public let streamVideo: MediaPlayerStreaming
public let loopVideo: Bool
public let enableSound: Bool
public let soundMuted: Bool
public let beginWithAmbientSound: Bool
public let mixWithOthers: Bool
public let baseRate: Double
@ -55,7 +56,7 @@ public final class NativeVideoContent: UniversalVideoContent {
let displayImage: Bool
let hasSentFramesToDisplay: (() -> Void)?
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
self.id = id
self.nativeId = id
self.userLocation = userLocation
@ -78,6 +79,7 @@ public final class NativeVideoContent: UniversalVideoContent {
self.streamVideo = streamVideo
self.loopVideo = loopVideo
self.enableSound = enableSound
self.soundMuted = soundMuted
self.beginWithAmbientSound = beginWithAmbientSound
self.mixWithOthers = mixWithOthers
self.baseRate = baseRate
@ -99,7 +101,7 @@ public final class NativeVideoContent: UniversalVideoContent {
}
public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -121,6 +123,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let userLocation: MediaResourceUserLocation
private let fileReference: FileMediaReference
private let enableSound: Bool
private let soundMuted: Bool
private let beginWithAmbientSound: Bool
private let mixWithOthers: Bool
private let loopVideo: Bool
@ -180,12 +183,13 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private let hasSentFramesToDisplay: (() -> Void)?
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox
self.userLocation = userLocation
self.fileReference = fileReference
self.placeholderColor = placeholderColor
self.enableSound = enableSound
self.soundMuted = soundMuted
self.beginWithAmbientSound = beginWithAmbientSound
self.mixWithOthers = mixWithOthers
self.loopVideo = loopVideo
@ -206,7 +210,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
break
}
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
var actionAtEndImpl: (() -> Void)?
if enableSound && !loopVideo {
@ -483,6 +487,10 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.player.setForceAudioToSpeaker(forceAudioToSpeaker)
}
// Forwards the mute state to the underlying MediaPlayer. Note this only
// mutes/unmutes audio output — it does not pause playback or change the
// player's enableSound configuration chosen at init.
func setSoundMuted(soundMuted: Bool) {
    self.player.setSoundMuted(soundMuted: soundMuted)
}
// Forwards the ambient-mode override to the underlying MediaPlayer
// (presumably switching the audio session between ambient and playback
// categories — behavior lives in MediaPlayer; confirm there).
func continueWithOverridingAmbientMode(isAmbient: Bool) {
    self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}

View File

@ -430,6 +430,9 @@ private final class PlatformVideoContentNode: ASDisplayNode, UniversalVideoConte
// No-op: play-once-with-sound is not supported by this content node;
// the method exists only to satisfy the UniversalVideoContentNode protocol.
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
}
// No-op: runtime sound muting is not implemented by this content node;
// required by the UniversalVideoContentNode protocol.
func setSoundMuted(soundMuted: Bool) {
}
// No-op: ambient-mode override is not implemented by this content node;
// required by the UniversalVideoContentNode protocol.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}

View File

@ -267,6 +267,9 @@ private final class SystemVideoContentNode: ASDisplayNode, UniversalVideoContent
// No-op: play-once-with-sound is not supported by this content node;
// the method exists only to satisfy the UniversalVideoContentNode protocol.
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
}
// No-op: runtime sound muting is not implemented by this content node;
// required by the UniversalVideoContentNode protocol.
func setSoundMuted(soundMuted: Bool) {
}
// No-op: ambient-mode override is not implemented by this content node;
// required by the UniversalVideoContentNode protocol.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}

View File

@ -164,6 +164,9 @@ final class WebEmbedVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
}
}
// No-op: web-embed playback has no mute control exposed here;
// required by the UniversalVideoContentNode protocol.
func setSoundMuted(soundMuted: Bool) {
}
// No-op: ambient-mode override is not applicable to web-embed playback;
// required by the UniversalVideoContentNode protocol.
func continueWithOverridingAmbientMode(isAmbient: Bool) {
}