Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit: Stories
This commit is contained in:
parent d16d5b8bb8
commit 6e17762083
@@ -1171,6 +1171,7 @@ private final class NotificationServiceHandler {
                 }*/
                 
                 if let storyId {
+                    content.category = "st"
                     action = .pollStories(peerId: peerId, content: content, storyId: storyId)
                 } else {
                     action = .poll(peerId: peerId, content: content, messageId: messageIdValue)
@@ -9714,3 +9714,5 @@ Sorry for the inconvenience.";
 
 "AutoDownloadSettings.Stories" = "Stories";
 "MediaEditor.Draft" = "Draft";
+
+"Notification.LockScreenStoryPlaceholder" = "New Story";
@@ -23,6 +23,7 @@ public protocol UniversalVideoContentNode: AnyObject {
     func setSoundEnabled(_ value: Bool)
     func seek(_ timestamp: Double)
     func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
+    func setSoundMuted(soundMuted: Bool)
     func continueWithOverridingAmbientMode(isAmbient: Bool)
     func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool)
     func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
@@ -284,6 +285,14 @@ public final class UniversalVideoNode: ASDisplayNode {
         })
     }
     
+    public func setSoundMuted(soundMuted: Bool) {
+        self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
+            if let contentNode = contentNode {
+                contentNode.setSoundMuted(soundMuted: soundMuted)
+            }
+        })
+    }
+    
     public func continueWithOverridingAmbientMode(isAmbient: Bool) {
         self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
             if let contentNode = contentNode {
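A minimal sketch of the forwarding pattern this hunk introduces; the protocol and class below are simplified stand-ins, not the real UniversalVideoContentNode/UniversalVideoNode declarations, and only illustrate that muting is a separate, lighter operation than disabling sound:

```swift
import Foundation

// Stand-in for UniversalVideoContentNode: names here are illustrative only.
protocol VideoContentNode: AnyObject {
    func setSoundEnabled(_ value: Bool)
    func setSoundMuted(soundMuted: Bool)
}

final class VideoNode {
    private var contentNode: VideoContentNode?

    // Mirrors the new API: muting only silences the output; unlike
    // setSoundEnabled(false) it does not tear down the audio track or session.
    func setSoundMuted(soundMuted: Bool) {
        self.contentNode?.setSoundMuted(soundMuted: soundMuted)
    }
}
```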
@@ -1856,101 +1856,49 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
(This hunk wraps the story subscription setup, previously run unconditionally, in `if self.previewing { ... } else { ... }`; the existing body is re-indented into the `else` branch, and the upload-progress and archive-subscription handling that used to follow it here is re-added, unchanged apart from indentation, in the next hunk.)
                 }
             })
             
+            if self.previewing {
+                self.storiesReady.set(.single(true))
+            } else {
+                self.storySubscriptionsDisposable = (self.context.engine.messages.storySubscriptions(isHidden: self.location == .chatList(groupId: .archive))
+                |> deliverOnMainQueue).start(next: { [weak self] rawStorySubscriptions in
+                    guard let self else {
+                        return
+                    }
+                    
+                    self.rawStorySubscriptions = rawStorySubscriptions
+                    var items: [EngineStorySubscriptions.Item] = []
+                    if self.shouldFixStorySubscriptionOrder {
+                        for peerId in self.fixedStorySubscriptionOrder {
+                            if let item = rawStorySubscriptions.items.first(where: { $0.peer.id == peerId }) {
+                                items.append(item)
+                            }
+                        }
+                    }
+                    for item in rawStorySubscriptions.items {
+                        if !items.contains(where: { $0.peer.id == item.peer.id }) {
+                            items.append(item)
+                        }
+                    }
+                    self.orderedStorySubscriptions = EngineStorySubscriptions(
+                        accountItem: rawStorySubscriptions.accountItem,
+                        items: items,
+                        hasMoreToken: rawStorySubscriptions.hasMoreToken
+                    )
+                    self.fixedStorySubscriptionOrder = items.map(\.peer.id)
+                    
+                    let transition: ContainedViewLayoutTransition
+                    if self.didAppear {
+                        transition = .animated(duration: 0.4, curve: .spring)
+                    } else {
+                        transition = .immediate
+                    }
+                    
+                    self.chatListDisplayNode.temporaryContentOffsetChangeTransition = transition
+                    self.requestLayout(transition: transition)
+                    self.chatListDisplayNode.temporaryContentOffsetChangeTransition = nil
+                    
+                    if !shouldDisplayStoriesInChatListHeader(storySubscriptions: rawStorySubscriptions, isHidden: self.location == .chatList(groupId: .archive)) {
+                        self.chatListDisplayNode.scrollToTopIfStoriesAreExpanded()
+                    }
+                    
+                    self.storiesReady.set(.single(true))
@@ -1961,9 +1909,65 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
(Continuation of the restructuring above: the upload-progress and archive-subscription handling removed from the previous hunk is re-added here inside the `else` branch, re-indented but otherwise unchanged.)
                     }
                     self.maybeDisplayStoryTooltip()
                 })
-                
-                self.hasPendingStoriesPromise.set(rawStoryArchiveSubscriptions.accountItem?.hasPending ?? false)
             })
+            
+            self.storyProgressDisposable = (self.context.engine.messages.allStoriesUploadProgress()
+            |> deliverOnMainQueue).start(next: { [weak self] progress in
+                guard let self else {
+                    return
+                }
+                self.updateStoryUploadProgress(progress)
+            })
+            
+            if case .chatList(.root) = self.location {
+                self.storyArchiveSubscriptionsDisposable = (self.context.engine.messages.storySubscriptions(isHidden: true)
+                |> deliverOnMainQueue).start(next: { [weak self] rawStoryArchiveSubscriptions in
+                    guard let self else {
+                        return
+                    }
+                    
+                    self.rawStoryArchiveSubscriptions = rawStoryArchiveSubscriptions
+                    
+                    let archiveStoryState: ChatListNodeState.StoryState?
+                    if rawStoryArchiveSubscriptions.items.isEmpty {
+                        archiveStoryState = nil
+                    } else {
+                        var unseenCount = 0
+                        for item in rawStoryArchiveSubscriptions.items {
+                            if item.hasUnseen {
+                                unseenCount += 1
+                            }
+                        }
+                        let hasUnseenCloseFriends = rawStoryArchiveSubscriptions.items.contains(where: { $0.hasUnseenCloseFriends })
+                        archiveStoryState = ChatListNodeState.StoryState(
+                            stats: EngineChatList.StoryStats(
+                                totalCount: rawStoryArchiveSubscriptions.items.count,
+                                unseenCount: unseenCount,
+                                hasUnseenCloseFriends: hasUnseenCloseFriends
+                            ),
+                            hasUnseenCloseFriends: hasUnseenCloseFriends
+                        )
+                    }
+                    
+                    self.chatListDisplayNode.mainContainerNode.currentItemNode.updateState { chatListState in
+                        var chatListState = chatListState
+                        chatListState.archiveStoryState = archiveStoryState
+                        return chatListState
+                    }
+                    
+                    self.storiesReady.set(.single(true))
+                    
+                    Queue.mainQueue().after(1.0, { [weak self] in
+                        guard let self else {
+                            return
+                        }
+                        self.maybeDisplayStoryTooltip()
+                    })
+                    
+                    self.hasPendingStoriesPromise.set(rawStoryArchiveSubscriptions.accountItem?.hasPending ?? false)
+                })
+            }
         }
     }
 }
@@ -2169,7 +2169,7 @@ final class ChatListControllerNode: ASDisplayNode, UIGestureRecognizerDelegate {
             return
         }
         
-        if let storySubscriptions = self.controller?.orderedStorySubscriptions {
+        if let controller = self.controller, let storySubscriptions = controller.orderedStorySubscriptions, shouldDisplayStoriesInChatListHeader(storySubscriptions: storySubscriptions, isHidden: controller.location == .chatList(groupId: .archive)) {
             let _ = storySubscriptions
             
             self.tempAllowAvatarExpansion = true
@@ -124,6 +124,7 @@ private final class MediaPlayerContext {
     private var baseRate: Double
     private let fetchAutomatically: Bool
     private var playAndRecord: Bool
+    private var soundMuted: Bool
    private var ambient: Bool
     private var mixWithOthers: Bool
     private var keepAudioSessionWhilePaused: Bool
@@ -150,7 +151,7 @@ private final class MediaPlayerContext {
     
     private var stoppedAtEnd = false
     
-    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
+    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool) {
         assert(queue.isCurrent())
         
         self.queue = queue
@@ -169,6 +170,7 @@ private final class MediaPlayerContext {
         self.baseRate = baseRate
         self.fetchAutomatically = fetchAutomatically
         self.playAndRecord = playAndRecord
+        self.soundMuted = soundMuted
         self.ambient = ambient
         self.mixWithOthers = mixWithOthers
         self.keepAudioSessionWhilePaused = keepAudioSessionWhilePaused
@@ -404,7 +406,7 @@ private final class MediaPlayerContext {
             self.audioRenderer = nil
             
             let queue = self.queue
-            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, soundMuted: self.soundMuted, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -483,7 +485,7 @@ private final class MediaPlayerContext {
         self.lastStatusUpdateTimestamp = nil
         if self.enableSound {
             let queue = self.queue
-            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
+            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), forAudioVideoMessage: self.isAudioVideoMessage, playAndRecord: self.playAndRecord, soundMuted: self.soundMuted, ambient: self.ambient, mixWithOthers: self.mixWithOthers, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -601,43 +603,15 @@ private final class MediaPlayerContext {
         self.stoppedAtEnd = false
     }
     
+    fileprivate func setSoundMuted(soundMuted: Bool) {
+        self.soundMuted = soundMuted
+        self.audioRenderer?.renderer.setSoundMuted(soundMuted: soundMuted)
+    }
+    
     fileprivate func continueWithOverridingAmbientMode(isAmbient: Bool) {
-        if !isAmbient {
-            self.ambient = false
-            var loadedState: MediaPlayerLoadedState?
-            switch self.state {
-            case .empty:
-                break
-            case let .playing(currentLoadedState):
-                loadedState = currentLoadedState
-            case let .paused(currentLoadedState):
-                loadedState = currentLoadedState
-            case .seeking:
-                break
-            }
-            
-            if let loadedState = loadedState {
-                let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
-                self.seek(timestamp: timestamp, action: .play)
-            }
-        } else {
-            self.ambient = true
-            var loadedState: MediaPlayerLoadedState?
-            switch self.state {
-            case .empty:
-                break
-            case let .playing(currentLoadedState):
-                loadedState = currentLoadedState
-            case let .paused(currentLoadedState):
-                loadedState = currentLoadedState
-            case .seeking:
-                break
-            }
-            
-            if let loadedState = loadedState {
-                let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
-                self.seek(timestamp: timestamp, action: .play)
-            }
-        }
+        if self.ambient != isAmbient {
+            self.ambient = isAmbient
+            self.audioRenderer?.renderer.reconfigureAudio(ambient: self.ambient)
+        }
     }
     
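An illustrative sketch of the control flow after this hunk, using stand-in types: instead of re-seeking to the current timestamp to force a restart, the context now only flips the flag when it actually changes and asks the existing audio renderer to reconfigure its session in place.

```swift
// Simplified model of MediaPlayerContext.continueWithOverridingAmbientMode;
// PlayerContext and reconfigureRenderer are hypothetical stand-ins.
final class PlayerContext {
    private var ambient: Bool = false
    var reconfigureRenderer: ((Bool) -> Void)?

    func continueWithOverridingAmbientMode(isAmbient: Bool) {
        guard self.ambient != isAmbient else {
            return // no mode change, nothing to do
        }
        self.ambient = isAmbient
        // Reconfigure the running renderer instead of seeking to restart playback.
        self.reconfigureRenderer?(isAmbient)
    }
}
```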
@@ -1154,10 +1128,10 @@ public final class MediaPlayer {
         }
     }
     
-    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
+    public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, userLocation: MediaResourceUserLocation, userContentType: MediaResourceUserContentType, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, soundMuted: Bool = false, ambient: Bool = false, mixWithOthers: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, storeAfterDownload: (() -> Void)? = nil, isAudioVideoMessage: Bool = false) {
         let audioLevelPipe = self.audioLevelPipe
         self.queue.async {
-            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
+            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
             self.contextRef = Unmanaged.passRetained(context)
         }
     }
@@ -1185,6 +1159,14 @@ public final class MediaPlayer {
         }
     }
     
+    public func setSoundMuted(soundMuted: Bool) {
+        self.queue.async {
+            if let context = self.contextRef?.takeUnretainedValue() {
+                context.setSoundMuted(soundMuted: soundMuted)
+            }
+        }
+    }
+    
     public func continueWithOverridingAmbientMode(isAmbient: Bool) {
         self.queue.async {
             if let context = self.contextRef?.takeUnretainedValue() {
@@ -237,7 +237,9 @@ private final class AudioPlayerRendererContext {
     let audioSessionDisposable = MetaDisposable()
     var audioSessionControl: ManagedAudioSessionControl?
     let playAndRecord: Bool
-    let ambient: Bool
+    var soundMuted: Bool
+    var ambient: Bool
+    var volume: Double = 1.0
     let mixWithOthers: Bool
     var forceAudioToSpeaker: Bool {
         didSet {
@@ -252,7 +254,7 @@ private final class AudioPlayerRendererContext {
         }
     }
     
-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool, playAndRecord: Bool, useVoiceProcessingMode: Bool, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())
         
         self.audioSession = audioSession
@@ -267,6 +269,7 @@ private final class AudioPlayerRendererContext {
         
         self.playAndRecord = playAndRecord
         self.useVoiceProcessingMode = useVoiceProcessingMode
+        self.soundMuted = soundMuted
         self.ambient = ambient
         self.mixWithOthers = mixWithOthers
         
@@ -318,8 +321,10 @@ private final class AudioPlayerRendererContext {
     }
     
     fileprivate func setVolume(_ volume: Double) {
+        self.volume = volume
+        
         if let mixerAudioUnit = self.mixerAudioUnit {
-            AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume), 0)
+            AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume) * (self.soundMuted ? 0.0 : 1.0), 0)
         }
     }
     
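A small sketch of the volume rule used above (the `applyMixerVolume` helper is hypothetical; the AudioToolbox call and constants are the ones used in the hunk): the mixer input volume is the stored user volume gated by the mute flag, so unmuting later restores the previous volume exactly.

```swift
import AudioToolbox

// Hypothetical helper: compute and apply the effective mixer volume.
func applyMixerVolume(_ mixerAudioUnit: AudioUnit, volume: Double, soundMuted: Bool) {
    let effectiveVolume = Float32(volume) * (soundMuted ? 0.0 : 1.0)
    AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, effectiveVolume, 0)
}
```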
@@ -345,6 +350,36 @@ private final class AudioPlayerRendererContext {
         }
     }
     
+    fileprivate func setSoundMuted(soundMuted: Bool) {
+        self.soundMuted = soundMuted
+        
+        if let mixerAudioUnit = self.mixerAudioUnit {
+            AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(self.volume) * (self.soundMuted ? 0.0 : 1.0), 0)
+        }
+    }
+    
+    fileprivate func reconfigureAudio(ambient: Bool) {
+        self.ambient = ambient
+        
+        if let audioGraph = self.audioGraph {
+            var isRunning: DarwinBoolean = false
+            AUGraphIsRunning(audioGraph, &isRunning)
+            if isRunning.boolValue {
+                AUGraphStop(audioGraph)
+            }
+        }
+        self.audioSessionControl?.setType(self.ambient ? .ambient : (self.playAndRecord ? .playWithPossiblePortOverride : .play(mixWithOthers: self.mixWithOthers)), completion: { [weak self] in
+            audioPlayerRendererQueue.async {
+                guard let self else {
+                    return
+                }
+                if let audioGraph = self.audioGraph {
+                    AUGraphStart(audioGraph)
+                }
+            }
+        })
+    }
+    
     fileprivate func flushBuffers(at timestamp: CMTime, completion: () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())
         
@@ -554,6 +589,8 @@ private final class AudioPlayerRendererContext {
             
             if self.forAudioVideoMessage && !self.ambient {
                 AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, self.forceAudioToSpeaker ? 0.0 : 12.0, 0)
+            } else if self.soundMuted {
+                AudioUnitSetParameter(equalizerAudioUnit, kAUNBandEQParam_GlobalGain, kAudioUnitScope_Global, 0, 0.0, 0)
             }
             
             var maybeOutputAudioUnit: AudioComponentInstance?
|
|||||||
AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
||||||
AudioUnitSetProperty(equalizerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
AudioUnitSetProperty(equalizerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
||||||
AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
|
||||||
|
|
||||||
|
AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(self.volume) * (self.soundMuted ? 0.0 : 1.0), 0)
|
||||||
|
|
||||||
guard AUGraphInitialize(audioGraph) == noErr else {
|
guard AUGraphInitialize(audioGraph) == noErr else {
|
||||||
return
|
return
|
||||||
@@ -827,7 +866,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase
     
-    public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, forAudioVideoMessage: Bool = false, playAndRecord: Bool, useVoiceProcessingMode: Bool = false, soundMuted: Bool, ambient: Bool, mixWithOthers: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
|
|||||||
self.audioTimebase = audioTimebase!
|
self.audioTimebase = audioTimebase!
|
||||||
|
|
||||||
audioPlayerRendererQueue.async {
|
audioPlayerRendererQueue.async {
|
||||||
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
|
let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, forAudioVideoMessage: forAudioVideoMessage, playAndRecord: playAndRecord, useVoiceProcessingMode: useVoiceProcessingMode, soundMuted: soundMuted, ambient: ambient, mixWithOthers: mixWithOthers, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
|
||||||
self.contextRef = Unmanaged.passRetained(context)
|
self.contextRef = Unmanaged.passRetained(context)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -870,6 +909,24 @@ public final class MediaPlayerAudioRenderer {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public func setSoundMuted(soundMuted: Bool) {
|
||||||
|
audioPlayerRendererQueue.async {
|
||||||
|
if let contextRef = self.contextRef {
|
||||||
|
let context = contextRef.takeUnretainedValue()
|
||||||
|
context.setSoundMuted(soundMuted: soundMuted)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public func reconfigureAudio(ambient: Bool) {
|
||||||
|
audioPlayerRendererQueue.async {
|
||||||
|
if let contextRef = self.contextRef {
|
||||||
|
let context = contextRef.takeUnretainedValue()
|
||||||
|
context.reconfigureAudio(ambient: ambient)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public func setRate(_ rate: Double) {
|
public func setRate(_ rate: Double) {
|
||||||
audioPlayerRendererQueue.async {
|
audioPlayerRendererQueue.async {
|
||||||
if let contextRef = self.contextRef {
|
if let contextRef = self.contextRef {
|
||||||
|
@@ -120,7 +120,7 @@ public enum AudioSessionOutputMode: Equatable {
 
 private final class HolderRecord {
     let id: Int32
-    let audioSessionType: ManagedAudioSessionType
+    var audioSessionType: ManagedAudioSessionType
     let control: ManagedAudioSessionControl
     let activate: (ManagedAudioSessionControl) -> Void
     let deactivate: (Bool) -> Signal<Void, NoError>
@@ -161,12 +161,14 @@ public class ManagedAudioSessionControl {
     private let activateImpl: (ManagedAudioSessionControlActivate) -> Void
     private let setupAndActivateImpl: (Bool, ManagedAudioSessionControlActivate) -> Void
     private let setOutputModeImpl: (AudioSessionOutputMode) -> Void
+    private let setTypeImpl: (ManagedAudioSessionType, @escaping () -> Void) -> Void
     
-    fileprivate init(setupImpl: @escaping (Bool) -> Void, activateImpl: @escaping (ManagedAudioSessionControlActivate) -> Void, setOutputModeImpl: @escaping (AudioSessionOutputMode) -> Void, setupAndActivateImpl: @escaping (Bool, ManagedAudioSessionControlActivate) -> Void) {
+    fileprivate init(setupImpl: @escaping (Bool) -> Void, activateImpl: @escaping (ManagedAudioSessionControlActivate) -> Void, setOutputModeImpl: @escaping (AudioSessionOutputMode) -> Void, setupAndActivateImpl: @escaping (Bool, ManagedAudioSessionControlActivate) -> Void, setTypeImpl: @escaping (ManagedAudioSessionType, @escaping () -> Void) -> Void) {
         self.setupImpl = setupImpl
         self.activateImpl = activateImpl
         self.setOutputModeImpl = setOutputModeImpl
         self.setupAndActivateImpl = setupAndActivateImpl
+        self.setTypeImpl = setTypeImpl
     }
     
     public func setup(synchronous: Bool = false) {
@@ -184,6 +186,10 @@ public class ManagedAudioSessionControl {
     public func setOutputMode(_ mode: AudioSessionOutputMode) {
         self.setOutputModeImpl(mode)
     }
+    
+    public func setType(_ audioSessionType: ManagedAudioSessionType, completion: @escaping () -> Void) {
+        self.setTypeImpl(audioSessionType, completion)
+    }
 }
 
 public final class ManagedAudioSession: NSObject {
@@ -548,6 +554,24 @@ public final class ManagedAudioSession: NSObject {
                     queue.async(f)
                 }
             }
+        }, setTypeImpl: { [weak self] audioSessionType, completion in
+            queue.async {
+                if let strongSelf = self {
+                    for holder in strongSelf.holders {
+                        if holder.id == id {
+                            if holder.audioSessionType != audioSessionType {
+                                holder.audioSessionType = audioSessionType
+                            }
+                            
+                            if holder.active {
+                                strongSelf.updateAudioSessionType(audioSessionType)
+                            }
+                        }
+                    }
+                }
+                
+                completion()
+            }
         }), activate: { [weak self] state in
             manualActivate(state)
             queue.async {
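An illustrative model of the new setType path, with the holder bookkeeping heavily simplified (the types, property names, and the string-typed session type below are stand-ins, not the real ManagedAudioSession API): the control records the requesting holder's desired session type and only re-applies the audio session configuration immediately when that holder currently owns the session.

```swift
import Foundation

final class AudioSessionHolder {
    let id: Int32
    var type: String            // e.g. "play", "ambient" (stand-in for ManagedAudioSessionType)
    var isActive: Bool = false
    init(id: Int32, type: String) { self.id = id; self.type = type }
}

final class AudioSessionManagerSketch {
    private var holders: [AudioSessionHolder] = []
    var reapply: ((String) -> Void)?   // stands in for updateAudioSessionType(_:)

    func setType(holderId: Int32, newType: String, completion: @escaping () -> Void) {
        for holder in holders where holder.id == holderId {
            if holder.type != newType {
                holder.type = newType
            }
            if holder.isActive {
                // Only the active holder triggers an immediate session reconfiguration.
                reapply?(newType)
            }
        }
        completion()
    }
}
```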
@@ -801,7 +825,11 @@ public final class ManagedAudioSession: NSObject {
             
             switch type {
             case .play(mixWithOthers: true), .ambient:
-                try AVAudioSession.sharedInstance().setActive(false)
+                do {
+                    try AVAudioSession.sharedInstance().setActive(false)
+                } catch let error {
+                    managedAudioSessionLog("ManagedAudioSession setActive error \(error)")
+                }
             default:
                 break
             }
@@ -1004,6 +1032,12 @@ public final class ManagedAudioSession: NSObject {
         }
     }
     
+    private func updateAudioSessionType(_ audioSessionType: ManagedAudioSessionType) {
+        if let (_, outputMode) = self.currentTypeAndOutputMode {
+            self.setup(type: audioSessionType, outputMode: outputMode, activateNow: true)
+        }
+    }
+    
     private func updateOutputMode(_ outputMode: AudioSessionOutputMode) {
         if let (type, _) = self.currentTypeAndOutputMode {
             self.setup(type: type, outputMode: outputMode, activateNow: true)
@@ -724,7 +724,7 @@ private func apiInputPrivacyRules(privacy: EngineStoryPrivacy, transaction: Tran
         privacyRules = [.inputPrivacyValueAllowCloseFriends]
     case .nobody:
         if privacy.additionallyIncludePeers.isEmpty {
-            privacyRules = [.inputPrivacyValueDisallowAll]
+            privacyRules = [.inputPrivacyValueAllowUsers(users: [.inputUserSelf])]
         } else {
             privacyRules = []
         }
@@ -171,7 +171,7 @@ public extension StoryContainerScreen {
     |> take(1)
     |> mapToSignal { state -> Signal<StoryContentContextState, NoError> in
         if let slice = state.slice {
-            #if DEBUG && true
+            #if DEBUG && false
             if "".isEmpty {
                 return .single(state)
                 |> delay(4.0, queue: .mainQueue())
@@ -153,7 +153,8 @@ final class StoryItemContentComponent: Component {
             imageReference: nil,
             streamVideo: .story,
             loopVideo: true,
-            enableSound: component.audioMode != .off,
+            enableSound: true,
+            soundMuted: component.audioMode == .off,
             beginWithAmbientSound: component.audioMode == .ambient,
             mixWithOthers: true,
             useLargeThumbnail: false,
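A sketch of how the story audio modes map onto the player flags after this change; `StoryAudioMode` and the tuple labels below are simplified stand-ins for the component's real types. Sound is now always enabled at the player level, and "off" is expressed as a muted-but-running audio track, which is what lets unmuting take effect instantly.

```swift
// Hypothetical mapping, mirroring the parameters passed in the hunk above.
enum StoryAudioMode {
    case on, off, ambient
}

func playerFlags(for mode: StoryAudioMode) -> (enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool) {
    return (
        enableSound: true,                       // the audio track always exists
        soundMuted: mode == .off,                // "off" just zeroes the mixer volume
        beginWithAmbientSound: mode == .ambient  // ambient mode starts in the ambient session type
    )
}
```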
@@ -255,7 +256,7 @@ final class StoryItemContentComponent: Component {
     override func leaveAmbientMode() {
         if let videoNode = self.videoNode {
             self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
-            videoNode.setSoundEnabled(true)
+            videoNode.setSoundMuted(soundMuted: false)
             videoNode.continueWithOverridingAmbientMode(isAmbient: false)
         }
     }
@@ -266,7 +267,7 @@ final class StoryItemContentComponent: Component {
             if ambient {
                 videoNode.continueWithOverridingAmbientMode(isAmbient: true)
             } else {
-                videoNode.setSoundEnabled(false)
+                videoNode.setSoundMuted(soundMuted: true)
             }
         }
     }
@@ -25,8 +25,6 @@ final class StoryItemImageView: UIView {
     private(set) var isContentLoaded: Bool = false
     var didLoadContents: (() -> Void)?
     
-    private var isCaptureProtected: Bool = false
-    
     override init(frame: CGRect) {
         self.contentView = UIImageView()
         self.contentView.contentMode = .scaleAspectFill
@@ -44,8 +42,8 @@ final class StoryItemImageView: UIView {
         self.disposable?.dispose()
     }
     
-    private func updateImage(image: UIImage) {
-        if self.isCaptureProtected {
+    private func updateImage(image: UIImage, isCaptureProtected: Bool) {
+        if isCaptureProtected {
             let captureProtectedContentLayer: CaptureProtectedContentLayer
             if let current = self.captureProtectedContentLayer {
                 captureProtectedContentLayer = current
@@ -71,8 +69,6 @@ final class StoryItemImageView: UIView {
     }
     
     func update(context: AccountContext, strings: PresentationStrings, peer: EnginePeer, storyId: Int32, media: EngineMedia, size: CGSize, isCaptureProtected: Bool, attemptSynchronous: Bool, transition: Transition) {
-        self.isCaptureProtected = isCaptureProtected
-        
         self.backgroundColor = isCaptureProtected ? UIColor(rgb: 0x181818) : nil
         
         var dimensions: CGSize?
@@ -90,14 +86,28 @@ final class StoryItemImageView: UIView {
                 dimensions = representation.dimensions.cgSize
                 
                 if isMediaUpdated {
+                    if isCaptureProtected {
+                        if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
+                            if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
+                                self.updateImage(image: image, isCaptureProtected: false)
+                                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.1, execute: { [weak self] in
+                                    guard let self else {
+                                        return
+                                    }
+                                    self.contentView.image = nil
+                                })
+                            }
+                        }
+                    }
+                    
                     if attemptSynchronous, let path = context.account.postbox.mediaBox.completedResourcePath(id: representation.resource.id, pathExtension: nil) {
                         if #available(iOS 15.0, *) {
                             if let image = UIImage(contentsOfFile: path)?.preparingForDisplay() {
-                                self.updateImage(image: image)
+                                self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                             }
                         } else {
                             if let image = UIImage(contentsOfFile: path)?.precomposed() {
-                                self.updateImage(image: image)
+                                self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                             }
                         }
                         self.isContentLoaded = true
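A sketch of the capture-protected placeholder path added above; `blurredImage` and the capture-protected layer belong to the project, so the helper below only illustrates the ordering with a plain UIImageView: a blurred tiny thumbnail is shown in the ordinary image view first and cleared shortly afterwards, so the full-resolution frame only ever lives inside the capture-protected layer.

```swift
import UIKit

// Hypothetical helper illustrating "show blurred placeholder, then clear it".
func showBlurredPlaceholder(in imageView: UIImageView, thumbnail: UIImage, blur: (UIImage) -> UIImage?) {
    if let blurred = blur(thumbnail) {
        imageView.image = blurred
        // Clear the unprotected placeholder after a short delay.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak imageView] in
            imageView?.image = nil
        }
    }
}
```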
@@ -105,7 +115,7 @@ final class StoryItemImageView: UIView {
                 } else {
                     if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                         if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
-                            self.updateImage(image: image)
+                            self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                         }
                     }
                     
@@ -137,7 +147,7 @@ final class StoryItemImageView: UIView {
                         return
                     }
                     if let image {
-                        self.updateImage(image: image)
+                        self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                         self.isContentLoaded = true
                         self.didLoadContents?()
                     }
@@ -149,16 +159,30 @@ final class StoryItemImageView: UIView {
                 dimensions = file.dimensions?.cgSize
                 
                 if isMediaUpdated {
+                    if isCaptureProtected {
+                        if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
+                            if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
+                                self.updateImage(image: image, isCaptureProtected: false)
+                                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.1, execute: { [weak self] in
+                                    guard let self else {
+                                        return
+                                    }
+                                    self.contentView.image = nil
+                                })
+                            }
+                        }
+                    }
+                    
                     let cachedPath = context.account.postbox.mediaBox.cachedRepresentationCompletePath(file.resource.id, representation: CachedVideoFirstFrameRepresentation())
                     
                     if attemptSynchronous, FileManager.default.fileExists(atPath: cachedPath) {
                         if #available(iOS 15.0, *) {
                             if let image = UIImage(contentsOfFile: cachedPath)?.preparingForDisplay() {
-                                self.updateImage(image: image)
+                                self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                             }
                         } else {
                             if let image = UIImage(contentsOfFile: cachedPath)?.precomposed() {
-                                self.updateImage(image: image)
+                                self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                             }
                         }
                         self.isContentLoaded = true
@@ -166,7 +190,7 @@ final class StoryItemImageView: UIView {
                 } else {
                     if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                         if let image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3) {
-                            self.updateImage(image: image)
+                            self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                         }
                     }
                     
@@ -195,7 +219,7 @@ final class StoryItemImageView: UIView {
                         return
                     }
                     if let image {
-                        self.updateImage(image: image)
+                        self.updateImage(image: image, isCaptureProtected: isCaptureProtected)
                         self.isContentLoaded = true
                         self.didLoadContents?()
                     }
@@ -217,7 +241,7 @@ final class StoryItemImageView: UIView {
             }
         }
         
-        if self.isCaptureProtected {
+        if isCaptureProtected {
             let captureProtectedInfo: ComponentView<Empty>
             var captureProtectedInfoTransition = transition
             if let current = self.captureProtectedInfo {
@@ -363,6 +363,8 @@ final class StoryItemSetContainerSendMessage {
             controller.present(tooltipScreen, in: .current)
             self.tooltipScreen = tooltipScreen
             view.updateIsProgressPaused()
+            
+            HapticFeedback().success()
         }
     
     func presentSendMessageOptions(view: StoryItemSetContainerComponent.View, sourceView: UIView, gesture: ContextGesture?) {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
@available(iOS 10.0, *)
|
|
||||||
func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: @escaping () -> Void) {
|
func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: @escaping () -> Void) {
|
||||||
let _ = (accountIdFromNotification(response.notification, sharedContext: self.sharedContextPromise.get())
|
let _ = (accountIdFromNotification(response.notification, sharedContext: self.sharedContextPromise.get())
|
||||||
|> deliverOnMainQueue).start(next: { accountId in
|
|> deliverOnMainQueue).start(next: { accountId in
|
||||||
@@ -2493,11 +2492,11 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
             return settings.displayNameOnLockscreen
         }
         |> deliverOnMainQueue).start(next: { displayNames in
-            self.registerForNotifications(replyString: presentationData.strings.Notification_Reply, messagePlaceholderString: presentationData.strings.Conversation_InputTextPlaceholder, hiddenContentString: presentationData.strings.Watch_MessageView_Title, hiddenReactionContentString: presentationData.strings.Notification_LockScreenReactionPlaceholder, includeNames: displayNames, authorize: authorize, completion: completion)
+            self.registerForNotifications(replyString: presentationData.strings.Notification_Reply, messagePlaceholderString: presentationData.strings.Conversation_InputTextPlaceholder, hiddenContentString: presentationData.strings.Watch_MessageView_Title, hiddenReactionContentString: presentationData.strings.Notification_LockScreenReactionPlaceholder, hiddenStoryContentString: presentationData.strings.Notification_LockScreenStoryPlaceholder, includeNames: displayNames, authorize: authorize, completion: completion)
         })
     }
     
-    private func registerForNotifications(replyString: String, messagePlaceholderString: String, hiddenContentString: String, hiddenReactionContentString: String, includeNames: Bool, authorize: Bool = true, completion: @escaping (Bool) -> Void = { _ in }) {
+    private func registerForNotifications(replyString: String, messagePlaceholderString: String, hiddenContentString: String, hiddenReactionContentString: String, hiddenStoryContentString: String, includeNames: Bool, authorize: Bool = true, completion: @escaping (Bool) -> Void = { _ in }) {
         let notificationCenter = UNUserNotificationCenter.current()
         Logger.shared.log("App \(self.episodeId)", "register for notifications: get settings (authorize: \(authorize))")
         notificationCenter.getNotificationSettings(completionHandler: { settings in
@@ -2527,38 +2526,28 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
         let groupRepliableMediaMessageCategory: UNNotificationCategory
         let channelMessageCategory: UNNotificationCategory
         let reactionMessageCategory: UNNotificationCategory
+        let storyCategory: UNNotificationCategory
 
-        if #available(iOS 11.0, *) {
-            var options: UNNotificationCategoryOptions = []
-            if includeNames {
-                options.insert(.hiddenPreviewsShowTitle)
-            }
-            
-            var carPlayOptions = options
-            carPlayOptions.insert(.allowInCarPlay)
-            if #available(iOS 13.2, *) {
-                carPlayOptions.insert(.allowAnnouncement)
-            }
-            
-            unknownMessageCategory = UNNotificationCategory(identifier: "unknown", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
-            repliableMessageCategory = UNNotificationCategory(identifier: "r", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: carPlayOptions)
-            repliableMediaMessageCategory = UNNotificationCategory(identifier: "m", actions: [reply], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: carPlayOptions)
-            groupRepliableMessageCategory = UNNotificationCategory(identifier: "gr", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
-            groupRepliableMediaMessageCategory = UNNotificationCategory(identifier: "gm", actions: [reply], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
-            channelMessageCategory = UNNotificationCategory(identifier: "c", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
-            reactionMessageCategory = UNNotificationCategory(identifier: "t", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenReactionContentString, options: options)
-        } else {
-            let carPlayOptions: UNNotificationCategoryOptions = [.allowInCarPlay]
-            
-            unknownMessageCategory = UNNotificationCategory(identifier: "unknown", actions: [], intentIdentifiers: [], options: [])
-            repliableMessageCategory = UNNotificationCategory(identifier: "r", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], options: carPlayOptions)
-            repliableMediaMessageCategory = UNNotificationCategory(identifier: "m", actions: [reply], intentIdentifiers: [], options: [])
-            groupRepliableMessageCategory = UNNotificationCategory(identifier: "gr", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], options: [])
-            groupRepliableMediaMessageCategory = UNNotificationCategory(identifier: "gm", actions: [reply], intentIdentifiers: [], options: [])
-            channelMessageCategory = UNNotificationCategory(identifier: "c", actions: [], intentIdentifiers: [], options: [])
-            reactionMessageCategory = UNNotificationCategory(identifier: "t", actions: [], intentIdentifiers: [], options: [])
-        }
+        var options: UNNotificationCategoryOptions = []
+        if includeNames {
+            options.insert(.hiddenPreviewsShowTitle)
+        }
+        
+        var carPlayOptions = options
+        carPlayOptions.insert(.allowInCarPlay)
+        if #available(iOS 13.2, *) {
+            carPlayOptions.insert(.allowAnnouncement)
+        }
+        
+        unknownMessageCategory = UNNotificationCategory(identifier: "unknown", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
+        repliableMessageCategory = UNNotificationCategory(identifier: "r", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: carPlayOptions)
+        repliableMediaMessageCategory = UNNotificationCategory(identifier: "m", actions: [reply], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: carPlayOptions)
+        groupRepliableMessageCategory = UNNotificationCategory(identifier: "gr", actions: [reply], intentIdentifiers: [INSearchForMessagesIntentIdentifier], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
+        groupRepliableMediaMessageCategory = UNNotificationCategory(identifier: "gm", actions: [reply], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
+        channelMessageCategory = UNNotificationCategory(identifier: "c", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenContentString, options: options)
+        reactionMessageCategory = UNNotificationCategory(identifier: "t", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenReactionContentString, options: options)
+        storyCategory = UNNotificationCategory(identifier: "st", actions: [], intentIdentifiers: [], hiddenPreviewsBodyPlaceholder: hiddenStoryContentString, options: options)
 
         UNUserNotificationCenter.current().setNotificationCategories([
             unknownMessageCategory,
             repliableMessageCategory,
|
|||||||
channelMessageCategory,
|
channelMessageCategory,
|
||||||
reactionMessageCategory,
|
reactionMessageCategory,
|
||||||
groupRepliableMessageCategory,
|
groupRepliableMessageCategory,
|
||||||
groupRepliableMediaMessageCategory
|
groupRepliableMediaMessageCategory,
|
||||||
|
storyCategory
|
||||||
])
|
])
|
||||||
|
|
||||||
Logger.shared.log("App \(self.episodeId)", "register for notifications: invoke registerForRemoteNotifications")
|
Logger.shared.log("App \(self.episodeId)", "register for notifications: invoke registerForRemoteNotifications")
|
||||||
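For context, a minimal standalone sketch of the UserNotifications call shown above: registering a story category whose hidden-preview placeholder replaces the story text on the lock screen. The function name and the idea of registering it in isolation are illustrative assumptions; in the commit the "st" category is registered together with the message categories listed above.

import UserNotifications

// Sketch only: register an "st" category whose placeholder is used in place of
// the story body when notification previews are hidden.
func registerStoryNotificationCategory(hiddenStoryContentString: String, includeNames: Bool) {
    var options: UNNotificationCategoryOptions = []
    if includeNames {
        options.insert(.hiddenPreviewsShowTitle)
    }
    let storyCategory = UNNotificationCategory(
        identifier: "st",
        actions: [],
        intentIdentifiers: [],
        hiddenPreviewsBodyPlaceholder: hiddenStoryContentString,
        options: options
    )
    // In the real app this set also contains the message categories declared above.
    UNUserNotificationCenter.current().setNotificationCategories([storyCategory])
}

A notification whose content is routed to categoryIdentifier "st" then falls back to the placeholder whenever the user has notification previews hidden.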
@@ -4524,7 +4524,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
                 return
             }
             if let story = message.associatedStories[storyId], story.data.isEmpty {
-                self.present(UndoOverlayController(presentationData: self.presentationData, content: .info(title: nil, text: self.presentationData.strings.Story_TooltipExpired, timeout: nil), elevatedLayout: false, action: { _ in return true }), in: .current)
+                self.present(UndoOverlayController(presentationData: self.presentationData, content: .universal(animation: "story_expired", scale: 0.066, colors: [:], title: nil, text: self.presentationData.strings.Story_TooltipExpired, customUndoText: nil, timeout: nil), elevatedLayout: false, action: { _ in return true }), in: .current)
                 return
             }
 
@@ -19170,6 +19170,10 @@ func canAddMessageReactions(message: Message) -> Bool {
     for media in message.media {
         if let _ = media as? TelegramMediaAction {
             return false
+        } else if let story = media as? TelegramMediaStory {
+            if story.isMention {
+                return false
+            }
         }
     }
     return true
@@ -151,6 +151,9 @@ private func canEditMessage(accountPeerId: PeerId, limitsConfiguration: EngineCo
         } else if let _ = media as? TelegramMediaInvoice {
             hasUneditableAttributes = true
             break
+        } else if let _ = media as? TelegramMediaStory {
+            hasUneditableAttributes = true
+            break
         }
     }
 
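The reaction, edit, pin and share hunks in this commit all apply the same guard: walk message.media and back off when an attached story is a mention. A compact sketch of that pattern with simplified stand-in types (these are not the real Telegram model types):

// Stand-in types for illustration only.
protocol MediaItem {}
struct StoryMediaItem: MediaItem {
    let isMention: Bool
}

// Returns false when any attached story is a mention, mirroring the checks
// added to canAddMessageReactions and canEditMessage above.
func actionAllowed(for media: [MediaItem]) -> Bool {
    for item in media {
        if let story = item as? StoryMediaItem, story.isMention {
            return false
        }
    }
    return true
}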
@@ -562,6 +565,10 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
                 }
             } else if let dice = media as? TelegramMediaDice {
                 diceEmoji = dice.emoji
+            } else if let story = media as? TelegramMediaStory {
+                if story.isMention {
+                    isAction = true
+                }
             }
         }
     }
@@ -626,6 +633,8 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
             if let story = media as? TelegramMediaStory {
                 if let story = message.associatedStories[story.storyId], story.data.isEmpty {
                     canPin = false
+                } else if story.isMention {
+                    canPin = false
                 }
             }
         }
@@ -875,136 +884,6 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
                 }
             }
         }
-
-        if context.sharedContext.immediateExperimentalUISettings.enableReactionOverrides {
-            for media in message.media {
-                if let file = media as? TelegramMediaFile, file.isAnimatedSticker {
-                    actions.append(.action(ContextMenuActionItem(text: "Set as Reaction Effect", icon: { _ in
-                        return nil
-                    }, action: { c, _ in
-                        let subItems: Signal<ContextController.Items, NoError> = context.engine.stickers.availableReactions()
-                        |> map { reactions -> ContextController.Items in
-                            var subActions: [ContextMenuItem] = []
-
-                            if let reactions = reactions {
-                                for reaction in reactions.reactions {
-                                    if !reaction.isEnabled || !reaction.isPremium {
-                                        continue
-                                    }
-
-                                    guard case let .builtin(emojiValue) = reaction.value else {
-                                        continue
-                                    }
-
-                                    subActions.append(.action(ContextMenuActionItem(text: emojiValue, icon: { _ in
-                                        return nil
-                                    }, action: { _, f in
-                                        let _ = updateExperimentalUISettingsInteractively(accountManager: context.sharedContext.accountManager, { settings in
-                                            var settings = settings
-
-                                            var currentItems: [ExperimentalUISettings.AccountReactionOverrides.Item]
-                                            if let value = settings.accountReactionEffectOverrides.first(where: { $0.accountId == context.account.id.int64 }) {
-                                                currentItems = value.items
-                                            } else {
-                                                currentItems = []
-                                            }
-
-                                            currentItems.removeAll(where: { $0.key == reaction.value })
-                                            currentItems.append(ExperimentalUISettings.AccountReactionOverrides.Item(
-                                                key: reaction.value,
-                                                messageId: message.id,
-                                                mediaId: file.fileId
-                                            ))
-
-                                            settings.accountReactionEffectOverrides.removeAll(where: { $0.accountId == context.account.id.int64 })
-                                            settings.accountReactionEffectOverrides.append(ExperimentalUISettings.AccountReactionOverrides(accountId: context.account.id.int64, items: currentItems))
-
-                                            return settings
-                                        }).start()
-
-                                        f(.default)
-                                    })))
-                                }
-                            }
-
-                            return ContextController.Items(content: .list(subActions), disablePositionLock: true, tip: nil)
-                        }
-
-                        c.pushItems(items: subItems)
-                    })))
-
-                    actions.append(.action(ContextMenuActionItem(text: "Set as Sticker Effect", icon: { _ in
-                        return nil
-                    }, action: { c, _ in
-                        let stickersKey: PostboxViewKey = .orderedItemList(id: Namespaces.OrderedItemList.CloudPremiumStickers)
-                        let subItems: Signal<ContextController.Items, NoError> = context.account.postbox.combinedView(keys: [stickersKey])
-                        |> map { views -> [String] in
-                            if let view = views.views[stickersKey] as? OrderedItemListView, !view.items.isEmpty {
-                                return view.items.compactMap { item -> String? in
-                                    guard let mediaItem = item.contents.get(RecentMediaItem.self) else {
-                                        return nil
-                                    }
-                                    let file = mediaItem.media
-                                    for attribute in file.attributes {
-                                        switch attribute {
-                                        case let .Sticker(text, _, _):
-                                            return text
-                                        default:
-                                            break
-                                        }
-                                    }
-                                    return nil
-                                }
-                            } else {
-                                return []
-                            }
-                        }
-                        |> map { stickerNames -> ContextController.Items in
-                            var subActions: [ContextMenuItem] = []
-
-                            for stickerName in stickerNames {
-                                subActions.append(.action(ContextMenuActionItem(text: stickerName, icon: { _ in
-                                    return nil
-                                }, action: { _, f in
-                                    let _ = updateExperimentalUISettingsInteractively(accountManager: context.sharedContext.accountManager, { settings in
-                                        var settings = settings
-
-                                        var currentItems: [ExperimentalUISettings.AccountReactionOverrides.Item]
-                                        if let value = settings.accountStickerEffectOverrides.first(where: { $0.accountId == context.account.id.int64 }) {
-                                            currentItems = value.items
-                                        } else {
-                                            currentItems = []
-                                        }
-
-                                        currentItems.removeAll(where: { $0.key == MessageReaction.Reaction.builtin(stickerName) })
-                                        currentItems.append(ExperimentalUISettings.AccountReactionOverrides.Item(
-                                            key: .builtin(stickerName),
-                                            messageId: message.id,
-                                            mediaId: file.fileId
-                                        ))
-
-                                        settings.accountStickerEffectOverrides.removeAll(where: { $0.accountId == context.account.id.int64 })
-                                        settings.accountStickerEffectOverrides.append(ExperimentalUISettings.AccountReactionOverrides(accountId: context.account.id.int64, items: currentItems))
-
-                                        return settings
-                                    }).start()
-
-                                    f(.default)
-                                })))
-                            }
-
-                            return ContextController.Items(content: .list(subActions), disablePositionLock: true, tip: nil)
-                        }
-
-                        c.pushItems(items: subItems)
-                    })))
-
-                    actions.append(.separator)
-
-                    break
-                }
-            }
-        }
         }
 
         var isDownloading = false
@@ -1951,6 +1830,8 @@ func chatAvailableMessageActionsImpl(engine: TelegramEngine, accountPeerId: Peer
                 } else if let story = media as? TelegramMediaStory {
                     if let story = message.associatedStories[story.storyId], story.data.isEmpty {
                         isShareProtected = true
+                    } else if story.isMention {
+                        isShareProtected = true
                     }
                 }
             }
@@ -214,7 +214,7 @@ final class ManagedAudioRecorderContext {
                 }
                 return ActionDisposable {
                 }
-            }), playAndRecord: true, ambient: false, mixWithOthers: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
+            }), playAndRecord: true, soundMuted: false, ambient: false, mixWithOthers: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe<Float>(), updatedRate: {
             }, audioPaused: {})
             self.toneRenderer = toneRenderer
 
@@ -115,6 +115,9 @@ final class OverlayInstantVideoNode: OverlayMediaItemNode {
         self.videoNode.playOnceWithSound(playAndRecord: playAndRecord)
     }
 
+    func setSoundMuted(soundMuted: Bool) {
+    }
+
     func continueWithOverridingAmbientMode(isAmbient: Bool) {
     }
 
@@ -2140,6 +2140,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
     private var expiringStoryListState: PeerExpiringStoryListContext.State?
     private var expiringStoryListDisposable: Disposable?
 
+    private let storiesReady = ValuePromise<Bool>(true, ignoreRepeated: true)
+
     private let _ready = Promise<Bool>()
     var ready: Promise<Bool> {
         return self._ready
@@ -3863,6 +3865,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
                 self?.translationState = translationState
             })
         } else if peerId.namespace == Namespaces.Peer.CloudUser {
+            self.storiesReady.set(false)
            let expiringStoryList = PeerExpiringStoryListContext(account: context.account, peerId: peerId)
            self.expiringStoryList = expiringStoryList
            self.expiringStoryListDisposable = (combineLatest(queue: .mainQueue(),
@@ -3897,6 +3900,8 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
                }, state.items.count, state.hasUnseen, state.hasUnseenCloseFriends)
            }
 
+           self.storiesReady.set(true)
+
            self.requestLayout(animated: false)
 
            if self.headerNode.avatarListNode.openStories == nil {
@@ -9515,10 +9520,11 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
         let avatarReady = self.headerNode.avatarListNode.isReady.get()
         let combinedSignal = combineLatest(queue: .mainQueue(),
             avatarReady,
+            self.storiesReady.get(),
             self.paneContainerNode.isReady.get()
         )
-        |> map { lhs, rhs in
-            return lhs && rhs
+        |> map { a, b, c in
+            return a && b && c
         }
         self._ready.set(combinedSignal
         |> filter { $0 }
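The readiness change above follows a standard SwiftSignalKit pattern: the screen's ready promise is gated on several boolean signals combined with combineLatest, so the screen only reports ready once every part has loaded. A simplified sketch, using assumed names rather than the actual PeerInfoScreenNode members:

import SwiftSignalKit

// Sketch only: three readiness flags combined into one ready promise.
// `ignoreRepeated: true` keeps duplicate values from re-triggering downstream work.
final class ScreenReadiness {
    let avatarReady = ValuePromise<Bool>(false, ignoreRepeated: true)
    let storiesReady = ValuePromise<Bool>(true, ignoreRepeated: true)
    let panesReady = ValuePromise<Bool>(false, ignoreRepeated: true)

    private let _ready = Promise<Bool>()
    var ready: Promise<Bool> { return self._ready }

    init() {
        let combined = combineLatest(queue: .mainQueue(),
            self.avatarReady.get(),
            self.storiesReady.get(),
            self.panesReady.get()
        )
        |> map { a, b, c in
            return a && b && c
        }
        // The ready promise fires only once all three flags are true.
        self._ready.set(combined |> filter { $0 })
    }
}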
@@ -36,6 +36,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     public let streamVideo: MediaPlayerStreaming
     public let loopVideo: Bool
     public let enableSound: Bool
+    public let soundMuted: Bool
     public let beginWithAmbientSound: Bool
     public let mixWithOthers: Bool
     public let baseRate: Double
@@ -55,7 +56,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     let displayImage: Bool
     let hasSentFramesToDisplay: (() -> Void)?
 
-    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
+    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, soundMuted: Bool = false, beginWithAmbientSound: Bool = false, mixWithOthers: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
         self.id = id
         self.nativeId = id
         self.userLocation = userLocation
@@ -78,6 +79,7 @@ public final class NativeVideoContent: UniversalVideoContent {
         self.streamVideo = streamVideo
         self.loopVideo = loopVideo
         self.enableSound = enableSound
+        self.soundMuted = soundMuted
         self.beginWithAmbientSound = beginWithAmbientSound
         self.mixWithOthers = mixWithOthers
         self.baseRate = baseRate
@@ -99,7 +101,7 @@ public final class NativeVideoContent: UniversalVideoContent {
     }
 
     public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
-        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
+        return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, soundMuted: self.soundMuted, beginWithAmbientSound: self.beginWithAmbientSound, mixWithOthers: self.mixWithOthers, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
     }
 
     public func isEqual(to other: UniversalVideoContent) -> Bool {
@@ -121,6 +123,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
     private let userLocation: MediaResourceUserLocation
     private let fileReference: FileMediaReference
     private let enableSound: Bool
+    private let soundMuted: Bool
     private let beginWithAmbientSound: Bool
     private let mixWithOthers: Bool
     private let loopVideo: Bool
@@ -180,12 +183,13 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
 
     private let hasSentFramesToDisplay: (() -> Void)?
 
-    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
+    init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, soundMuted: Bool, beginWithAmbientSound: Bool, mixWithOthers: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
         self.postbox = postbox
         self.userLocation = userLocation
         self.fileReference = fileReference
         self.placeholderColor = placeholderColor
         self.enableSound = enableSound
+        self.soundMuted = soundMuted
         self.beginWithAmbientSound = beginWithAmbientSound
         self.mixWithOthers = mixWithOthers
         self.loopVideo = loopVideo
@@ -206,7 +210,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
             break
         }
 
-        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
+        self.player = MediaPlayer(audioSessionManager: audioSessionManager, postbox: postbox, userLocation: userLocation, userContentType: userContentType, resourceReference: fileReference.resourceReference(fileReference.media.resource), tempFilePath: tempFilePath, streamable: streamVideo, video: true, preferSoftwareDecoding: false, playAutomatically: false, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, soundMuted: soundMuted, ambient: beginWithAmbientSound, mixWithOthers: mixWithOthers, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession, storeAfterDownload: storeAfterDownload, isAudioVideoMessage: isAudioVideoMessage)
 
         var actionAtEndImpl: (() -> Void)?
         if enableSound && !loopVideo {
@@ -483,6 +487,10 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
         self.player.setForceAudioToSpeaker(forceAudioToSpeaker)
     }
 
+    func setSoundMuted(soundMuted: Bool) {
+        self.player.setSoundMuted(soundMuted: soundMuted)
+    }
+
     func continueWithOverridingAmbientMode(isAmbient: Bool) {
         self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
     }
@@ -430,6 +430,9 @@ private final class PlatformVideoContentNode: ASDisplayNode, UniversalVideoConte
     func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
     }
 
+    func setSoundMuted(soundMuted: Bool) {
+    }
+
     func continueWithOverridingAmbientMode(isAmbient: Bool) {
     }
 
@@ -267,6 +267,9 @@ private final class SystemVideoContentNode: ASDisplayNode, UniversalVideoContent
     func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
     }
 
+    func setSoundMuted(soundMuted: Bool) {
+    }
+
     func continueWithOverridingAmbientMode(isAmbient: Bool) {
     }
 
@@ -164,6 +164,9 @@ final class WebEmbedVideoContentNode: ASDisplayNode, UniversalVideoContentNode {
         }
     }
 
+    func setSoundMuted(soundMuted: Bool) {
+    }
+
     func continueWithOverridingAmbientMode(isAmbient: Bool) {
     }
 
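The last three hunks add the same empty conformance to content nodes that have no controllable audio track. A small sketch of the shape of that pattern, using stand-in types rather than the real UniversalVideoContentNode protocol:

// Stand-in protocol for illustration; the real protocol has many more requirements.
protocol SoundMutable: AnyObject {
    func setSoundMuted(soundMuted: Bool)
}

// A node that owns a player forwards the call; the intent is to silence output
// without stopping playback.
final class PlayerBackedNode: SoundMutable {
    private(set) var playerMuted = false
    func setSoundMuted(soundMuted: Bool) {
        self.playerMuted = soundMuted
    }
}

// Nodes without their own audio accept the call as a no-op.
final class SilentNode: SoundMutable {
    func setSoundMuted(soundMuted: Bool) {
    }
}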