Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

This commit is contained in:
Ilya Laktyushin 2023-07-02 00:03:24 +02:00
commit 56795dd1c4
15 changed files with 435 additions and 260 deletions

View File

@ -187,7 +187,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
private var storyProgressDisposable: Disposable?
private var storySubscriptionsDisposable: Disposable?
private var preloadStorySubscriptionsDisposable: Disposable?
private var preloadStoryResourceDisposables: [MediaResourceId: Disposable] = [:]
private var preloadStoryResourceDisposables: [MediaId: Disposable] = [:]
private var fullScreenEffectView: RippleEffectView?
@ -1821,23 +1821,17 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
resources.removeAll()
}
var validIds: [MediaResourceId] = []
var validIds: [MediaId] = []
for (_, info) in resources.sorted(by: { $0.value.priority < $1.value.priority }) {
let resource = info.resource
validIds.append(resource.resource.id)
if self.preloadStoryResourceDisposables[resource.resource.id] == nil {
var fetchRange: (Range<Int64>, MediaBoxFetchPriority)?
if let size = info.size {
fetchRange = (0 ..< Int64(size), .default)
if let mediaId = info.media.id {
validIds.append(mediaId)
if self.preloadStoryResourceDisposables[mediaId] == nil {
self.preloadStoryResourceDisposables[mediaId] = preloadStoryMedia(context: self.context, peer: info.peer, storyId: info.storyId, media: info.media).start()
}
#if DEBUG
fetchRange = nil
#endif
self.preloadStoryResourceDisposables[resource.resource.id] = fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, userLocation: .other, userContentType: .other, reference: resource, range: fetchRange).start()
}
}
var removeIds: [MediaResourceId] = []
var removeIds: [MediaId] = []
for (id, disposable) in self.preloadStoryResourceDisposables {
if !validIds.contains(id) {
removeIds.append(id)

View File

@ -136,6 +136,8 @@ public class InteractiveTransitionGestureRecognizer: UIPanGestureRecognizer {
let size = self.view?.bounds.size ?? CGSize()
print("moved: \(CFAbsoluteTimeGetCurrent()) absTranslationX: \(absTranslationX) absTranslationY: \(absTranslationY)")
if self.currentAllowedDirections.contains(.down) {
if !self.validatedGesture {
if absTranslationX > 2.0 && absTranslationX > absTranslationY * 2.0 {

View File

@ -115,7 +115,9 @@ public final class MediaPlayerNode: ASDisplayNode {
if abs(rotationAngle).remainder(dividingBy: Double.pi) > 0.1 {
transform = transform.scaledBy(x: CGFloat(aspect), y: CGFloat(1.0 / aspect))
}
videoLayer.setAffineTransform(transform)
if videoLayer.affineTransform() != transform {
videoLayer.setAffineTransform(transform)
}
}
if self.videoInHierarchy || self.canPlaybackWithoutHierarchy {
@ -435,6 +437,9 @@ public final class MediaPlayerNode: ASDisplayNode {
private func updateLayout() {
let bounds = self.bounds
if bounds.isEmpty {
return
}
let fittedRect: CGRect
if let arguments = self.transformArguments {

View File

@ -14,17 +14,20 @@ public final class StoryPreloadInfo {
case next(position: Int)
}
public let resource: MediaResourceReference
public let size: Int32?
public let peer: PeerReference
public let storyId: Int32
public let media: EngineMedia
public let priority: Priority
public init(
resource: MediaResourceReference,
size: Int32?,
peer: PeerReference,
storyId: Int32,
media: EngineMedia,
priority: Priority
) {
self.resource = resource
self.size = size
self.peer = peer
self.storyId = storyId
self.media = media
self.priority = priority
}
}
@ -822,7 +825,7 @@ public extension TelegramEngine {
}
}
public func preloadStorySubscriptions(isHidden: Bool) -> Signal<[EngineMediaResource.Id: StoryPreloadInfo], NoError> {
public func preloadStorySubscriptions(isHidden: Bool) -> Signal<[EngineMedia.Id: StoryPreloadInfo], NoError> {
let basicPeerKey = PostboxViewKey.basicPeer(self.account.peerId)
let subscriptionsKey: PostboxStorySubscriptionsKey = isHidden ? .hidden : .filtered
let storySubscriptionsKey = PostboxViewKey.storySubscriptions(key: subscriptionsKey)
@ -831,7 +834,7 @@ public extension TelegramEngine {
storySubscriptionsKey,
PostboxViewKey.storiesState(key: .subscriptions(subscriptionsKey))
])
|> mapToSignal { views -> Signal<[EngineMediaResource.Id: StoryPreloadInfo], NoError> in
|> mapToSignal { views -> Signal<[EngineMedia.Id: StoryPreloadInfo], NoError> in
guard let basicPeerView = views.views[basicPeerKey] as? BasicPeerView, let accountPeer = basicPeerView.peer else {
return .single([:])
}
@ -854,7 +857,7 @@ public extension TelegramEngine {
})
return self.account.postbox.combinedView(keys: additionalDataKeys)
|> map { views -> [EngineMediaResource.Id: StoryPreloadInfo] in
|> map { views -> [EngineMedia.Id: StoryPreloadInfo] in
let _ = accountPeer
let _ = storiesStateView
@ -893,42 +896,23 @@ public extension TelegramEngine {
})
var nextPriority: Int = 0
var resultResources: [EngineMediaResource.Id: StoryPreloadInfo] = [:]
var resultResources: [EngineMedia.Id: StoryPreloadInfo] = [:]
for itemAndPeer in sortedItems.prefix(10) {
guard let peerReference = PeerReference(itemAndPeer.peer) else {
continue
}
guard let media = itemAndPeer.item.media else {
guard let media = itemAndPeer.item.media, let mediaId = media.id else {
continue
}
if let image = media as? TelegramMediaImage, let resource = image.representations.last?.resource {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: itemAndPeer.item.id, media: media), resource: resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
} else if let file = media as? TelegramMediaFile {
if let preview = file.previewRepresentations.last {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: itemAndPeer.item.id, media: file), resource: preview.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: itemAndPeer.item.id, media: file), resource: file.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: file.preloadSize,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
resultResources[mediaId] = StoryPreloadInfo(
peer: peerReference,
storyId: itemAndPeer.item.id,
media: EngineMedia(media),
priority: .top(position: nextPriority)
)
nextPriority += 1
}
return resultResources

View File

@ -12,7 +12,8 @@ swift_library(
deps = [
"//submodules/Display",
"//submodules/AsyncDisplayKit",
"//submodules/AnimationUI",
"//submodules/ComponentFlow",
"//submodules/TelegramUI/Components/LottieComponent",
],
visibility = [
"//visibility:public",

View File

@ -2,7 +2,8 @@ import Foundation
import UIKit
import Display
import AsyncDisplayKit
import AnimationUI
import ComponentFlow
import LottieComponent
public final class MoreHeaderButton: HighlightableButtonNode {
public enum Content {
@ -13,7 +14,7 @@ public final class MoreHeaderButton: HighlightableButtonNode {
public let referenceNode: ContextReferenceContentNode
public let containerNode: ContextControllerSourceNode
private let iconNode: ASImageNode
private var animationNode: AnimationNode?
private let animationView = ComponentView<Empty>()
public var contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
@ -70,15 +71,23 @@ public final class MoreHeaderButton: HighlightableButtonNode {
private var content: Content?
public func setContent(_ content: Content, animated: Bool = false) {
if case .more = content, self.animationNode == nil {
let iconColor = self.color
let animationNode = AnimationNode(animation: "anim_profilemore", colors: ["Point 2.Group 1.Fill 1": iconColor,
"Point 3.Group 1.Fill 1": iconColor,
"Point 1.Group 1.Fill 1": iconColor], scale: 1.0)
if case .more = content {
let animationSize = CGSize(width: 22.0, height: 22.0)
animationNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - animationSize.width) / 2.0), y: floor((self.containerNode.bounds.height - animationSize.height) / 2.0)), size: animationSize)
self.addSubnode(animationNode)
self.animationNode = animationNode
let _ = self.animationView.update(
transition: .immediate,
component: AnyComponent(LottieComponent(
content: LottieComponent.AppBundleContent(name: "anim_profilemore"),
color: self.color
)),
environment: {},
containerSize: animationSize
)
if let animationComponentView = self.animationView.view {
if animationComponentView.superview == nil {
self.view.addSubview(animationComponentView)
}
animationComponentView.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - animationSize.width) / 2.0), y: floor((self.containerNode.bounds.height - animationSize.height) / 2.0)), size: animationSize)
}
}
if animated {
if let snapshotView = self.referenceNode.view.snapshotContentTree() {
@ -93,8 +102,10 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.iconNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
self.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.animationNode?.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
if let animationComponentView = self.animationView.view {
animationComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
animationComponentView.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
}
}
switch content {
@ -105,7 +116,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = true
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = true
}
case let .more(image):
if let image = image {
self.iconNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - image.size.width) / 2.0), y: floor((self.containerNode.bounds.height - image.size.height) / 2.0)), size: image.size)
@ -113,7 +126,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = false
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = false
}
}
} else {
self.content = content
@ -125,7 +140,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = true
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = true
}
case let .more(image):
if let image = image {
self.iconNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - image.size.width) / 2.0), y: floor((self.containerNode.bounds.height - image.size.height) / 2.0)), size: image.size)
@ -133,7 +150,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = false
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = false
}
}
}
}
@ -151,7 +170,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
}
/// Plays the "more" Lottie animation once, if the animation view has been
/// created (it is only instantiated when content was set to `.more`).
public func play() {
// The component view is type-erased; downcast to reach `playOnce()`.
if let animationComponentView = self.animationView.view as? LottieComponent.View {
animationComponentView.playOnce()
}
}
public static func optionsCircleImage(color: UIColor) -> UIImage? {

View File

@ -65,9 +65,13 @@ swift_library(
"//submodules/OverlayStatusController",
"//submodules/Utils/VolumeButtons",
"//submodules/TelegramUI/Components/PeerReportScreen",
"//submodules/MediaResources",
"//submodules/LocalMediaResources",
"//submodules/SaveToCameraRoll",
"//submodules/Components/BundleIconComponent",
"//submodules/TinyThumbnail",
"//submodules/ImageBlur",
],
visibility = [
"//visibility:public",

View File

@ -6,6 +6,7 @@ import SwiftSignalKit
import AccountContext
import TelegramCore
import Postbox
import MediaResources
private struct StoryKey: Hashable {
var peerId: EnginePeer.Id
@ -396,7 +397,7 @@ public final class StoryContentContextImpl: StoryContentContext {
private var requestedStoryKeys = Set<StoryKey>()
private var requestStoryDisposables = DisposableSet()
private var preloadStoryResourceDisposables: [MediaResourceId: Disposable] = [:]
private var preloadStoryResourceDisposables: [MediaId: Disposable] = [:]
private var pollStoryMetadataDisposables = DisposableSet()
private var singlePeerListContext: PeerExpiringStoryListContext?
@ -761,55 +762,30 @@ public final class StoryContentContextImpl: StoryContentContext {
}
var nextPriority = 0
var resultResources: [EngineMediaResource.Id: StoryPreloadInfo] = [:]
var resultResources: [EngineMedia.Id: StoryPreloadInfo] = [:]
for i in 0 ..< min(possibleItems.count, 3) {
let peer = possibleItems[i].0
let item = possibleItems[i].1
if let peerReference = PeerReference(peer._asPeer()) {
if let image = item.media._asMedia() as? TelegramMediaImage, let resource = image.representations.last?.resource {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: image), resource: resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
} else if let file = item.media._asMedia() as? TelegramMediaFile {
if let preview = file.previewRepresentations.last {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: file), resource: preview.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: file), resource: file.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: file.preloadSize,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
if let peerReference = PeerReference(peer._asPeer()), let mediaId = item.media.id {
resultResources[mediaId] = StoryPreloadInfo(
peer: peerReference,
storyId: item.id,
media: item.media,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
}
var validIds: [MediaResourceId] = []
for (_, info) in resultResources.sorted(by: { $0.value.priority < $1.value.priority }) {
let resource = info.resource
validIds.append(resource.resource.id)
if self.preloadStoryResourceDisposables[resource.resource.id] == nil {
var fetchRange: (Range<Int64>, MediaBoxFetchPriority)?
if let size = info.size {
fetchRange = (0 ..< Int64(size), .default)
}
self.preloadStoryResourceDisposables[resource.resource.id] = fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, userLocation: .other, userContentType: .other, reference: resource, range: fetchRange).start()
var validIds: [EngineMedia.Id] = []
for (id, info) in resultResources.sorted(by: { $0.value.priority < $1.value.priority }) {
validIds.append(id)
if self.preloadStoryResourceDisposables[id] == nil {
self.preloadStoryResourceDisposables[id] = preloadStoryMedia(context: context, peer: info.peer, storyId: info.storyId, media: info.media).start()
}
}
var removeIds: [MediaResourceId] = []
var removeIds: [EngineMedia.Id] = []
for (id, disposable) in self.preloadStoryResourceDisposables {
if !validIds.contains(id) {
removeIds.append(id)
@ -1075,7 +1051,7 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
private var focusedId: Int32?
private var focusedIdUpdated = Promise<Void>(Void())
private var preloadStoryResourceDisposables: [MediaResourceId: Disposable] = [:]
private var preloadStoryResourceDisposables: [EngineMedia.Id: Disposable] = [:]
private var pollStoryMetadataDisposables = DisposableSet()
public init(context: AccountContext, peerId: EnginePeer.Id, listContext: PeerStoryListContext, initialId: Int32?) {
@ -1184,7 +1160,7 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
self.statePromise.set(.single(stateValue))
self.updatedPromise.set(.single(Void()))
var resultResources: [EngineMediaResource.Id: StoryPreloadInfo] = [:]
var resultResources: [EngineMedia.Id: StoryPreloadInfo] = [:]
var pollItems: [StoryKey] = []
if let focusedIndex, let slice = stateValue.slice {
@ -1207,52 +1183,29 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
for i in 0 ..< min(possibleItems.count, 3) {
let peer = possibleItems[i].0
let item = possibleItems[i].1
if let peerReference = PeerReference(peer._asPeer()) {
if let image = item.media._asMedia() as? TelegramMediaImage, let resource = image.representations.last?.resource {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: image), resource: resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
} else if let file = item.media._asMedia() as? TelegramMediaFile {
if let preview = file.previewRepresentations.last {
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: file), resource: preview.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: nil,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
let resource = MediaResourceReference.media(media: .story(peer: peerReference, id: item.id, media: file), resource: file.resource)
resultResources[EngineMediaResource.Id(resource.resource.id)] = StoryPreloadInfo(
resource: resource,
size: file.preloadSize,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
if let peerReference = PeerReference(peer._asPeer()), let mediaId = item.media.id {
resultResources[mediaId] = StoryPreloadInfo(
peer: peerReference,
storyId: item.id,
media: item.media,
priority: .top(position: nextPriority)
)
nextPriority += 1
}
}
}
var validIds: [MediaResourceId] = []
var validIds: [EngineMedia.Id] = []
for (_, info) in resultResources.sorted(by: { $0.value.priority < $1.value.priority }) {
let resource = info.resource
validIds.append(resource.resource.id)
if self.preloadStoryResourceDisposables[resource.resource.id] == nil {
var fetchRange: (Range<Int64>, MediaBoxFetchPriority)?
if let size = info.size {
fetchRange = (0 ..< Int64(size), .default)
if let mediaId = info.media.id {
validIds.append(mediaId)
if self.preloadStoryResourceDisposables[mediaId] == nil {
self.preloadStoryResourceDisposables[mediaId] = preloadStoryMedia(context: context, peer: info.peer, storyId: info.storyId, media: info.media).start()
}
self.preloadStoryResourceDisposables[resource.resource.id] = fetchedMediaResource(mediaBox: self.context.account.postbox.mediaBox, userLocation: .other, userContentType: .other, reference: resource, range: fetchRange).start()
}
}
var removeIds: [MediaResourceId] = []
var removeIds: [EngineMedia.Id] = []
for (id, disposable) in self.preloadStoryResourceDisposables {
if !validIds.contains(id) {
removeIds.append(id)
@ -1330,3 +1283,40 @@ public final class PeerStoryListContentContextImpl: StoryContentContext {
let _ = self.context.engine.messages.markStoryAsSeen(peerId: id.peerId, id: id.id, asPinned: true).start()
}
}
/// Starts background fetching of a story's media so it is already in the media box
/// when the story is opened. Preloading is best-effort: every fetch error is
/// swallowed (`catch` -> `.complete()`), so the returned signal never fails.
///
/// - Images: the largest available representation is fetched in full (`range: nil`).
/// - Video files: only the prefix declared by the `.Video` attribute's `preloadSize`
///   is fetched (full fetch if no preload size is declared); additionally the
///   `CachedVideoFirstFrameRepresentation` is generated so a first frame is cached.
/// - Other media kinds are ignored (`default: break` -> empty signal set).
///
/// - Returns: A `Signal<Never, NoError>` that completes when all started fetches finish.
public func preloadStoryMedia(context: AccountContext, peer: PeerReference, storyId: Int32, media: EngineMedia) -> Signal<Never, NoError> {
var signals: [Signal<Never, NoError>] = []
switch media {
case let .image(image):
// Full-size image: fetch the biggest representation, no byte-range limit.
if let representation = largestImageRepresentation(image.representations) {
signals.append(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(peer.id), userContentType: .other, reference: .media(media: .story(peer: peer, id: storyId, media: media._asMedia()), resource: representation.resource), range: nil)
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
})
}
case let .file(file):
// Limit the video fetch to the server-declared preload prefix, if any.
var fetchRange: (Range<Int64>, MediaBoxFetchPriority)?
for attribute in file.attributes {
if case let .Video(_, _, _, preloadSize) = attribute {
if let preloadSize {
fetchRange = (0 ..< Int64(preloadSize), .default)
}
break
}
}
signals.append(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(peer.id), userContentType: .other, reference: .media(media: .story(peer: peer, id: storyId, media: media._asMedia()), resource: file.resource), range: fetchRange)
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
})
// Also warm the cached first-frame representation (fetch: true forces generation).
signals.append(context.account.postbox.mediaBox.cachedResourceRepresentation(file.resource, representation: CachedVideoFirstFrameRepresentation(), complete: true, fetch: true, attemptSynchronously: false)
|> ignoreValues)
default:
break
}
return combineLatest(signals) |> ignoreValues
}

View File

@ -18,6 +18,7 @@ import AttachmentUI
import simd
import VolumeButtons
import TooltipUI
import ChatEntityKeyboardInputNode
func hasFirstResponder(_ view: UIView) -> Bool {
if view.isFirstResponder {
@ -178,6 +179,10 @@ private final class StoryContainerScreenComponent: Component {
private var volumeButtonsListener: VolumeButtonsListener?
private let volumeButtonsListenerShouldBeActive = ValuePromise<Bool>(false, ignoreRepeated: true)
private let inputMediaNodeDataPromise = Promise<ChatEntityKeyboardInputNode.InputData>()
private var availableReactions: StoryAvailableReactions?
private var isAnimatingOut: Bool = false
private var didAnimateOut: Bool = false
@ -399,6 +404,7 @@ private final class StoryContainerScreenComponent: Component {
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began:
print("began: \(CFAbsoluteTimeGetCurrent())")
self.beginHorizontalPan(translation: recognizer.translation(in: self))
case .changed:
self.updateHorizontalPan(translation: recognizer.translation(in: self))
@ -659,6 +665,34 @@ private final class StoryContainerScreenComponent: Component {
self.environment = environment
if self.component?.content !== component.content {
if self.component == nil {
var update = false
let _ = (allowedStoryReactions(context: component.context)
|> deliverOnMainQueue).start(next: { [weak self] reactionItems in
guard let self else {
return
}
self.availableReactions = StoryAvailableReactions(reactionItems: reactionItems)
if update {
self.state?.updated(transition: .immediate)
}
})
update = true
self.inputMediaNodeDataPromise.set(
ChatEntityKeyboardInputNode.inputData(
context: component.context,
chatPeerId: nil,
areCustomEmojiEnabled: true,
hasTrending: false,
hasSearch: false,
hideBackground: true,
sendGif: nil
)
)
}
self.contentUpdatedDisposable?.dispose()
var update = false
self.contentUpdatedDisposable = (component.content.updated
@ -825,6 +859,7 @@ private final class StoryContainerScreenComponent: Component {
context: component.context,
externalState: itemSetView.externalState,
storyItemSharedState: self.storyItemSharedState,
availableReactions: self.availableReactions,
slice: slice,
theme: environment.theme,
strings: environment.strings,
@ -944,7 +979,8 @@ private final class StoryContainerScreenComponent: Component {
}
}
}
}
},
keyboardInputData: self.inputMediaNodeDataPromise.get()
)),
environment: {},
containerSize: itemSetContainerSize

View File

@ -50,7 +50,7 @@ final class StoryItemContentComponent: Component {
}
final class View: StoryContentItem.View {
private let imageNode: TransformImageNode
private let imageView: StoryItemImageView
private var videoNode: UniversalVideoNode?
private var currentMessageMedia: EngineMedia?
@ -80,13 +80,13 @@ final class StoryItemContentComponent: Component {
override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.imageNode = TransformImageNode()
self.imageView = StoryItemImageView()
super.init(frame: frame)
self.layer.addSublayer(self.hierarchyTrackingLayer)
self.addSubnode(self.imageNode)
self.addSubview(self.imageView)
self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
guard let self else {
@ -144,10 +144,17 @@ final class StoryItemContentComponent: Component {
captureProtected: component.item.isForwardingDisabled,
hintDimensions: file.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false
displayImage: false,
hasSentFramesToDisplay: { [weak self] in
guard let self else {
return
}
self.videoNode?.isHidden = false
}
),
priority: .gallery
)
videoNode.isHidden = true
self.videoNode = videoNode
self.addSubnode(videoNode)
@ -372,6 +379,8 @@ final class StoryItemContentComponent: Component {
synchronousLoad = hint.synchronousLoad
}
let startTime = CFAbsoluteTimeGetCurrent()
let peerReference = PeerReference(component.peer._asPeer())
var messageMedia: EngineMedia?
@ -398,45 +407,13 @@ final class StoryItemContentComponent: Component {
}
if reloadMedia, let messageMedia, let peerReference {
var signal: Signal<(TransformImageArguments) -> DrawingContext?, NoError>?
var fetchSignal: Signal<Never, NoError>?
switch messageMedia {
case let .image(image):
signal = chatMessagePhoto(
postbox: component.context.account.postbox,
userLocation: .other,
photoReference: .story(peer: peerReference, id: component.item.id, media: image),
synchronousLoad: synchronousLoad,
highQuality: true
)
if let representation = image.representations.last {
fetchSignal = fetchedMediaResource(
mediaBox: component.context.account.postbox.mediaBox,
userLocation: .other,
userContentType: .image,
reference: ImageMediaReference.story(peer: peerReference, id: component.item.id, media: image).resourceReference(representation.resource)
)
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}
case .image:
self.contentLoaded = true
case let .file(file):
self.contentLoaded = true
signal = mediaGridMessageVideo(
postbox: component.context.account.postbox,
userLocation: .other,
videoReference: .story(peer: peerReference, id: component.item.id, media: file),
onlyFullSize: false,
useLargeThumbnail: false,
synchronousLoad: synchronousLoad,
autoFetchFullSizeThumbnail: false,
overlayColor: nil,
nilForEmptyResult: false,
useMiniThumbnailIfAvailable: false,
blurred: false
)
fetchSignal = fetchedMediaResource(
mediaBox: component.context.account.postbox.mediaBox,
userLocation: .other,
@ -451,20 +428,6 @@ final class StoryItemContentComponent: Component {
break
}
if let signal {
var wasSynchronous = true
self.imageNode.setSignal(signal |> afterCompleted { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
self.performActionAfterImageContentLoaded(update: !wasSynchronous)
}
}, attemptSynchronously: true)
wasSynchronous = false
}
self.performActionAfterImageContentLoaded(update: false)
self.fetchDisposable?.dispose()
@ -483,6 +446,18 @@ final class StoryItemContentComponent: Component {
}
if let messageMedia {
self.imageView.update(
context: component.context,
peer: component.peer,
storyId: component.item.id,
media: component.item.media,
size: availableSize,
isCaptureProtected: component.item.isForwardingDisabled,
attemptSynchronous: synchronousLoad,
transition: transition
)
transition.setFrame(view: self.imageView, frame: CGRect(origin: CGPoint(), size: availableSize))
var dimensions: CGSize?
switch messageMedia {
case let .image(image):
@ -501,14 +476,7 @@ final class StoryItemContentComponent: Component {
if imageSize.height < availableSize.height && imageSize.height >= availableSize.height - 5.0 {
imageSize.height = availableSize.height
}
self.imageNode.captureProtected = component.item.isForwardingDisabled
let apply = self.imageNode.asyncLayout()(TransformImageArguments(
corners: ImageCorners(),
imageSize: imageSize,
boundingSize: availableSize,
intrinsicInsets: UIEdgeInsets()
))
apply()
let _ = imageSize
if let videoNode = self.videoNode {
let videoSize = dimensions.aspectFilled(availableSize)
@ -516,7 +484,6 @@ final class StoryItemContentComponent: Component {
videoNode.updateLayout(size: videoSize, transition: .immediate)
}
}
self.imageNode.frame = CGRect(origin: CGPoint(), size: availableSize)
}
switch component.item.media {
@ -614,6 +581,10 @@ final class StoryItemContentComponent: Component {
self.updateIsProgressPaused(update: false)
if reloadMedia && synchronousLoad {
print("\(CFAbsoluteTimeGetCurrent()) Synchronous: \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms")
}
return availableSize
}
}

View File

@ -0,0 +1,159 @@
import Foundation
import UIKit
import AccountContext
import TelegramCore
import Postbox
import SwiftSignalKit
import ComponentFlow
import TinyThumbnail
import ImageBlur
import MediaResources
/// Displays the still image of a story item: either the full photo or the cached
/// first frame of a story video. Shows a blurred tiny-thumbnail placeholder while
/// the real data loads, and upgrades to the decoded full image when it arrives.
final class StoryItemImageView: UIView {
    private let contentView: UIImageView

    // Media currently being displayed; used to skip redundant reloads in `update`.
    private var currentMedia: EngineMedia?
    // Subscription delivering the decoded image once resource data is complete.
    private var disposable: Disposable?
    // Fetch of the remote image resource (image case only).
    private var fetchDisposable: Disposable?

    override init(frame: CGRect) {
        self.contentView = UIImageView()
        self.contentView.contentMode = .scaleAspectFill

        super.init(frame: frame)

        self.addSubview(self.contentView)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        self.disposable?.dispose()
        // Fix: `fetchDisposable` was previously leaked on deallocation,
        // leaving the resource fetch running after the view was gone.
        self.fetchDisposable?.dispose()
    }

    /// Loads and pre-rasterizes an image from a file path so the first display
    /// does not decode on the main thread. Uses `preparingForDisplay()` on
    /// iOS 15+, falling back to the project's `precomposed()` helper otherwise.
    /// (Extracted: this logic was duplicated four times in the original.)
    private static func decodeImage(atPath path: String) -> UIImage? {
        if #available(iOS 15.0, *) {
            return UIImage(contentsOfFile: path)?.preparingForDisplay()
        } else {
            return UIImage(contentsOfFile: path)?.precomposed()
        }
    }

    private func updateImage(image: UIImage) {
        self.contentView.image = image
    }

    /// Updates the displayed media and lays out the content view aspect-filled
    /// within `size`.
    ///
    /// - Parameters:
    ///   - attemptSynchronous: If true and the resource is already on disk,
    ///     decodes it immediately instead of going through the async pipeline.
    ///   - isCaptureProtected: Currently unused in this view — TODO(review):
    ///     confirm whether capture protection should be applied here.
    func update(context: AccountContext, peer: EnginePeer, storyId: Int32, media: EngineMedia, size: CGSize, isCaptureProtected: Bool, attemptSynchronous: Bool, transition: Transition) {
        var dimensions: CGSize?
        switch media {
        case let .image(image):
            if let representation = largestImageRepresentation(image.representations) {
                dimensions = representation.dimensions.cgSize

                if self.currentMedia != media {
                    if attemptSynchronous, let path = context.account.postbox.mediaBox.completedResourcePath(id: representation.resource.id, pathExtension: nil) {
                        // Resource already downloaded: decode synchronously.
                        if let image = Self.decodeImage(atPath: path) {
                            self.updateImage(image: image)
                        }
                    } else {
                        // Placeholder: blurred tiny thumbnail embedded in the media object.
                        if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                            self.contentView.image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3)
                        }
                        if let peerReference = PeerReference(peer._asPeer()) {
                            // Fix: dispose the previous fetch before starting a new one
                            // (the original overwrote the disposable, leaking the fetch).
                            self.fetchDisposable?.dispose()
                            self.fetchDisposable = fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(peer.id), userContentType: .image, reference: .media(media: .story(peer: peerReference, id: storyId, media: media._asMedia()), resource: representation.resource), ranges: nil).start()
                        }
                        // Fix: likewise dispose the previous data subscription before replacing it.
                        self.disposable?.dispose()
                        self.disposable = (context.account.postbox.mediaBox.resourceData(representation.resource, option: .complete(waitUntilFetchStatus: false))
                        |> map { result -> UIImage? in
                            guard result.complete else {
                                return nil
                            }
                            return StoryItemImageView.decodeImage(atPath: result.path)
                        }
                        |> deliverOnMainQueue).start(next: { [weak self] image in
                            guard let self else {
                                return
                            }
                            if let image {
                                self.updateImage(image: image)
                            }
                        })
                    }
                }
            }
        case let .file(file):
            dimensions = file.dimensions?.cgSize

            if self.currentMedia != media {
                let cachedPath = context.account.postbox.mediaBox.cachedRepresentationCompletePath(file.resource.id, representation: CachedVideoFirstFrameRepresentation())

                if attemptSynchronous, FileManager.default.fileExists(atPath: cachedPath) {
                    // First frame already generated: decode synchronously.
                    if let image = Self.decodeImage(atPath: cachedPath) {
                        self.updateImage(image: image)
                    }
                } else {
                    // Placeholder while the first frame is generated (fetch: true).
                    if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                        self.contentView.image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3)
                    }
                    // Fix: dispose the previous subscription before replacing it.
                    self.disposable?.dispose()
                    self.disposable = (context.account.postbox.mediaBox.cachedResourceRepresentation(file.resource, representation: CachedVideoFirstFrameRepresentation(), complete: true, fetch: true, attemptSynchronously: false)
                    |> map { result -> UIImage? in
                        guard result.complete else {
                            return nil
                        }
                        return StoryItemImageView.decodeImage(atPath: result.path)
                    }
                    |> deliverOnMainQueue).start(next: { [weak self] image in
                        guard let self else {
                            return
                        }
                        if let image {
                            self.updateImage(image: image)
                        }
                    })
                }
            }
        default:
            break
        }
        self.currentMedia = media

        // Aspect-fill the content within the available size, centered.
        if let dimensions {
            let filledSize = dimensions.aspectFilled(size)
            let contentFrame = CGRect(origin: CGPoint(x: floor((size.width - filledSize.width) * 0.5), y: floor((size.height - filledSize.height) * 0.5)), size: filledSize)
            transition.setFrame(view: self.contentView, frame: contentFrame)
        }
    }
}

View File

@ -33,6 +33,18 @@ import PeerListItemComponent
import PremiumUI
import AttachmentUI
/// An opaque, shareable bundle of reaction items available for stories.
///
/// Equality is reference identity: two instances compare equal only when they
/// are the very same object, so a new instance always invalidates component
/// comparisons that hold one.
public final class StoryAvailableReactions: Equatable {
    /// The reactions that can be applied to a story item.
    let reactionItems: [ReactionItem]

    init(reactionItems: [ReactionItem]) {
        self.reactionItems = reactionItems
    }

    public static func == (a: StoryAvailableReactions, b: StoryAvailableReactions) -> Bool {
        // Identity comparison by design — contents are never inspected.
        a === b
    }
}
public final class StoryItemSetContainerComponent: Component {
public final class ExternalState {
public fileprivate(set) var derivedBottomInset: CGFloat = 0.0
@ -63,6 +75,7 @@ public final class StoryItemSetContainerComponent: Component {
public let context: AccountContext
public let externalState: ExternalState
public let storyItemSharedState: StoryContentItem.SharedState
public let availableReactions: StoryAvailableReactions?
public let slice: StoryContentContextState.FocusedSlice
public let theme: PresentationTheme
public let strings: PresentationStrings
@ -84,11 +97,13 @@ public final class StoryItemSetContainerComponent: Component {
public let markAsSeen: (StoryId) -> Void
public let controller: () -> ViewController?
public let toggleAmbientMode: () -> Void
public let keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>
public init(
context: AccountContext,
externalState: ExternalState,
storyItemSharedState: StoryContentItem.SharedState,
availableReactions: StoryAvailableReactions?,
slice: StoryContentContextState.FocusedSlice,
theme: PresentationTheme,
strings: PresentationStrings,
@ -109,11 +124,13 @@ public final class StoryItemSetContainerComponent: Component {
delete: @escaping () -> Void,
markAsSeen: @escaping (StoryId) -> Void,
controller: @escaping () -> ViewController?,
toggleAmbientMode: @escaping () -> Void
toggleAmbientMode: @escaping () -> Void,
keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>
) {
self.context = context
self.externalState = externalState
self.storyItemSharedState = storyItemSharedState
self.availableReactions = availableReactions
self.slice = slice
self.theme = theme
self.strings = strings
@ -135,6 +152,7 @@ public final class StoryItemSetContainerComponent: Component {
self.markAsSeen = markAsSeen
self.controller = controller
self.toggleAmbientMode = toggleAmbientMode
self.keyboardInputData = keyboardInputData
}
public static func ==(lhs: StoryItemSetContainerComponent, rhs: StoryItemSetContainerComponent) -> Bool {
@ -307,7 +325,6 @@ public final class StoryItemSetContainerComponent: Component {
var scrollingOffsetX: CGFloat = 0.0
var scrollingCenterX: CGFloat = 0.0
var reactionItems: [ReactionItem]?
var reactionContextNode: ReactionContextNode?
weak var disappearingReactionContextNode: ReactionContextNode?
@ -1410,23 +1427,7 @@ public final class StoryItemSetContainerComponent: Component {
}
if self.component == nil {
self.sendMessageContext.setup(context: component.context, view: self, inputPanelExternalState: self.inputPanelExternalState)
let _ = (allowedStoryReactions(context: component.context)
|> deliverOnMainQueue).start(next: { [weak self] reactionItems in
guard let self, let component = self.component else {
return
}
component.controller()?.forEachController { c in
if let c = c as? UndoOverlayController {
c.dismiss()
}
return true
}
self.reactionItems = reactionItems
})
self.sendMessageContext.setup(context: component.context, view: self, inputPanelExternalState: self.inputPanelExternalState, keyboardInputData: component.keyboardInputData)
}
if self.component?.slice.item.storyItem.id != component.slice.item.storyItem.id {
@ -2295,7 +2296,7 @@ public final class StoryItemSetContainerComponent: Component {
effectiveDisplayReactions = true
}
if let reactionItems = self.reactionItems, effectiveDisplayReactions {
if let reactionItems = component.availableReactions?.reactionItems, effectiveDisplayReactions {
let reactionContextNode: ReactionContextNode
var reactionContextNodeTransition = transition
if let current = self.reactionContextNode {

View File

@ -59,7 +59,6 @@ final class StoryItemSetContainerSendMessage {
var recordedAudioPreview: ChatRecordedMediaPreview?
var inputMediaNodeData: ChatEntityKeyboardInputNode.InputData?
var inputMediaNodeDataPromise = Promise<ChatEntityKeyboardInputNode.InputData>()
var inputMediaNodeDataDisposable: Disposable?
var inputMediaNodeStateContext = ChatEntityKeyboardInputNode.StateContext()
var inputMediaInteraction: ChatEntityKeyboardInputNode.Interaction?
@ -78,13 +77,6 @@ final class StoryItemSetContainerSendMessage {
var wasRecordingDismissed: Bool = false
init() {
self.inputMediaNodeDataDisposable = (self.inputMediaNodeDataPromise.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
self.inputMediaNodeData = value
})
}
deinit {
@ -95,22 +87,20 @@ final class StoryItemSetContainerSendMessage {
self.inputMediaNodeDataDisposable?.dispose()
}
func setup(context: AccountContext, view: StoryItemSetContainerComponent.View, inputPanelExternalState: MessageInputPanelComponent.ExternalState) {
func setup(context: AccountContext, view: StoryItemSetContainerComponent.View, inputPanelExternalState: MessageInputPanelComponent.ExternalState, keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>) {
self.context = context
self.inputPanelExternalState = inputPanelExternalState
self.view = view
self.inputMediaNodeDataPromise.set(
ChatEntityKeyboardInputNode.inputData(
context: context,
chatPeerId: nil,
areCustomEmojiEnabled: true,
hasTrending: false,
hasSearch: false,
hideBackground: true,
sendGif: nil
)
)
if self.inputMediaNodeDataDisposable == nil {
self.inputMediaNodeDataDisposable = (keyboardInputData
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
self.inputMediaNodeData = value
})
}
self.inputMediaInteraction = ChatEntityKeyboardInputNode.Interaction(
sendSticker: { [weak self] fileReference, _, _, _, _, _, _, _, _ in
@ -194,7 +184,7 @@ final class StoryItemSetContainerSendMessage {
return
}
if case .media = self.currentInputMode, let inputData = self.inputMediaNodeData {
if let component = self.view?.component, case .media = self.currentInputMode, let inputData = self.inputMediaNodeData {
let inputMediaNode: ChatEntityKeyboardInputNode
if let current = self.inputMediaNode {
inputMediaNode = current
@ -202,7 +192,7 @@ final class StoryItemSetContainerSendMessage {
inputMediaNode = ChatEntityKeyboardInputNode(
context: context,
currentInputData: inputData,
updatedInputData: self.inputMediaNodeDataPromise.get(),
updatedInputData: component.keyboardInputData,
defaultToEmojiTab: self.inputPanelExternalState?.hasText ?? false,
opaqueTopPanelBackground: false,
interaction: self.inputMediaInteraction,

View File

@ -693,7 +693,7 @@ public final class StoryPeerListComponent: Component {
expandBoundsFraction = 0.0
}
let blurRadius: CGFloat = collapsedState.sideAlphaFraction * 0.0 + (1.0 - collapsedState.sideAlphaFraction) * 14.0
/*let blurRadius: CGFloat = collapsedState.sideAlphaFraction * 0.0 + (1.0 - collapsedState.sideAlphaFraction) * 14.0
if blurRadius == 0.0 {
self.sharedBlurEffect = nil
} else {
@ -706,7 +706,7 @@ public final class StoryPeerListComponent: Component {
self.sharedBlurEffect = nil
}
}
}
}*/
var targetCollapsedContentWidth: CGFloat = 0.0
if collapsedItemCount > 0 {

View File

@ -52,8 +52,9 @@ public final class NativeVideoContent: UniversalVideoContent {
let hintDimensions: CGSize?
let storeAfterDownload: (() -> Void)?
let displayImage: Bool
let hasSentFramesToDisplay: (() -> Void)?
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true) {
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
self.id = id
self.nativeId = id
self.userLocation = userLocation
@ -92,10 +93,11 @@ public final class NativeVideoContent: UniversalVideoContent {
self.hintDimensions = hintDimensions
self.storeAfterDownload = storeAfterDownload
self.displayImage = displayImage
self.hasSentFramesToDisplay = hasSentFramesToDisplay
}
public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage)
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -173,7 +175,9 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private var shouldPlay: Bool = false
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool) {
private let hasSentFramesToDisplay: (() -> Void)?
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox
self.userLocation = userLocation
self.fileReference = fileReference
@ -186,6 +190,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.isAudioVideoMessage = isAudioVideoMessage
self.captureProtected = captureProtected
self.displayImage = displayImage
self.hasSentFramesToDisplay = hasSentFramesToDisplay
self.imageNode = TransformImageNode()
@ -211,6 +216,15 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
super.init()
var didProcessFramesToDisplay = false
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard let self, !didProcessFramesToDisplay else {
return
}
didProcessFramesToDisplay = true
self.hasSentFramesToDisplay?()
}
if let dimensions = hintDimensions {
self.dimensions = dimensions
self.dimensionsPromise.set(dimensions)
@ -330,10 +344,13 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
var processedSentFramesToDisplay = false
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard !processedSentFramesToDisplay, let _ = self else {
guard !processedSentFramesToDisplay, let strongSelf = self else {
return
}
processedSentFramesToDisplay = true
strongSelf.hasSentFramesToDisplay?()
Queue.mainQueue().after(0.1, {
guard let strongSelf = self else {
return