Re-implement story display

This commit is contained in:
Ali 2023-07-01 17:56:25 +02:00
parent e5e5bc1eac
commit d6434fa17f
11 changed files with 334 additions and 127 deletions

View File

@ -136,6 +136,8 @@ public class InteractiveTransitionGestureRecognizer: UIPanGestureRecognizer {
let size = self.view?.bounds.size ?? CGSize()
print("moved: \(CFAbsoluteTimeGetCurrent()) absTranslationX: \(absTranslationX) absTranslationY: \(absTranslationY)")
if self.currentAllowedDirections.contains(.down) {
if !self.validatedGesture {
if absTranslationX > 2.0 && absTranslationX > absTranslationY * 2.0 {

View File

@ -115,7 +115,9 @@ public final class MediaPlayerNode: ASDisplayNode {
if abs(rotationAngle).remainder(dividingBy: Double.pi) > 0.1 {
transform = transform.scaledBy(x: CGFloat(aspect), y: CGFloat(1.0 / aspect))
}
videoLayer.setAffineTransform(transform)
if videoLayer.affineTransform() != transform {
videoLayer.setAffineTransform(transform)
}
}
if self.videoInHierarchy || self.canPlaybackWithoutHierarchy {
@ -435,6 +437,9 @@ public final class MediaPlayerNode: ASDisplayNode {
private func updateLayout() {
let bounds = self.bounds
if bounds.isEmpty {
return
}
let fittedRect: CGRect
if let arguments = self.transformArguments {

View File

@ -12,7 +12,8 @@ swift_library(
deps = [
"//submodules/Display",
"//submodules/AsyncDisplayKit",
"//submodules/AnimationUI",
"//submodules/ComponentFlow",
"//submodules/TelegramUI/Components/LottieComponent",
],
visibility = [
"//visibility:public",

View File

@ -2,7 +2,8 @@ import Foundation
import UIKit
import Display
import AsyncDisplayKit
import AnimationUI
import ComponentFlow
import LottieComponent
public final class MoreHeaderButton: HighlightableButtonNode {
public enum Content {
@ -13,7 +14,7 @@ public final class MoreHeaderButton: HighlightableButtonNode {
public let referenceNode: ContextReferenceContentNode
public let containerNode: ContextControllerSourceNode
private let iconNode: ASImageNode
private var animationNode: AnimationNode?
private let animationView = ComponentView<Empty>()
public var contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
@ -70,15 +71,23 @@ public final class MoreHeaderButton: HighlightableButtonNode {
private var content: Content?
public func setContent(_ content: Content, animated: Bool = false) {
if case .more = content, self.animationNode == nil {
let iconColor = self.color
let animationNode = AnimationNode(animation: "anim_profilemore", colors: ["Point 2.Group 1.Fill 1": iconColor,
"Point 3.Group 1.Fill 1": iconColor,
"Point 1.Group 1.Fill 1": iconColor], scale: 1.0)
if case .more = content {
let animationSize = CGSize(width: 22.0, height: 22.0)
animationNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - animationSize.width) / 2.0), y: floor((self.containerNode.bounds.height - animationSize.height) / 2.0)), size: animationSize)
self.addSubnode(animationNode)
self.animationNode = animationNode
let _ = self.animationView.update(
transition: .immediate,
component: AnyComponent(LottieComponent(
content: LottieComponent.AppBundleContent(name: "anim_profilemore"),
color: self.color
)),
environment: {},
containerSize: animationSize
)
if let animationComponentView = self.animationView.view {
if animationComponentView.superview == nil {
self.view.addSubview(animationComponentView)
}
animationComponentView.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - animationSize.width) / 2.0), y: floor((self.containerNode.bounds.height - animationSize.height) / 2.0)), size: animationSize)
}
}
if animated {
if let snapshotView = self.referenceNode.view.snapshotContentTree() {
@ -93,8 +102,10 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.iconNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
self.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.animationNode?.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
if let animationComponentView = self.animationView.view {
animationComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
animationComponentView.layer.animateScale(from: 0.1, to: 1.0, duration: 0.3)
}
}
switch content {
@ -105,7 +116,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = true
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = true
}
case let .more(image):
if let image = image {
self.iconNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - image.size.width) / 2.0), y: floor((self.containerNode.bounds.height - image.size.height) / 2.0)), size: image.size)
@ -113,7 +126,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = false
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = false
}
}
} else {
self.content = content
@ -125,7 +140,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = true
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = true
}
case let .more(image):
if let image = image {
self.iconNode.frame = CGRect(origin: CGPoint(x: floor((self.containerNode.bounds.width - image.size.width) / 2.0), y: floor((self.containerNode.bounds.height - image.size.height) / 2.0)), size: image.size)
@ -133,7 +150,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
self.iconNode.image = image
self.iconNode.isHidden = false
self.animationNode?.isHidden = false
if let animationComponentView = self.animationView.view {
animationComponentView.isHidden = false
}
}
}
}
@ -151,7 +170,9 @@ public final class MoreHeaderButton: HighlightableButtonNode {
}
public func play() {
self.animationNode?.playOnce()
if let animationComponentView = self.animationView.view as? LottieComponent.View {
animationComponentView.playOnce()
}
}
public static func optionsCircleImage(color: UIColor) -> UIImage? {

View File

@ -65,9 +65,13 @@ swift_library(
"//submodules/OverlayStatusController",
"//submodules/Utils/VolumeButtons",
"//submodules/TelegramUI/Components/PeerReportScreen",
"//submodules/MediaResources",
"//submodules/LocalMediaResources",
"//submodules/SaveToCameraRoll",
"//submodules/Components/BundleIconComponent",
"//submodules/TinyThumbnail",
"//submodules/ImageBlur",
],
visibility = [
"//visibility:public",

View File

@ -18,6 +18,7 @@ import AttachmentUI
import simd
import VolumeButtons
import TooltipUI
import ChatEntityKeyboardInputNode
func hasFirstResponder(_ view: UIView) -> Bool {
if view.isFirstResponder {
@ -178,6 +179,10 @@ private final class StoryContainerScreenComponent: Component {
private var volumeButtonsListener: VolumeButtonsListener?
private let volumeButtonsListenerShouldBeActive = ValuePromise<Bool>(false, ignoreRepeated: true)
private let inputMediaNodeDataPromise = Promise<ChatEntityKeyboardInputNode.InputData>()
private var availableReactions: StoryAvailableReactions?
private var isAnimatingOut: Bool = false
private var didAnimateOut: Bool = false
@ -389,6 +394,7 @@ private final class StoryContainerScreenComponent: Component {
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began:
print("began: \(CFAbsoluteTimeGetCurrent())")
self.beginHorizontalPan(translation: recognizer.translation(in: self))
case .changed:
self.updateHorizontalPan(translation: recognizer.translation(in: self))
@ -655,6 +661,34 @@ private final class StoryContainerScreenComponent: Component {
self.environment = environment
if self.component?.content !== component.content {
if self.component == nil {
var update = false
let _ = (allowedStoryReactions(context: component.context)
|> deliverOnMainQueue).start(next: { [weak self] reactionItems in
guard let self else {
return
}
self.availableReactions = StoryAvailableReactions(reactionItems: reactionItems)
if update {
self.state?.updated(transition: .immediate)
}
})
update = true
self.inputMediaNodeDataPromise.set(
ChatEntityKeyboardInputNode.inputData(
context: component.context,
chatPeerId: nil,
areCustomEmojiEnabled: true,
hasTrending: false,
hasSearch: false,
hideBackground: true,
sendGif: nil
)
)
}
self.contentUpdatedDisposable?.dispose()
var update = false
self.contentUpdatedDisposable = (component.content.updated
@ -821,6 +855,7 @@ private final class StoryContainerScreenComponent: Component {
context: component.context,
externalState: itemSetView.externalState,
storyItemSharedState: self.storyItemSharedState,
availableReactions: self.availableReactions,
slice: slice,
theme: environment.theme,
strings: environment.strings,
@ -940,7 +975,8 @@ private final class StoryContainerScreenComponent: Component {
}
}
}
}
},
keyboardInputData: self.inputMediaNodeDataPromise.get()
)),
environment: {},
containerSize: itemSetContainerSize

View File

@ -50,7 +50,7 @@ final class StoryItemContentComponent: Component {
}
final class View: StoryContentItem.View {
private let imageNode: TransformImageNode
private let imageView: StoryItemImageView
private var videoNode: UniversalVideoNode?
private var currentMessageMedia: EngineMedia?
@ -80,13 +80,13 @@ final class StoryItemContentComponent: Component {
override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.imageNode = TransformImageNode()
self.imageView = StoryItemImageView()
super.init(frame: frame)
self.layer.addSublayer(self.hierarchyTrackingLayer)
self.addSubnode(self.imageNode)
self.addSubview(self.imageView)
self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
guard let self else {
@ -144,10 +144,17 @@ final class StoryItemContentComponent: Component {
captureProtected: component.item.isForwardingDisabled,
hintDimensions: file.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false
displayImage: false,
hasSentFramesToDisplay: { [weak self] in
guard let self else {
return
}
self.videoNode?.isHidden = false
}
),
priority: .gallery
)
videoNode.isHidden = true
self.videoNode = videoNode
self.addSubnode(videoNode)
@ -372,6 +379,8 @@ final class StoryItemContentComponent: Component {
synchronousLoad = hint.synchronousLoad
}
let startTime = CFAbsoluteTimeGetCurrent()
let peerReference = PeerReference(component.peer._asPeer())
var messageMedia: EngineMedia?
@ -398,45 +407,13 @@ final class StoryItemContentComponent: Component {
}
if reloadMedia, let messageMedia, let peerReference {
var signal: Signal<(TransformImageArguments) -> DrawingContext?, NoError>?
var fetchSignal: Signal<Never, NoError>?
switch messageMedia {
case let .image(image):
signal = chatMessagePhoto(
postbox: component.context.account.postbox,
userLocation: .other,
photoReference: .story(peer: peerReference, id: component.item.id, media: image),
synchronousLoad: synchronousLoad,
highQuality: true
)
if let representation = image.representations.last {
fetchSignal = fetchedMediaResource(
mediaBox: component.context.account.postbox.mediaBox,
userLocation: .other,
userContentType: .image,
reference: ImageMediaReference.story(peer: peerReference, id: component.item.id, media: image).resourceReference(representation.resource)
)
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}
case .image:
self.contentLoaded = true
case let .file(file):
self.contentLoaded = true
signal = mediaGridMessageVideo(
postbox: component.context.account.postbox,
userLocation: .other,
videoReference: .story(peer: peerReference, id: component.item.id, media: file),
onlyFullSize: false,
useLargeThumbnail: false,
synchronousLoad: synchronousLoad,
autoFetchFullSizeThumbnail: false,
overlayColor: nil,
nilForEmptyResult: false,
useMiniThumbnailIfAvailable: false,
blurred: false
)
fetchSignal = fetchedMediaResource(
mediaBox: component.context.account.postbox.mediaBox,
userLocation: .other,
@ -451,20 +428,6 @@ final class StoryItemContentComponent: Component {
break
}
if let signal {
var wasSynchronous = true
self.imageNode.setSignal(signal |> afterCompleted { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
self.performActionAfterImageContentLoaded(update: !wasSynchronous)
}
}, attemptSynchronously: true)
wasSynchronous = false
}
self.performActionAfterImageContentLoaded(update: false)
self.fetchDisposable?.dispose()
@ -483,6 +446,18 @@ final class StoryItemContentComponent: Component {
}
if let messageMedia {
self.imageView.update(
context: component.context,
peer: component.peer,
storyId: component.item.id,
media: component.item.media,
size: availableSize,
isCaptureProtected: component.item.isForwardingDisabled,
attemptSynchronous: synchronousLoad,
transition: transition
)
transition.setFrame(view: self.imageView, frame: CGRect(origin: CGPoint(), size: availableSize))
var dimensions: CGSize?
switch messageMedia {
case let .image(image):
@ -501,14 +476,7 @@ final class StoryItemContentComponent: Component {
if imageSize.height < availableSize.height && imageSize.height >= availableSize.height - 5.0 {
imageSize.height = availableSize.height
}
self.imageNode.captureProtected = component.item.isForwardingDisabled
let apply = self.imageNode.asyncLayout()(TransformImageArguments(
corners: ImageCorners(),
imageSize: imageSize,
boundingSize: availableSize,
intrinsicInsets: UIEdgeInsets()
))
apply()
let _ = imageSize
if let videoNode = self.videoNode {
let videoSize = dimensions.aspectFilled(availableSize)
@ -516,7 +484,6 @@ final class StoryItemContentComponent: Component {
videoNode.updateLayout(size: videoSize, transition: .immediate)
}
}
self.imageNode.frame = CGRect(origin: CGPoint(), size: availableSize)
}
switch component.item.media {
@ -614,6 +581,10 @@ final class StoryItemContentComponent: Component {
self.updateIsProgressPaused(update: false)
if reloadMedia && synchronousLoad {
print("\(CFAbsoluteTimeGetCurrent()) Synchronous: \((CFAbsoluteTimeGetCurrent() - startTime) * 1000.0) ms")
}
return availableSize
}
}

View File

@ -0,0 +1,159 @@
import Foundation
import UIKit
import AccountContext
import TelegramCore
import Postbox
import SwiftSignalKit
import ComponentFlow
import TinyThumbnail
import ImageBlur
import MediaResources
final class StoryItemImageView: UIView {
    // Hosts the decoded image; aspect-fills inside this view's bounds.
    private let contentView: UIImageView
    
    // Last media this view was updated with; used to skip redundant reloads.
    private var currentMedia: EngineMedia?
    // Subscription delivering the decoded full-size image from the media box.
    private var disposable: Disposable?
    // In-flight resource fetch for the full-size photo data (photo path only).
    private var fetchDisposable: Disposable?
    
    override init(frame: CGRect) {
        self.contentView = UIImageView()
        self.contentView.contentMode = .scaleAspectFill
        
        super.init(frame: frame)
        
        self.addSubview(self.contentView)
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    deinit {
        self.disposable?.dispose()
        // Fix: also cancel any in-flight fetch; this disposable was previously leaked.
        self.fetchDisposable?.dispose()
    }
    
    private func updateImage(image: UIImage) {
        self.contentView.image = image
    }
    
    /// Loads and pre-decodes the image at `path`, preferring the iOS 15
    /// `preparingForDisplay()` API when available. Returns nil on failure.
    private static func decodeImage(atPath path: String) -> UIImage? {
        if #available(iOS 15.0, *) {
            return UIImage(contentsOfFile: path)?.preparingForDisplay()
        } else {
            return UIImage(contentsOfFile: path)?.precomposed()
        }
    }
    
    /// Cancels and clears any pending load/fetch work for the previously
    /// displayed media so a stale result cannot overwrite the new image.
    private func cancelPendingWork() {
        self.disposable?.dispose()
        self.disposable = nil
        self.fetchDisposable?.dispose()
        self.fetchDisposable = nil
    }
    
    /// Updates the displayed image for the given story media.
    ///
    /// Photos: if a completed resource is cached and `attemptSynchronous` is set,
    /// decodes it synchronously; otherwise shows a blurred tiny-thumbnail
    /// placeholder, starts a fetch, and subscribes for the completed data.
    /// Videos: same flow using the cached first-frame representation
    /// (`cachedResourceRepresentation` with `fetch: true` drives the download).
    /// - Note: `isCaptureProtected` is currently unused here — TODO confirm
    ///   whether capture protection must be applied to the content layer.
    func update(context: AccountContext, peer: EnginePeer, storyId: Int32, media: EngineMedia, size: CGSize, isCaptureProtected: Bool, attemptSynchronous: Bool, transition: Transition) {
        var dimensions: CGSize?
        switch media {
        case let .image(image):
            if let representation = largestImageRepresentation(image.representations) {
                dimensions = representation.dimensions.cgSize
                
                if self.currentMedia != media {
                    self.cancelPendingWork()
                    
                    if attemptSynchronous, let path = context.account.postbox.mediaBox.completedResourcePath(id: representation.resource.id, pathExtension: nil) {
                        if let image = Self.decodeImage(atPath: path) {
                            self.updateImage(image: image)
                        }
                    } else {
                        // Placeholder: blurred tiny thumbnail while the full image loads.
                        if let thumbnailData = image.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                            self.contentView.image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3)
                        }
                        
                        if let peerReference = PeerReference(peer._asPeer()) {
                            self.fetchDisposable = fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(peer.id), userContentType: .image, reference: .media(media: .story(peer: peerReference, id: storyId, media: media._asMedia()), resource: representation.resource), ranges: nil).start()
                        }
                        
                        self.disposable = (context.account.postbox.mediaBox.resourceData(representation.resource, option: .complete(waitUntilFetchStatus: false))
                        |> map { result -> UIImage? in
                            if result.complete {
                                return StoryItemImageView.decodeImage(atPath: result.path)
                            } else {
                                return nil
                            }
                        }
                        |> deliverOnMainQueue).start(next: { [weak self] image in
                            guard let self else {
                                return
                            }
                            if let image {
                                self.updateImage(image: image)
                            }
                        })
                    }
                }
            }
        case let .file(file):
            dimensions = file.dimensions?.cgSize
            
            if self.currentMedia != media {
                self.cancelPendingWork()
                
                let cachedPath = context.account.postbox.mediaBox.cachedRepresentationCompletePath(file.resource.id, representation: CachedVideoFirstFrameRepresentation())
                if attemptSynchronous, FileManager.default.fileExists(atPath: cachedPath) {
                    if let image = Self.decodeImage(atPath: cachedPath) {
                        self.updateImage(image: image)
                    }
                } else {
                    // Placeholder: blurred tiny thumbnail while the first frame renders.
                    if let thumbnailData = file.immediateThumbnailData.flatMap(decodeTinyThumbnail), let thumbnailImage = UIImage(data: thumbnailData) {
                        self.contentView.image = blurredImage(thumbnailImage, radius: 10.0, iterations: 3)
                    }
                    
                    self.disposable = (context.account.postbox.mediaBox.cachedResourceRepresentation(file.resource, representation: CachedVideoFirstFrameRepresentation(), complete: true, fetch: true, attemptSynchronously: false)
                    |> map { result -> UIImage? in
                        if result.complete {
                            return StoryItemImageView.decodeImage(atPath: result.path)
                        } else {
                            return nil
                        }
                    }
                    |> deliverOnMainQueue).start(next: { [weak self] image in
                        guard let self else {
                            return
                        }
                        if let image {
                            self.updateImage(image: image)
                        }
                    })
                }
            }
        default:
            break
        }
        self.currentMedia = media
        
        if let dimensions {
            // Aspect-fill the content and center it within the available size.
            let filledSize = dimensions.aspectFilled(size)
            let contentFrame = CGRect(origin: CGPoint(x: floor((size.width - filledSize.width) * 0.5), y: floor((size.height - filledSize.height) * 0.5)), size: filledSize)
            transition.setFrame(view: self.contentView, frame: contentFrame)
        }
    }
}

View File

@ -33,6 +33,18 @@ import PeerListItemComponent
import PremiumUI
import AttachmentUI
/// Immutable container for the set of reactions available on a story.
/// A class (rather than a struct) so equality can be cheap reference
/// identity: the item list is loaded once and then reused unchanged.
public final class StoryAvailableReactions: Equatable {
    /// The reaction options the user may pick from.
    let reactionItems: [ReactionItem]
    
    init(reactionItems: [ReactionItem]) {
        self.reactionItems = reactionItems
    }
    
    /// Identity comparison: two values are equal only when they are the same
    /// instance. NOTE(review): relies on callers passing one shared instance
    /// through component updates — confirm against call sites.
    public static func ==(lhs: StoryAvailableReactions, rhs: StoryAvailableReactions) -> Bool {
        return lhs === rhs
    }
}
public final class StoryItemSetContainerComponent: Component {
public final class ExternalState {
public fileprivate(set) var derivedBottomInset: CGFloat = 0.0
@ -63,6 +75,7 @@ public final class StoryItemSetContainerComponent: Component {
public let context: AccountContext
public let externalState: ExternalState
public let storyItemSharedState: StoryContentItem.SharedState
public let availableReactions: StoryAvailableReactions?
public let slice: StoryContentContextState.FocusedSlice
public let theme: PresentationTheme
public let strings: PresentationStrings
@ -84,11 +97,13 @@ public final class StoryItemSetContainerComponent: Component {
public let markAsSeen: (StoryId) -> Void
public let controller: () -> ViewController?
public let toggleAmbientMode: () -> Void
public let keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>
public init(
context: AccountContext,
externalState: ExternalState,
storyItemSharedState: StoryContentItem.SharedState,
availableReactions: StoryAvailableReactions?,
slice: StoryContentContextState.FocusedSlice,
theme: PresentationTheme,
strings: PresentationStrings,
@ -109,11 +124,13 @@ public final class StoryItemSetContainerComponent: Component {
delete: @escaping () -> Void,
markAsSeen: @escaping (StoryId) -> Void,
controller: @escaping () -> ViewController?,
toggleAmbientMode: @escaping () -> Void
toggleAmbientMode: @escaping () -> Void,
keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>
) {
self.context = context
self.externalState = externalState
self.storyItemSharedState = storyItemSharedState
self.availableReactions = availableReactions
self.slice = slice
self.theme = theme
self.strings = strings
@ -135,6 +152,7 @@ public final class StoryItemSetContainerComponent: Component {
self.markAsSeen = markAsSeen
self.controller = controller
self.toggleAmbientMode = toggleAmbientMode
self.keyboardInputData = keyboardInputData
}
public static func ==(lhs: StoryItemSetContainerComponent, rhs: StoryItemSetContainerComponent) -> Bool {
@ -307,7 +325,6 @@ public final class StoryItemSetContainerComponent: Component {
var scrollingOffsetX: CGFloat = 0.0
var scrollingCenterX: CGFloat = 0.0
var reactionItems: [ReactionItem]?
var reactionContextNode: ReactionContextNode?
weak var disappearingReactionContextNode: ReactionContextNode?
@ -1410,23 +1427,7 @@ public final class StoryItemSetContainerComponent: Component {
}
if self.component == nil {
self.sendMessageContext.setup(context: component.context, view: self, inputPanelExternalState: self.inputPanelExternalState)
let _ = (allowedStoryReactions(context: component.context)
|> deliverOnMainQueue).start(next: { [weak self] reactionItems in
guard let self, let component = self.component else {
return
}
component.controller()?.forEachController { c in
if let c = c as? UndoOverlayController {
c.dismiss()
}
return true
}
self.reactionItems = reactionItems
})
self.sendMessageContext.setup(context: component.context, view: self, inputPanelExternalState: self.inputPanelExternalState, keyboardInputData: component.keyboardInputData)
}
if self.component?.slice.item.storyItem.id != component.slice.item.storyItem.id {
@ -2295,7 +2296,7 @@ public final class StoryItemSetContainerComponent: Component {
effectiveDisplayReactions = true
}
if let reactionItems = self.reactionItems, effectiveDisplayReactions {
if let reactionItems = component.availableReactions?.reactionItems, effectiveDisplayReactions {
let reactionContextNode: ReactionContextNode
var reactionContextNodeTransition = transition
if let current = self.reactionContextNode {

View File

@ -59,7 +59,6 @@ final class StoryItemSetContainerSendMessage {
var recordedAudioPreview: ChatRecordedMediaPreview?
var inputMediaNodeData: ChatEntityKeyboardInputNode.InputData?
var inputMediaNodeDataPromise = Promise<ChatEntityKeyboardInputNode.InputData>()
var inputMediaNodeDataDisposable: Disposable?
var inputMediaNodeStateContext = ChatEntityKeyboardInputNode.StateContext()
var inputMediaInteraction: ChatEntityKeyboardInputNode.Interaction?
@ -78,13 +77,6 @@ final class StoryItemSetContainerSendMessage {
var wasRecordingDismissed: Bool = false
init() {
self.inputMediaNodeDataDisposable = (self.inputMediaNodeDataPromise.get()
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
self.inputMediaNodeData = value
})
}
deinit {
@ -95,22 +87,20 @@ final class StoryItemSetContainerSendMessage {
self.inputMediaNodeDataDisposable?.dispose()
}
func setup(context: AccountContext, view: StoryItemSetContainerComponent.View, inputPanelExternalState: MessageInputPanelComponent.ExternalState) {
func setup(context: AccountContext, view: StoryItemSetContainerComponent.View, inputPanelExternalState: MessageInputPanelComponent.ExternalState, keyboardInputData: Signal<ChatEntityKeyboardInputNode.InputData, NoError>) {
self.context = context
self.inputPanelExternalState = inputPanelExternalState
self.view = view
self.inputMediaNodeDataPromise.set(
ChatEntityKeyboardInputNode.inputData(
context: context,
chatPeerId: nil,
areCustomEmojiEnabled: true,
hasTrending: false,
hasSearch: false,
hideBackground: true,
sendGif: nil
)
)
if self.inputMediaNodeDataDisposable == nil {
self.inputMediaNodeDataDisposable = (keyboardInputData
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
self.inputMediaNodeData = value
})
}
self.inputMediaInteraction = ChatEntityKeyboardInputNode.Interaction(
sendSticker: { [weak self] fileReference, _, _, _, _, _, _, _, _ in
@ -194,7 +184,7 @@ final class StoryItemSetContainerSendMessage {
return
}
if case .media = self.currentInputMode, let inputData = self.inputMediaNodeData {
if let component = self.view?.component, case .media = self.currentInputMode, let inputData = self.inputMediaNodeData {
let inputMediaNode: ChatEntityKeyboardInputNode
if let current = self.inputMediaNode {
inputMediaNode = current
@ -202,7 +192,7 @@ final class StoryItemSetContainerSendMessage {
inputMediaNode = ChatEntityKeyboardInputNode(
context: context,
currentInputData: inputData,
updatedInputData: self.inputMediaNodeDataPromise.get(),
updatedInputData: component.keyboardInputData,
defaultToEmojiTab: self.inputPanelExternalState?.hasText ?? false,
opaqueTopPanelBackground: false,
interaction: self.inputMediaInteraction,

View File

@ -52,8 +52,9 @@ public final class NativeVideoContent: UniversalVideoContent {
let hintDimensions: CGSize?
let storeAfterDownload: (() -> Void)?
let displayImage: Bool
let hasSentFramesToDisplay: (() -> Void)?
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true) {
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference? = nil, streamVideo: MediaPlayerStreaming = .none, loopVideo: Bool = false, enableSound: Bool = true, beginWithAmbientSound: Bool = false, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false, startTimestamp: Double? = nil, endTimestamp: Double? = nil, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor = .white, tempFilePath: String? = nil, isAudioVideoMessage: Bool = false, captureProtected: Bool = false, hintDimensions: CGSize? = nil, storeAfterDownload: (() -> Void)?, displayImage: Bool = true, hasSentFramesToDisplay: (() -> Void)? = nil) {
self.id = id
self.nativeId = id
self.userLocation = userLocation
@ -92,10 +93,11 @@ public final class NativeVideoContent: UniversalVideoContent {
self.hintDimensions = hintDimensions
self.storeAfterDownload = storeAfterDownload
self.displayImage = displayImage
self.hasSentFramesToDisplay = hasSentFramesToDisplay
}
public func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage)
return NativeVideoContentNode(postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, imageReference: self.imageReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, beginWithAmbientSound: self.beginWithAmbientSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail, startTimestamp: self.startTimestamp, endTimestamp: self.endTimestamp, continuePlayingWithoutSoundOnLostAudioSession: self.continuePlayingWithoutSoundOnLostAudioSession, placeholderColor: self.placeholderColor, tempFilePath: self.tempFilePath, isAudioVideoMessage: self.isAudioVideoMessage, captureProtected: self.captureProtected, hintDimensions: self.hintDimensions, storeAfterDownload: self.storeAfterDownload, displayImage: self.displayImage, hasSentFramesToDisplay: self.hasSentFramesToDisplay)
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
@ -173,7 +175,9 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
private var shouldPlay: Bool = false
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool) {
private let hasSentFramesToDisplay: (() -> Void)?
init(postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, imageReference: ImageMediaReference?, streamVideo: MediaPlayerStreaming, loopVideo: Bool, enableSound: Bool, beginWithAmbientSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool, startTimestamp: Double?, endTimestamp: Double?, continuePlayingWithoutSoundOnLostAudioSession: Bool = false, placeholderColor: UIColor, tempFilePath: String?, isAudioVideoMessage: Bool, captureProtected: Bool, hintDimensions: CGSize?, storeAfterDownload: (() -> Void)? = nil, displayImage: Bool, hasSentFramesToDisplay: (() -> Void)?) {
self.postbox = postbox
self.userLocation = userLocation
self.fileReference = fileReference
@ -186,6 +190,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
self.isAudioVideoMessage = isAudioVideoMessage
self.captureProtected = captureProtected
self.displayImage = displayImage
self.hasSentFramesToDisplay = hasSentFramesToDisplay
self.imageNode = TransformImageNode()
@ -211,6 +216,15 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
super.init()
var didProcessFramesToDisplay = false
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard let self, !didProcessFramesToDisplay else {
return
}
didProcessFramesToDisplay = true
self.hasSentFramesToDisplay?()
}
if let dimensions = hintDimensions {
self.dimensions = dimensions
self.dimensionsPromise.set(dimensions)
@ -330,10 +344,13 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
var processedSentFramesToDisplay = false
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard !processedSentFramesToDisplay, let _ = self else {
guard !processedSentFramesToDisplay, let strongSelf = self else {
return
}
processedSentFramesToDisplay = true
strongSelf.hasSentFramesToDisplay?()
Queue.mainQueue().after(0.1, {
guard let strongSelf = self else {
return