Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-07-19 09:41:29 +00:00)

Commit 01e5d87002: Merge branch 'master' into monoforums

# Conflicts:
#	submodules/TelegramUI/Sources/Chat/ChatControllerLoadDisplayNode.swift
#	submodules/TranslateUI/Sources/ChatTranslation.swift
@@ -611,7 +611,7 @@ private final class ActionButtonPanelNode: ASDisplayNode {
private(set) var isAccepted: Bool = false
var isAcceptedUpdated: (() -> Void)?
var openRecurrentTerms: (() -> Void)?
private var recurrentConfirmationNode: RecurrentConfirmationNode?
var recurrentConfirmationNode: RecurrentConfirmationNode?

func update(presentationData: PresentationData, layout: ContainerViewLayout, invoice: BotPaymentInvoice?, botName: String?) -> (CGFloat, CGFloat) {
let bottomPanelVerticalInset: CGFloat = 16.0
@@ -1211,7 +1211,8 @@ final class BotCheckoutControllerNode: ItemListControllerNode, PKPaymentAuthoriz
payString = self.presentationData.strings.CheckoutInfo_Pay
}

self.actionButton.isEnabled = isButtonEnabled
self.actionButton.isEnabled = true
self.actionButton.isImplicitlyDisabled = !isButtonEnabled

if let currentPaymentMethod = self.currentPaymentMethod {
switch currentPaymentMethod {
@@ -1268,8 +1269,12 @@ final class BotCheckoutControllerNode: ItemListControllerNode, PKPaymentAuthoriz
}

@objc func actionButtonPressed() {
if let recurrentConfirmationNode = self.actionButtonPanelNode.recurrentConfirmationNode, !self.actionButtonPanelNode.isAccepted {
recurrentConfirmationNode.layer.addShakeAnimation()
} else {
self.pay()
}
}

private func pay(savedCredentialsToken: TemporaryTwoStepPasswordToken? = nil, liabilityNoticeAccepted: Bool = false, receivedCredentials: BotPaymentCredentials? = nil) {
guard let paymentForm = self.paymentFormValue else {
@@ -1,6 +1,6 @@
#import <FFMpegBinding/FFMpegAVCodec.h>

#import <third_party/ffmpeg/libavcodec/avcodec.h>
#import "libavcodec/avcodec.h"

@interface FFMpegAVCodec () {
AVCodec const *_impl;
@@ -3,8 +3,8 @@
#import <FFMpegBinding/FFMpegAVFrame.h>
#import <FFMpegBinding/FFMpegAVCodec.h>

#import <third_party/ffmpeg/libavformat/avformat.h>
#import <third_party/ffmpeg/libavcodec/avcodec.h>
#import "libavformat/avformat.h"
#import "libavcodec/avcodec.h"

static enum AVPixelFormat getPreferredPixelFormat(__unused AVCodecContext *ctx, __unused const enum AVPixelFormat *pix_fmts) {
return AV_PIX_FMT_VIDEOTOOLBOX;
@@ -4,8 +4,8 @@
#import <FFMpegBinding/FFMpegPacket.h>
#import <FFMpegBinding/FFMpegAVCodecContext.h>

#import <third_party/ffmpeg/libavcodec/avcodec.h>
#import <third_party/ffmpeg/libavformat/avformat.h>
#import "libavcodec/avcodec.h"
#import "libavformat/avformat.h"

int FFMpegCodecIdH264 = AV_CODEC_ID_H264;
int FFMpegCodecIdHEVC = AV_CODEC_ID_HEVC;
@@ -1,6 +1,6 @@
#import <FFMpegBinding/FFMpegAVFrame.h>

#import <third_party/ffmpeg/libavformat/avformat.h>
#import "libavformat/avformat.h"

@interface FFMpegAVFrame () {
AVFrame *_impl;
@@ -1,6 +1,6 @@
#import <FFMpegBinding/FFMpegAVIOContext.h>

#import <third_party/ffmpeg/libavformat/avformat.h>
#import "libavformat/avformat.h"

int FFMPEG_CONSTANT_AVERROR_EOF = AVERROR_EOF;

@@ -1,6 +1,6 @@
#import <FFMpegBinding/FFMpegGlobals.h>

#import <third_party/ffmpeg/libavformat/avformat.h>
#import "libavformat/avformat.h"

@implementation FFMpegGlobals

@@ -1,10 +1,10 @@
#import <FFMpegBinding/FFMpegLiveMuxer.h>
#import <FFMpegBinding/FFMpegAVIOContext.h>

#include <third_party/ffmpeg/libavutil/timestamp.h>
#include <third_party/ffmpeg/libavformat/avformat.h>
#include <third_party/ffmpeg/libavcodec/avcodec.h>
#include <third_party/ffmpeg/libswresample/swresample.h>
#include "libavutil/timestamp.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswresample/swresample.h"

#define MOV_TIMESCALE 1000

@@ -2,8 +2,8 @@

#import <FFMpegBinding/FFMpegAVCodecContext.h>

#import <third_party/ffmpeg/libavcodec/avcodec.h>
#import <third_party/ffmpeg/libavformat/avformat.h>
#import "libavcodec/avcodec.h"
#import "libavformat/avformat.h"

@interface FFMpegPacket () {
AVPacket *_impl;
@@ -2,9 +2,9 @@

#import <FFMpegBinding/FFMpegAVIOContext.h>

#include <third_party/ffmpeg/libavutil/timestamp.h>
#include <third_party/ffmpeg/libavformat/avformat.h>
#include <third_party/ffmpeg/libavcodec/avcodec.h>
#include "libavutil/timestamp.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"

#define MOV_TIMESCALE 1000

@@ -2,9 +2,9 @@

#import <FFMpegBinding/FFMpegAVFrame.h>

#import <third_party/ffmpeg/libavformat/avformat.h>
#import <third_party/ffmpeg/libavcodec/avcodec.h>
#import <third_party/ffmpeg/libswresample/swresample.h>
#import "libavformat/avformat.h"
#import "libavcodec/avcodec.h"
#import "libswresample/swresample.h"

@interface FFMpegSWResample () {
int _sourceSampleRate;
@@ -1,9 +1,9 @@
#import <FFMpegBinding/FFMpegVideoWriter.h>
#import <FFMpegBinding/FFMpegAVFrame.h>

#include <third_party/ffmpeg/libavformat/avformat.h>
#include <third_party/ffmpeg/libavcodec/avcodec.h>
#include <third_party/ffmpeg/libavutil/imgutils.h>
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libavutil/imgutils.h"

@interface FFMpegVideoWriter ()

@@ -528,29 +528,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
if case let .assets(_, mode) = controller.subject, [.wallpaper, .story, .addImage, .cover, .createSticker, .createAvatar].contains(mode) {

} else {
let selectionGesture = MediaPickerGridSelectionGesture<TGMediaSelectableItem>()
selectionGesture.delegate = self.wrappedGestureRecognizerDelegate
selectionGesture.began = { [weak self] in
self?.controller?.cancelPanGesture()
}
selectionGesture.updateIsScrollEnabled = { [weak self] isEnabled in
self?.gridNode.scrollView.isScrollEnabled = isEnabled
}
selectionGesture.itemAt = { [weak self] point in
if let self, let itemNode = self.gridNode.itemNodeAtPoint(point) as? MediaPickerGridItemNode, let selectableItem = itemNode.selectableItem {
return (selectableItem, self.controller?.interaction?.selectionState?.isIdentifierSelected(selectableItem.uniqueIdentifier) ?? false)
} else {
return nil
}
}
selectionGesture.updateSelection = { [weak self] asset, selected in
if let strongSelf = self {
strongSelf.controller?.interaction?.selectionState?.setItem(asset, selected: selected, animated: true, sender: nil)
}
}
selectionGesture.sideInset = 44.0
self.gridNode.view.addGestureRecognizer(selectionGesture)
self.selectionGesture = selectionGesture
self.setupSelectionGesture()
}

if let controller = self.controller, case let .assets(collection, _) = controller.subject, collection != nil {
@@ -713,6 +691,35 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
}
}

func setupSelectionGesture() {
guard self.selectionGesture == nil else {
return
}
let selectionGesture = MediaPickerGridSelectionGesture<TGMediaSelectableItem>()
selectionGesture.delegate = self.wrappedGestureRecognizerDelegate
selectionGesture.began = { [weak self] in
self?.controller?.cancelPanGesture()
}
selectionGesture.updateIsScrollEnabled = { [weak self] isEnabled in
self?.gridNode.scrollView.isScrollEnabled = isEnabled
}
selectionGesture.itemAt = { [weak self] point in
if let self, let itemNode = self.gridNode.itemNodeAtPoint(point) as? MediaPickerGridItemNode, let selectableItem = itemNode.selectableItem {
return (selectableItem, self.controller?.interaction?.selectionState?.isIdentifierSelected(selectableItem.uniqueIdentifier) ?? false)
} else {
return nil
}
}
selectionGesture.updateSelection = { [weak self] asset, selected in
if let strongSelf = self {
strongSelf.controller?.interaction?.selectionState?.setItem(asset, selected: selected, animated: true, sender: nil)
}
}
selectionGesture.sideInset = 44.0
self.gridNode.view.addGestureRecognizer(selectionGesture)
self.selectionGesture = selectionGesture
}

@objc private func cameraTapped() {
guard let camera = self.modernCamera, let previewView = self.modernCameraView else {
return
@@ -2352,9 +2359,6 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
let transition = ContainedViewLayoutTransition.animated(duration: 0.25, curve: .easeInOut)
var moreIsVisible = false
if case let .assets(_, mode) = self.subject, [.story, .createSticker].contains(mode) {
if count == 1 {
self.requestAttachmentMenuExpansion()
}
moreIsVisible = true
} else if case let .media(media) = self.subject {
self.titleView.title = media.count == 1 ? self.presentationData.strings.Attachment_Pasteboard : self.presentationData.strings.Attachment_SelectedMedia(count)
@@ -2618,6 +2622,8 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att

self.navigationItem.setRightBarButton(nil, animated: true)
self.explicitMultipleSelection = true
self.controllerNode.setupSelectionGesture()
self.requestAttachmentMenuExpansion()

if let state = self.controllerNode.state {
self.controllerNode.updateState(state)
@@ -33,101 +33,6 @@ private final class ChunkMediaPlayerExternalSourceImpl: ChunkMediaPlayerSourceIm
}

public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
public final class AudioContext {
fileprivate let audioSessionManager: ManagedAudioSession
private var audioSessionDisposable: Disposable?
private(set) var hasAudioSession: Bool = false
private(set) var isAmbientMode: Bool = false
private(set) var isInitialized: Bool = false

private var updatedListeners = Bag<() -> Void>()

public init(
audioSessionManager: ManagedAudioSession
) {
self.audioSessionManager = audioSessionManager
}

deinit {
self.audioSessionDisposable?.dispose()
}

func onUpdated(_ f: @escaping () -> Void) -> Disposable {
let index = self.updatedListeners.add(f)
return ActionDisposable { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
self.updatedListeners.remove(index)
}
}
}

func setIsAmbient(isAmbient: Bool) {
self.hasAudioSession = false

for f in self.updatedListeners.copyItems() {
f()
}

self.audioSessionDisposable?.dispose()
self.audioSessionDisposable = nil
}

func update(type: ManagedAudioSessionType?) {
if let type {
if self.audioSessionDisposable == nil {
self.isInitialized = true

self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
audioSessionType: type,
activateImmediately: false,
manualActivate: { [weak self] control in
control.setupAndActivate(synchronous: false, { state in
Queue.mainQueue().async {
guard let self else {
return
}
self.hasAudioSession = true
for f in self.updatedListeners.copyItems() {
f()
}
}
})
},
deactivate: { [weak self] _ in
return Signal { subscriber in
guard let self else {
subscriber.putCompletion()
return EmptyDisposable
}

self.hasAudioSession = false
for f in self.updatedListeners.copyItems() {
f()
}
subscriber.putCompletion()

return EmptyDisposable
}
|> runOn(.mainQueue())
},
headsetConnectionStatusChanged: { _ in },
availableOutputsChanged: { _, _ in }
))
}
} else {
if let audioSessionDisposable = self.audioSessionDisposable {
self.audioSessionDisposable = nil
audioSessionDisposable.dispose()
}

self.hasAudioSession = false
}
}
}

public enum SourceDescription {
public final class ResourceDescription {
public let postbox: Postbox
@@ -261,10 +166,10 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
private let dataQueue: Queue

private let mediaDataReaderParams: MediaDataReaderParams
private let audioSessionManager: ManagedAudioSession
private let onSeeked: (() -> Void)?
private weak var playerNode: MediaPlayerNode?

private let audioContext: AudioContext
private let renderSynchronizer: AVSampleBufferRenderSynchronizer
private var videoRenderer: AVSampleBufferDisplayLayer
private var audioRenderer: AVSampleBufferAudioRenderer?
@@ -293,20 +198,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}

public var actionAtEnd: MediaPlayerActionAtEnd = .stop
public weak var migrateToNextPlayerOnEnd: ChunkMediaPlayerV2? {
didSet {
if self.migrateToNextPlayerOnEnd !== oldValue {
self.updateInternalState()
}
}
}

private var didSeekOnce: Bool = false
private var isPlaying: Bool = false
private var baseRate: Double = 1.0
private var isSoundEnabled: Bool
private var isMuted: Bool
private var initialIsAmbient: Bool
private var isAmbientMode: Bool

private var seekId: Int = 0
private var seekTimestamp: Double = 0.0
@@ -325,11 +223,12 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
private var partsStateDisposable: Disposable?
private var updateTimer: Foundation.Timer?

private var audioContextUpdatedDisposable: Disposable?
private var audioSessionDisposable: Disposable?
private var hasAudioSession: Bool = false

public init(
params: MediaDataReaderParams,
audioContext: AudioContext,
audioSessionManager: ManagedAudioSession,
source: SourceDescription,
video: Bool,
playAutomatically: Bool = false,
@@ -348,7 +247,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.dataQueue = ChunkMediaPlayerV2.sharedDataQueue

self.mediaDataReaderParams = params
self.audioContext = audioContext
self.audioSessionManager = audioSessionManager
self.onSeeked = onSeeked
self.playerNode = playerNode

@@ -358,7 +257,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {

self.isSoundEnabled = enableSound
self.isMuted = soundMuted
self.initialIsAmbient = ambient
self.isAmbientMode = ambient
self.baseRate = baseRate

self.renderSynchronizer = AVSampleBufferRenderSynchronizer()
@@ -397,19 +296,12 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
} else {
self.renderSynchronizer.addRenderer(self.videoRenderer)
}

self.audioContextUpdatedDisposable = self.audioContext.onUpdated({ [weak self] in
guard let self else {
return
}
self.updateInternalState()
})
}

deinit {
self.partsStateDisposable?.dispose()
self.updateTimer?.invalidate()
self.audioContextUpdatedDisposable?.dispose()
self.audioSessionDisposable?.dispose()

if #available(iOS 17.0, *) {
self.videoRenderer.sampleBufferRenderer.stopRequestingMediaData()
@@ -429,19 +321,51 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}

private func updateInternalState() {
var audioSessionType: ManagedAudioSessionType?
if self.isSoundEnabled && self.hasSound {
let isAmbient: Bool
if self.audioContext.isInitialized {
isAmbient = self.audioContext.isAmbientMode
} else {
isAmbient = self.initialIsAmbient
if self.audioSessionDisposable == nil {
self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
audioSessionType: self.isAmbientMode ? .ambient : .play(mixWithOthers: false),
activateImmediately: false,
manualActivate: { [weak self] control in
control.setupAndActivate(synchronous: false, { state in
Queue.mainQueue().async {
guard let self else {
return
}
audioSessionType = isAmbient ? .ambient : .play(mixWithOthers: false)
self.hasAudioSession = true
self.updateInternalState()
}
})
},
deactivate: { [weak self] _ in
return Signal { subscriber in
guard let self else {
subscriber.putCompletion()
return EmptyDisposable
}
self.audioContext.update(type: audioSessionType)

if self.isSoundEnabled && self.hasSound && self.audioContext.hasAudioSession {
self.hasAudioSession = false
self.updateInternalState()
subscriber.putCompletion()

return EmptyDisposable
}
|> runOn(.mainQueue())
},
headsetConnectionStatusChanged: { _ in },
availableOutputsChanged: { _, _ in }
))
}
} else {
if let audioSessionDisposable = self.audioSessionDisposable {
self.audioSessionDisposable = nil
audioSessionDisposable.dispose()
}

self.hasAudioSession = false
}

if self.isSoundEnabled && self.hasSound && self.hasAudioSession {
if self.audioRenderer == nil {
let audioRenderer = AVSampleBufferAudioRenderer()
audioRenderer.isMuted = self.isMuted
@@ -875,9 +799,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}

public func continueWithOverridingAmbientMode(isAmbient: Bool) {
if self.audioContext.isAmbientMode != isAmbient {
self.initialIsAmbient = isAmbient
self.audioContext.setIsAmbient(isAmbient: isAmbient)
if self.isAmbientMode != isAmbient {
self.isAmbientMode = isAmbient

self.hasAudioSession = false
self.updateInternalState()
self.audioSessionDisposable?.dispose()
self.audioSessionDisposable = nil

let currentTimestamp: CMTime
if let pendingSeekTimestamp = self.pendingSeekTimestamp {
@@ -53,6 +53,19 @@ private func chatInputStateString(attributedString: NSAttributedString) -> NSAtt
}
if let value = attributes[.font], let font = value as? UIFont {
let fontName = font.fontName.lowercased()
if fontName.hasPrefix(".sfui") {
let traits = font.fontDescriptor.symbolicTraits
if traits.contains(.traitMonoSpace) {
string.addAttribute(ChatTextInputAttributes.monospace, value: true as NSNumber, range: range)
} else {
if traits.contains(.traitBold) {
string.addAttribute(ChatTextInputAttributes.bold, value: true as NSNumber, range: range)
}
if traits.contains(.traitItalic) {
string.addAttribute(ChatTextInputAttributes.italic, value: true as NSNumber, range: range)
}
}
} else {
if fontName.contains("bolditalic") {
string.addAttribute(ChatTextInputAttributes.bold, value: true as NSNumber, range: range)
string.addAttribute(ChatTextInputAttributes.italic, value: true as NSNumber, range: range)
@@ -64,6 +77,7 @@ private func chatInputStateString(attributedString: NSAttributedString) -> NSAtt
string.addAttribute(ChatTextInputAttributes.monospace, value: true as NSNumber, range: range)
}
}
}
if let value = attributes[.backgroundColor] as? UIColor, value.rgb == UIColor.gray.rgb {
string.addAttribute(ChatTextInputAttributes.spoiler, value: true as NSNumber, range: range)
}
@@ -1108,6 +1108,10 @@ private final class SheetContent: CombinedComponent {
func layoutLevel(_ level: Int32) {
var perks: [LevelSectionComponent.Perk] = []

if !isGroup && level >= requiredBoostSubjectLevel(subject: .autoTranslate, group: isGroup, context: component.context, configuration: premiumConfiguration) {
perks.append(.autoTranslate)
}

perks.append(.story(level))

if !isGroup {
@@ -1171,12 +1175,6 @@ private final class SheetContent: CombinedComponent {
if !isGroup && level >= requiredBoostSubjectLevel(subject: .noAds, group: isGroup, context: component.context, configuration: premiumConfiguration) {
perks.append(.noAds)
}
if !isGroup && level >= requiredBoostSubjectLevel(subject: .autoTranslate, group: isGroup, context: component.context, configuration: premiumConfiguration) {
perks.append(.autoTranslate)
}
// if !isGroup && level >= requiredBoostSubjectLevel(subject: .wearGift, group: isGroup, context: component.context, configuration: premiumConfiguration) {
// perks.append(.wearGift)
// }

levelItems.append(
AnyComponentWithIdentity(
@@ -167,7 +167,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
self.conferenceAddParticipant?()
}

var enableVideoSharpening = true
var enableVideoSharpening = false
if let data = call.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
enableVideoSharpening = value != 0.0
}
@@ -70,7 +70,7 @@ final class LivestreamVideoViewV1: UIView {
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
audioSessionManager: audioSessionManager,
source: .externalParts(self.chunkPlayerPartsState.get()),
video: true,
enableSound: true,
@@ -1247,7 +1247,7 @@ final class VideoChatScreenComponent: Component {
}
self.callState = component.initialData.callState

self.enableVideoSharpening = true
self.enableVideoSharpening = false
if let data = component.initialCall.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
self.enableVideoSharpening = value != 0.0
}
@@ -103,6 +103,10 @@ private final class FetchImpl {
init(range: Range<Int64>) {
self.range = range
}

deinit {
self.disposable?.dispose()
}
}

private final class HashRangeData {
@@ -202,6 +202,16 @@ func managedPromoInfoUpdates(accountPeerId: PeerId, postbox: Postbox, network: N
switch data {
case .promoDataEmpty:
transaction.replaceAdditionalChatListItems([])

let suggestionInfo = ServerSuggestionInfo(
legacyItems: [],
items: [],
dismissedIds: []
)

transaction.updatePreferencesEntry(key: PreferencesKeys.serverSuggestionInfo(), { _ in
return PreferencesEntry(suggestionInfo)
})
case let .promoData(flags, expires, peer, psaType, psaMessage, pendingSuggestions, dismissedSuggestions, customPendingSuggestion, chats, users):
let _ = expires

@@ -70,6 +70,7 @@ swift_library(
"//submodules/MediaPlayer:UniversalMediaPlayer",
"//submodules/TelegramVoip:TelegramVoip",
"//submodules/DeviceAccess:DeviceAccess",
"//submodules/Utils/DeviceModel",
"//submodules/WatchCommon/Host:WatchCommon",
"//submodules/BuildConfig:BuildConfig",
"//submodules/BuildConfigExtra:BuildConfigExtra",
@@ -2553,6 +2553,8 @@ public class CameraScreenImpl: ViewController, CameraScreen {
transitionCircleLayer.animateScale(from: sourceLocalFrame.width / 320.0, to: 6.0, duration: 0.6, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
self.view.mask = nil
colorFillView.removeFromSuperview()

self.requestUpdateLayout(hasAppeared: true, transition: .immediate)
})
} else {
if case .story = controller.mode {
@@ -469,13 +469,7 @@ public final class ChatChannelSubscriberInputPanelNode: ChatInputPanelNode {

self.giftButton.isHidden = false
self.helpButton.isHidden = true
//TODO:release
self.suggestedPostButton.isHidden = false
self.presentGiftOrSuggestTooltip()
} else if case .broadcast = peer.info {
self.giftButton.isHidden = true
self.helpButton.isHidden = true
self.suggestedPostButton.isHidden = false
self.suggestedPostButton.isHidden = true
self.presentGiftOrSuggestTooltip()
} else if peer.flags.contains(.isGigagroup), self.action == .muteNotifications || self.action == .unmuteNotifications {
self.giftButton.isHidden = true
@@ -295,10 +295,6 @@ public func canAddMessageReactions(message: Message) -> Bool {
return true
}
}
} else if let story = media as? TelegramMediaStory {
if story.isMention {
return false
}
}
}
return true
@@ -559,7 +559,7 @@ public final class GiftItemComponent: Component {
let price: String
switch component.subject {
case let .premium(_, priceValue), let .starGift(_, priceValue):
if priceValue.containsEmoji {
if priceValue.contains("#") {
buttonColor = component.theme.overallDarkAppearance ? UIColor(rgb: 0xffc337) : UIColor(rgb: 0xd3720a)
if !component.isSoldOut {
starsColor = UIColor(rgb: 0xffbe27)
@@ -868,9 +868,11 @@ public final class GiftItemComponent: Component {
)
let dateTimeFormat = component.context.sharedContext.currentPresentationData.with { $0 }.dateTimeFormat
let labelText = NSMutableAttributedString(attributedString: parseMarkdownIntoAttributedString("# \(presentationStringsFormattedNumber(Int32(resellPrice), dateTimeFormat.groupingSeparator))", attributes: attributes))
if let range = labelText.string.range(of: "#") {
labelText.addAttribute(NSAttributedString.Key.font, value: Font.semibold(10.0), range: NSRange(range, in: labelText.string))
labelText.addAttribute(ChatTextInputAttributes.customEmoji, value: ChatTextInputTextCustomEmojiAttribute(interactivelySelectedFromPackId: nil, fileId: 0, file: nil, custom: .stars(tinted: true)), range: NSRange(range, in: labelText.string))
let range = (labelText.string as NSString).range(of: "#")
if range.location != NSNotFound {
labelText.addAttribute(NSAttributedString.Key.font, value: Font.semibold(10.0), range: range)
labelText.addAttribute(ChatTextInputAttributes.customEmoji, value: ChatTextInputTextCustomEmojiAttribute(interactivelySelectedFromPackId: nil, fileId: 0, file: nil, custom: .stars(tinted: true)), range: range)
labelText.addAttribute(.kern, value: -1.5, range: NSRange(location: range.upperBound, length: 1))
}

let resellSize = self.reselLabel.update(
@@ -1048,11 +1050,13 @@ private final class ButtonContentComponent: Component {
self.componentState = state

let attributedText = NSMutableAttributedString(string: component.text, font: Font.semibold(11.0), textColor: component.color)
let range = (attributedText.string as NSString).range(of: "⭐️")
let range = (attributedText.string as NSString).range(of: "#")
if range.location != NSNotFound {
attributedText.addAttribute(ChatTextInputAttributes.customEmoji, value: ChatTextInputTextCustomEmojiAttribute(interactivelySelectedFromPackId: nil, fileId: 0, file: nil, custom: .stars(tinted: component.tinted)), range: range)
attributedText.addAttribute(.font, value: Font.semibold(15.0), range: range)
attributedText.addAttribute(.baselineOffset, value: 2.0, range: NSRange(location: range.upperBound, length: attributedText.length - range.upperBound))
attributedText.addAttribute(.font, value: Font.semibold(component.tinted ? 14.0 : 15.0), range: range)
attributedText.addAttribute(.baselineOffset, value: -3.0, range: range)
attributedText.addAttribute(.baselineOffset, value: 1.5, range: NSRange(location: range.upperBound + 1, length: attributedText.length - range.upperBound - 1))
attributedText.addAttribute(.kern, value: -1.5, range: NSRange(location: range.upperBound, length: 1))
}

let titleSize = self.title.update(
@@ -412,12 +412,12 @@ final class GiftOptionsScreenComponent: Component {
if let availability = gift.availability, availability.remains == 0, let minResaleStars = availability.minResaleStars {
let priceString = presentationStringsFormattedNumber(Int32(minResaleStars), environment.dateTimeFormat.groupingSeparator)
if let resaleConfiguration = self.resaleConfiguration, minResaleStars == resaleConfiguration.starGiftResaleMaxAmount || availability.resale == 1 {
subject = .starGift(gift: gift, price: "⭐️ \(priceString)")
subject = .starGift(gift: gift, price: "# \(priceString)")
} else {
subject = .starGift(gift: gift, price: "⭐️ \(priceString)+")
subject = .starGift(gift: gift, price: "# \(priceString)+")
}
} else {
subject = .starGift(gift: gift, price: "⭐️ \(presentationStringsFormattedNumber(Int32(gift.price), environment.dateTimeFormat.groupingSeparator))")
subject = .starGift(gift: gift, price: "# \(presentationStringsFormattedNumber(Int32(gift.price), environment.dateTimeFormat.groupingSeparator))")
}
case let .unique(gift):
subject = .uniqueGift(gift: gift, price: nil)
@@ -1567,6 +1567,9 @@ final class GiftOptionsScreenComponent: Component {
}
}
}
if disallowedGifts.contains(.unique) && gift.availability?.remains == 0 {
return false
}
}
return true
}
@@ -95,7 +95,8 @@ final class GiftStoreScreenComponent: Component {

private var starsStateDisposable: Disposable?
private var starsState: StarsContext.State?
private var initialCount: Int?
private var initialCount: Int32?
private var showLoading = true

private var component: GiftStoreScreenComponent?
private(set) weak var state: State?
@@ -230,7 +231,7 @@ final class GiftStoreScreenComponent: Component {
color: ribbonColor
)

let subject: GiftItemComponent.Subject = .uniqueGift(gift: uniqueGift, price: "⭐️\(presentationStringsFormattedNumber(Int32(uniqueGift.resellStars ?? 0), environment.dateTimeFormat.groupingSeparator))")
let subject: GiftItemComponent.Subject = .uniqueGift(gift: uniqueGift, price: "# \(presentationStringsFormattedNumber(Int32(uniqueGift.resellStars ?? 0), environment.dateTimeFormat.groupingSeparator))")
let _ = visibleItem.update(
transition: itemTransition,
component: AnyComponent(
@@ -338,7 +339,9 @@ final class GiftStoreScreenComponent: Component {
guard let self else {
return
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes([])
self.scrollToTop()
},
animateScale: false
)
@@ -357,7 +360,7 @@ final class GiftStoreScreenComponent: Component {
var emptyResultsActionFrame = CGRect(
origin: CGPoint(
x: floorToScreenPixels((availableWidth - emptyResultsActionSize.width) / 2.0),
y: max(self.scrollView.contentSize.height - 8.0, availableHeight - bottomInset - emptyResultsActionSize.height - 16.0)
y: max(self.scrollView.contentSize.height - 70.0, availableHeight - bottomInset - emptyResultsActionSize.height - 16.0)
),
size: emptyResultsActionSize
)
@@ -435,7 +438,7 @@ final class GiftStoreScreenComponent: Component {
if view.superview == nil {
view.alpha = 0.0
fadeTransition.setAlpha(view: view, alpha: 1.0)
self.insertSubview(view, belowSubview: self.loadingNode.view)
self.scrollView.addSubview(view)
}
view.bounds = CGRect(origin: .zero, size: emptyResultsActionFrame.size)
ComponentTransition.immediate.setPosition(view: view, position: emptyResultsActionFrame.center)
@@ -451,7 +454,7 @@ final class GiftStoreScreenComponent: Component {
}

let bottomContentOffset = max(0.0, self.scrollView.contentSize.height - self.scrollView.contentOffset.y - self.scrollView.frame.height)
if interactive, bottomContentOffset < 320.0 {
if interactive, bottomContentOffset < 1000.0 {
self.state?.starGiftsContext.loadMore()
}
}
@@ -471,6 +474,7 @@ final class GiftStoreScreenComponent: Component {
guard let self else {
return
}
self.showLoading = true
self.state?.starGiftsContext.updateSorting(.value)
self.scrollToTop()
})))
@@ -481,6 +485,7 @@ final class GiftStoreScreenComponent: Component {
guard let self else {
return
}
self.showLoading = true
self.state?.starGiftsContext.updateSorting(.date)
self.scrollToTop()
})))
@@ -491,6 +496,7 @@ final class GiftStoreScreenComponent: Component {
guard let self else {
return
}
self.showLoading = true
self.state?.starGiftsContext.updateSorting(.number)
self.scrollToTop()
})))
@@ -514,7 +520,13 @@ final class GiftStoreScreenComponent: Component {
} else {
return false
}
}.sorted(by: { lhs, rhs in
if case let .model(_, lhsFile, _) = lhs, case let .model(_, rhsFile, _) = rhs, let lhsCount = self.state?.starGiftsState?.attributeCount[.model(lhsFile.fileId.id)], let rhsCount = self.state?.starGiftsState?.attributeCount[.model(rhsFile.fileId.id)] {
return lhsCount > rhsCount
} else {
return false
}
})

let currentFilterAttributes = self.state?.starGiftsState?.filterAttributes ?? []
let selectedModelAttributes = currentFilterAttributes.filter { attribute in
@@ -564,6 +576,7 @@ final class GiftStoreScreenComponent: Component {
updatedFilterAttributes.append(attribute)
}
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
},
@@ -577,6 +590,7 @@ final class GiftStoreScreenComponent: Component {
}
return true
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
}
@@ -607,7 +621,13 @@ final class GiftStoreScreenComponent: Component {
} else {
return false
}
}.sorted(by: { lhs, rhs in
if case let .backdrop(_, lhsId, _, _, _, _, _) = lhs, case let .backdrop(_, rhsId, _, _, _, _, _) = rhs, let lhsCount = self.state?.starGiftsState?.attributeCount[.backdrop(lhsId)], let rhsCount = self.state?.starGiftsState?.attributeCount[.backdrop(rhsId)] {
return lhsCount > rhsCount
} else {
return false
}
})

let currentFilterAttributes = self.state?.starGiftsState?.filterAttributes ?? []
let selectedBackdropAttributes = currentFilterAttributes.filter { attribute in
@@ -657,6 +677,7 @@ final class GiftStoreScreenComponent: Component {
updatedFilterAttributes.append(attribute)
}
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
},
@@ -670,6 +691,7 @@ final class GiftStoreScreenComponent: Component {
}
return true
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
}
@@ -700,7 +722,13 @@ final class GiftStoreScreenComponent: Component {
} else {
return false
}
}.sorted(by: { lhs, rhs in
if case let .pattern(_, lhsFile, _) = lhs, case let .pattern(_, rhsFile, _) = rhs, let lhsCount = self.state?.starGiftsState?.attributeCount[.pattern(lhsFile.fileId.id)], let rhsCount = self.state?.starGiftsState?.attributeCount[.pattern(rhsFile.fileId.id)] {
return lhsCount > rhsCount
} else {
return false
}
})

let currentFilterAttributes = self.state?.starGiftsState?.filterAttributes ?? []
let selectedPatternAttributes = currentFilterAttributes.filter { attribute in
@@ -750,6 +778,7 @@ final class GiftStoreScreenComponent: Component {
updatedFilterAttributes.append(attribute)
}
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
},
@@ -763,6 +792,7 @@ final class GiftStoreScreenComponent: Component {
}
return true
}
self.showLoading = true
self.state?.starGiftsContext.updateFilterAttributes(updatedFilterAttributes)
self.scrollToTop()
}
@@ -804,6 +834,12 @@ final class GiftStoreScreenComponent: Component {
self.component = component

let isLoading = self.effectiveIsLoading
if case let .ready(loadMore, nextOffset) = self.state?.starGiftsState?.dataState {
if loadMore && nextOffset == nil {
} else {
self.showLoading = false
}
}

let theme = environment.theme
let strings = environment.strings
@@ -812,7 +848,7 @@ final class GiftStoreScreenComponent: Component {
self.backgroundColor = environment.theme.list.blocksBackgroundColor
}

let bottomContentInset: CGFloat = 24.0
let bottomContentInset: CGFloat = 56.0
let sideInset: CGFloat = 16.0 + environment.safeInsets.left
let headerSideInset: CGFloat = 24.0 + environment.safeInsets.left

@@ -927,7 +963,7 @@ final class GiftStoreScreenComponent: Component {
}

let effectiveCount: Int32
if let count = self.effectiveGifts?.count, count > 0 || self.initialCount != nil {
if let count = self.state?.starGiftsState?.count, count > 0 || self.initialCount != nil {
if self.initialCount == nil {
self.initialCount = count
}
@@ -1047,6 +1083,7 @@ final class GiftStoreScreenComponent: Component {

let loadingTransition: ComponentTransition = .easeInOut(duration: 0.25)

var showingFilters = false
let filterSize = self.filterSelector.update(
transition: transition,
component: AnyComponent(FilterSelectorComponent(
@@ -1069,6 +1106,7 @@ final class GiftStoreScreenComponent: Component {

if let initialCount = self.initialCount, initialCount >= minimumCountToDisplayFilters {
loadingTransition.setAlpha(view: filterSelectorView, alpha: 1.0)
showingFilters = true
}
}

@@ -1112,8 +1150,8 @@ final class GiftStoreScreenComponent: Component {

self.updateScrolling(transition: transition)

if isLoading {
self.loadingNode.update(size: availableSize, theme: environment.theme, transition: .immediate)
if isLoading && self.showLoading {
self.loadingNode.update(size: availableSize, theme: environment.theme, showFilters: !showingFilters, transition: .immediate)
loadingTransition.setAlpha(view: self.loadingNode.view, alpha: 1.0)
} else {
loadingTransition.setAlpha(view: self.loadingNode.view, alpha: 0.0)
@@ -125,7 +125,7 @@ final class LoadingShimmerNode: ASDisplayNode {
private let backgroundColorNode: ASDisplayNode
private let effectNode: SearchShimmerEffectNode
private let maskNode: ASImageNode
private var currentParams: (size: CGSize, theme: PresentationTheme)?
private var currentParams: (size: CGSize, theme: PresentationTheme, showFilters: Bool)?

override init() {
self.backgroundColorNode = ASDisplayNode()
@@ -142,11 +142,11 @@ final class LoadingShimmerNode: ASDisplayNode {
self.addSubnode(self.maskNode)
}

func update(size: CGSize, theme: PresentationTheme, transition: ContainedViewLayoutTransition) {
func update(size: CGSize, theme: PresentationTheme, showFilters: Bool, transition: ContainedViewLayoutTransition) {
let color = theme.list.itemSecondaryTextColor.mixedWith(theme.list.blocksBackgroundColor, alpha: 0.85)

if self.currentParams?.size != size || self.currentParams?.theme !== theme {
self.currentParams = (size, theme)
if self.currentParams?.size != size || self.currentParams?.theme !== theme || self.currentParams?.showFilters != showFilters {
self.currentParams = (size, theme, showFilters)

self.backgroundColorNode.backgroundColor = color

@@ -156,11 +156,13 @@ final class LoadingShimmerNode: ASDisplayNode {

let sideInset: CGFloat = 16.0

if showFilters {
let filterSpacing: CGFloat = 6.0
let filterWidth = (size.width - sideInset * 2.0 - filterSpacing * 3.0) / 4.0
for i in 0 ..< 4 {
context.addPath(CGPath(roundedRect: CGRect(origin: CGPoint(x: sideInset + (filterWidth + filterSpacing) * CGFloat(i), y: 0.0), size: CGSize(width: filterWidth, height: 28.0)), cornerWidth: 14.0, cornerHeight: 14.0, transform: nil))
}
}

var currentY: CGFloat = 39.0 + 7.0
var rowIndex: Int = 0
@@ -309,6 +309,16 @@ private final class GiftViewSheetContent: CombinedComponent {
let context = self.context
let action = {
if gifts {
let profileGifts = ProfileGiftsContext(account: context.account, peerId: peer.id)
let _ = (profileGifts.state
|> filter { state in
if case .ready = state.dataState {
return true
}
return false
}
|> take(1)
|> deliverOnMainQueue).start(next: { [weak navigationController] _ in
if let profileController = context.sharedContext.makePeerInfoController(
context: context,
updatedPresentationData: nil,
@@ -318,8 +328,10 @@ private final class GiftViewSheetContent: CombinedComponent {
fromChat: false,
requestsContext: nil
) {
navigationController.pushViewController(profileController)
navigationController?.pushViewController(profileController)
}
let _ = profileGifts
})
} else {
context.sharedContext.navigateToChatController(NavigateToChatControllerParams(
navigationController: navigationController,
@@ -946,7 +958,7 @@ private final class GiftViewSheetContent: CombinedComponent {

let location = CGRect(origin: CGPoint(x: absoluteLocation.x, y: absoluteLocation.y - 12.0), size: CGSize())
let tooltipController = TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: .plain(text: text), style: .wide, location: .point(location, .bottom), displayDuration: .default, inset: 16.0, shouldDismissOnTouch: { _, _ in
return .ignore
return .dismiss(consume: false)
})
controller.present(tooltipController, in: .current)
}
@@ -230,7 +230,7 @@ public enum MediaCropOrientation: Int32 {
}
}

public final class MediaEditorValues: Codable, Equatable {
public final class MediaEditorValues: Codable, Equatable, CustomStringConvertible {
public static func == (lhs: MediaEditorValues, rhs: MediaEditorValues) -> Bool {
if lhs.peerId != rhs.peerId {
return false
@@ -1010,6 +1010,114 @@ public final class MediaEditorValues: Codable, Equatable {
}
return false
}

public var description: String {
var components: [String] = []

components.append("originalDimensions: \(self.originalDimensions.width)x\(self.originalDimensions.height)")

if self.cropOffset != .zero {
components.append("cropOffset: \(cropOffset)")
}

if let cropRect = self.cropRect {
components.append("cropRect: \(cropRect)")
}

if self.cropScale != 1.0 {
components.append("cropScale: \(self.cropScale)")
}

if self.cropRotation != 0.0 {
components.append("cropRotation: \(self.cropRotation)")
}

if self.cropMirroring {
components.append("cropMirroring: true")
}

if let cropOrientation = self.cropOrientation {
components.append("cropOrientation: \(cropOrientation)")
}

if let gradientColors = self.gradientColors, !gradientColors.isEmpty {
components.append("gradientColors: \(gradientColors.count) colors")
}

if let videoTrimRange = self.videoTrimRange {
components.append("videoTrimRange: \(videoTrimRange.lowerBound) - \(videoTrimRange.upperBound)")
}

if self.videoIsMuted {
components.append("videoIsMuted: true")
}

if self.videoIsFullHd {
components.append("videoIsFullHd: true")
}

if self.videoIsMirrored {
components.append("videoIsMirrored: true")
}

if let videoVolume = self.videoVolume, videoVolume != 1.0 {
components.append("videoVolume: \(videoVolume)")
}

if let additionalVideoPath = self.additionalVideoPath {
components.append("additionalVideo: \(additionalVideoPath)")
}

if let position = self.additionalVideoPosition {
components.append("additionalVideoPosition: \(position)")
}

if let scale = self.additionalVideoScale {
components.append("additionalVideoScale: \(scale)")
}

if let rotation = self.additionalVideoRotation {
components.append("additionalVideoRotation: \(rotation)")
}

if !self.additionalVideoPositionChanges.isEmpty {
components.append("additionalVideoPositionChanges: \(additionalVideoPositionChanges.count) changes")
}

if !self.collage.isEmpty {
components.append("collage: \(collage.count) items")
}

if self.nightTheme {
components.append("nightTheme: true")
}

if self.drawing != nil {
components.append("drawing: true")
}

if self.maskDrawing != nil {
components.append("maskDrawing: true")
}

if !self.entities.isEmpty {
components.append("entities: \(self.entities.count) items")
}

if !self.toolValues.isEmpty {
components.append("toolValues: \(self.toolValues.count) tools")
}

if let audioTrack = self.audioTrack {
components.append("audioTrack: \(audioTrack.path)")
}

if let qualityPreset = self.qualityPreset {
components.append("qualityPreset: \(qualityPreset)")
}

return "MediaEditorValues(\(components.joined(separator: ", ")))"
}
}

public struct TintValue: Equatable, Codable {
@@ -264,6 +264,11 @@ public final class MediaEditorVideoExport {
self.outputPath = outputPath
self.textScale = textScale

Logger.shared.log("VideoExport", "Init")
Logger.shared.log("VideoExport", "Subject: \(subject)")
Logger.shared.log("VideoExport", "Output Path: \(outputPath)")
Logger.shared.log("VideoExport", "Configuration: \(configuration)")

if FileManager.default.fileExists(atPath: outputPath) {
try? FileManager.default.removeItem(atPath: outputPath)
}
@@ -297,6 +302,9 @@ public final class MediaEditorVideoExport {
}

private func setup() {
Logger.shared.log("VideoExport", "Setting up")


var mainAsset: AVAsset?

var signals: [Signal<Input, NoError>] = []
@@ -948,11 +956,6 @@ public final class MediaEditorVideoExport {
return false
}
}
} else {
// if !writer.appendVideoBuffer(sampleBuffer) {
// writer.markVideoAsFinished()
// return false
// }
}
}
return true
@@ -983,17 +986,21 @@ public final class MediaEditorVideoExport {
}

private func start() {
Logger.shared.log("VideoExport", "Start")
guard self.internalStatus == .idle, let writer = self.writer else {
Logger.shared.log("VideoExport", "Failed with invalid state")
self.statusValue = .failed(.invalid)
return
}

guard writer.startWriting() else {
Logger.shared.log("VideoExport", "Failed on startWriting")
self.statusValue = .failed(.writing(nil))
return
}

if let reader = self.reader, !reader.startReading() {
Logger.shared.log("VideoExport", "Failed on startReading")
self.statusValue = .failed(.reading(nil))
return
}
@@ -1067,6 +1074,7 @@ public final class MediaEditorVideoExport {
}

if cancelled {
Logger.shared.log("VideoExport", "Cancelled")
try? FileManager.default.removeItem(at: outputUrl)
self.internalStatus = .finished
self.statusValue = .failed(.cancelled)
@@ -1108,6 +1116,7 @@ public final class MediaEditorVideoExport {
let exportDuration = end - self.startTimestamp
print("video processing took \(exportDuration)s")
if duration.seconds > 0 {
Logger.shared.log("VideoExport", "Completed with path \(self.outputPath)")
Logger.shared.log("VideoExport", "Video processing took \(exportDuration / duration.seconds)")
}
})
@@ -184,7 +184,9 @@ public final class PeerInfoGiftsCoverComponent: Component {
}
}

private var scheduledAnimateIn = false
public func willAnimateIn() {
self.scheduledAnimateIn = true
for (_, layer) in self.iconLayers {
layer.opacity = 0.0
}
@@ -194,6 +196,7 @@ public final class PeerInfoGiftsCoverComponent: Component {
guard let _ = self.currentSize, let component = self.component else {
return
}
self.scheduledAnimateIn = false

for (_, layer) in self.iconLayers {
layer.opacity = 1.0
@@ -319,8 +322,12 @@ public final class PeerInfoGiftsCoverComponent: Component {
self.iconLayers[id] = iconLayer
self.layer.addSublayer(iconLayer)

if self.scheduledAnimateIn {
iconLayer.opacity = 0.0
} else {
iconLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
iconLayer.animateScale(from: 0.01, to: 1.0, duration: 0.2)
}

iconLayer.startAnimations(index: index)
}
@@ -349,7 +356,10 @@ public final class PeerInfoGiftsCoverComponent: Component {
iconTransition.setPosition(layer: iconLayer, position: absolutePosition)
iconLayer.updateRotation(effectiveAngle, transition: iconTransition)
iconTransition.setScale(layer: iconLayer, scale: iconPosition.scale * (1.0 - itemScaleFraction))

if !self.scheduledAnimateIn {
iconTransition.setAlpha(layer: iconLayer, alpha: 1.0 - itemScaleFraction)
}

index += 1
}
@@ -2216,9 +2216,9 @@ private func editingItems(data: PeerInfoScreenData?, boostStatus: ChannelBoostSt
}))

//TODO:localize
items[.peerSettings]!.append(PeerInfoScreenDisclosureItem(id: ItemPostSuggestionsSettings, label: .text("Off"), additionalBadgeLabel: presentationData.strings.Settings_New, text: "Post Suggestions", icon: UIImage(bundleImageName: "Chat/Info/PostSuggestionsIcon"), action: {
/*items[.peerSettings]!.append(PeerInfoScreenDisclosureItem(id: ItemPostSuggestionsSettings, label: .text("Off"), additionalBadgeLabel: presentationData.strings.Settings_New, text: "Post Suggestions", icon: UIImage(bundleImageName: "Chat/Info/PostSuggestionsIcon"), action: {
interaction.editingOpenPostSuggestionsSetup()
}))
}))*/
}

if isCreator || (channel.adminRights?.rights.contains(.canChangeInfo) == true) {
@@ -488,7 +488,7 @@ public final class PeerInfoGiftsPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScr

switch product.gift {
case let .generic(gift):
subject = .starGift(gift: gift, price: "⭐️ \(gift.price)")
subject = .starGift(gift: gift, price: "# \(gift.price)")
peer = product.fromPeer.flatMap { .peer($0) } ?? .anonymous

if let availability = gift.availability {
@@ -279,7 +279,7 @@ private final class SheetContent: CombinedComponent {
case .starGiftResell:
let amountInfoString: NSAttributedString
if let value = state.amount?.value, value > 0 {
let starsValue = Int32(floor(Float(value) * Float(resaleConfiguration.paidMessageCommissionPermille) / 1000.0))
let starsValue = Int32(floor(Float(value) * Float(resaleConfiguration.starGiftCommissionPermille) / 1000.0))
let starsString = environment.strings.Stars_SellGift_AmountInfo_Stars(starsValue)
amountInfoString = NSAttributedString(attributedString: parseMarkdownIntoAttributedString(environment.strings.Stars_SellGift_AmountInfo(starsString).string, attributes: amountMarkdownAttributes, textAlignment: .natural))

@@ -288,7 +288,7 @@ private final class SheetContent: CombinedComponent {
amountRightLabel = "≈\(formatTonUsdValue(Int64(starsValue), divide: false, rate: usdRate, dateTimeFormat: environment.dateTimeFormat))"
}
} else {
amountInfoString = NSAttributedString(attributedString: parseMarkdownIntoAttributedString(environment.strings.Stars_SellGift_AmountInfo("\(resaleConfiguration.paidMessageCommissionPermille / 10)%").string, attributes: amountMarkdownAttributes, textAlignment: .natural))
amountInfoString = NSAttributedString(attributedString: parseMarkdownIntoAttributedString(environment.strings.Stars_SellGift_AmountInfo("\(resaleConfiguration.starGiftCommissionPermille / 10)%").string, attributes: amountMarkdownAttributes, textAlignment: .natural))
}
amountFooter = AnyComponent(MultilineTextComponent(
text: .plain(amountInfoString),
@@ -6,7 +6,6 @@ import SwiftSignalKit
import TelegramCore
import Postbox
import TelegramPresentationData
import UniversalMediaPlayer

public final class StoryContentItem: Equatable {
public final class ExternalState {
@@ -33,7 +32,6 @@ public final class StoryContentItem: Equatable {
public final class SharedState {
public var replyDrafts: [StoryId: NSAttributedString] = [:]
public var baseRate: Double = 1.0
public var audioContext: ChunkMediaPlayerV2.AudioContext?

public init() {
}
@@ -15,275 +15,6 @@ import ButtonComponent
import MultilineTextComponent
import TelegramPresentationData

private protocol StoryVideoView: UIView {
var audioMode: StoryContentItem.AudioMode { get set }
var playbackCompleted: (() -> Void)? { get set }
var status: Signal<MediaPlayerStatus?, NoError> { get }

func play()
func pause()
func seek(timestamp: Double)
func setSoundMuted(soundMuted: Bool)
func continueWithOverridingAmbientMode(isAmbient: Bool)
func setBaseRate(baseRate: Double)
func update(size: CGSize, transition: ComponentTransition)
}

private final class LegacyStoryVideoView: UIView, StoryVideoView {
private let videoNode: UniversalVideoNode

var audioMode: StoryContentItem.AudioMode
var playbackCompleted: (() -> Void)?

var status: Signal<MediaPlayerStatus?, NoError> {
return self.videoNode.status
}

init(
context: AccountContext,
file: FileMediaReference,
audioMode: StoryContentItem.AudioMode,
baseRate: Double,
isCaptureProtected: Bool
) {
self.audioMode = audioMode

var userLocation: MediaResourceUserLocation = .other
switch file {
case let .story(peer, _, _):
userLocation = .peer(peer.id)
default:
break
}
var hasSentFramesToDisplay: (() -> Void)?
self.videoNode = UniversalVideoNode(
context: context,
postbox: context.account.postbox,
audioSession: context.sharedContext.mediaManager.audioSession,
manager: context.sharedContext.mediaManager.universalVideoManager,
decoration: StoryVideoDecoration(),
content: NativeVideoContent(
id: .contextResult(0, "\(UInt64.random(in: 0 ... UInt64.max))"),
userLocation: userLocation,
fileReference: file,
imageReference: nil,
streamVideo: .story,
loopVideo: true,
enableSound: true,
soundMuted: audioMode == .off,
beginWithAmbientSound: audioMode == .ambient,
mixWithOthers: true,
useLargeThumbnail: false,
autoFetchFullSizeThumbnail: false,
tempFilePath: nil,
captureProtected: isCaptureProtected,
hintDimensions: file.media.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false,
hasSentFramesToDisplay: {
hasSentFramesToDisplay?()
}
),
priority: .gallery
)
self.videoNode.isHidden = true
self.videoNode.setBaseRate(baseRate)

super.init(frame: CGRect())

hasSentFramesToDisplay = { [weak self] in
guard let self else {
return
}
self.videoNode.isHidden = false
}

self.videoNode.playbackCompleted = { [weak self] in
guard let self else {
return
}
self.playbackCompleted?()
}

self.addSubview(self.videoNode.view)

self.videoNode.ownsContentNodeUpdated = { [weak self] value in
guard let self else {
return
}
if value {
self.videoNode.seek(0.0)
if self.audioMode != .off {
self.videoNode.playOnceWithSound(playAndRecord: false, actionAtEnd: .stop)
} else {
self.videoNode.play()
}
}
}
self.videoNode.canAttachContent = true
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

func play() {
self.videoNode.play()
}

func pause() {
self.videoNode.pause()
}
||||
func seek(timestamp: Double) {
|
||||
self.videoNode.seek(timestamp)
|
||||
}
|
||||
|
||||
func setSoundMuted(soundMuted: Bool) {
|
||||
self.videoNode.setSoundMuted(soundMuted: soundMuted)
|
||||
}
|
||||
|
||||
func continueWithOverridingAmbientMode(isAmbient: Bool) {
|
||||
self.videoNode.continueWithOverridingAmbientMode(isAmbient: isAmbient)
|
||||
}
|
||||
|
||||
func setBaseRate(baseRate: Double) {
|
||||
self.videoNode.setBaseRate(baseRate)
|
||||
}
|
||||
|
||||
func update(size: CGSize, transition: ComponentTransition) {
|
||||
transition.setFrame(view: self.videoNode.view, frame: CGRect(origin: CGPoint(), size: size))
|
||||
self.videoNode.updateLayout(size: size, transition: transition.containedViewLayoutTransition)
|
||||
}
|
||||
}
|
||||
|
||||
private final class ModernStoryVideoView: UIView, StoryVideoView {
|
||||
private let player: ChunkMediaPlayerV2
|
||||
private let playerNode: MediaPlayerNode
|
||||
|
||||
var audioMode: StoryContentItem.AudioMode
|
||||
var playbackCompleted: (() -> Void)?
|
||||
var isFirstPlay: Bool = true
|
||||
|
||||
var status: Signal<MediaPlayerStatus?, NoError> {
|
||||
return self.player.status |> map(Optional.init)
|
||||
}
|
||||
|
||||
init(
|
||||
context: AccountContext,
|
||||
audioContext: ChunkMediaPlayerV2.AudioContext,
|
||||
file: FileMediaReference,
|
||||
audioMode: StoryContentItem.AudioMode,
|
||||
baseRate: Double,
|
||||
isCaptureProtected: Bool
|
||||
) {
|
||||
self.audioMode = audioMode
|
||||
|
||||
self.playerNode = MediaPlayerNode(
|
||||
backgroundThread: false,
|
||||
captureProtected: isCaptureProtected
|
||||
)
|
||||
|
||||
var userLocation: MediaResourceUserLocation = .other
|
||||
switch file {
|
||||
case let .story(peer, _, _):
|
||||
userLocation = .peer(peer.id)
|
||||
default:
|
||||
break
|
||||
}
|
||||
|
||||
self.player = ChunkMediaPlayerV2(
|
||||
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
|
||||
audioContext: audioContext,
|
||||
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
|
||||
postbox: context.account.postbox,
|
||||
size: file.media.size ?? 0,
|
||||
reference: file.resourceReference(file.media.resource),
|
||||
userLocation: userLocation,
|
||||
userContentType: .story,
|
||||
statsCategory: statsCategoryForFileWithAttributes(file.media.attributes),
|
||||
fetchAutomatically: false
|
||||
)),
|
||||
video: true,
|
||||
playAutomatically: false,
|
||||
enableSound: true,
|
||||
baseRate: baseRate,
|
||||
soundMuted: audioMode == .off,
|
||||
ambient: audioMode == .ambient,
|
||||
mixWithOthers: true,
|
||||
continuePlayingWithoutSoundOnLostAudioSession: false,
|
||||
isAudioVideoMessage: false,
|
||||
playerNode: self.playerNode
|
||||
)
|
||||
self.playerNode.isHidden = true
|
||||
self.player.setBaseRate(baseRate)
|
||||
|
||||
super.init(frame: CGRect())
|
||||
|
||||
self.addSubview(self.playerNode.view)
|
||||
|
||||
self.playerNode.hasSentFramesToDisplay = { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.playerNode.isHidden = false
|
||||
}
|
||||
|
||||
self.player.actionAtEnd = .action({ [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.playbackCompleted?()
|
||||
})
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
func play() {
|
||||
if self.isFirstPlay {
|
||||
self.isFirstPlay = false
|
||||
|
||||
if self.audioMode != .off {
|
||||
self.player.playOnceWithSound(playAndRecord: false, seek: .start)
|
||||
} else {
|
||||
self.player.play()
|
||||
}
|
||||
} else {
|
||||
self.player.play()
|
||||
}
|
||||
}
|
||||
|
||||
func pause() {
|
||||
self.player.pause()
|
||||
}
|
||||
|
||||
func seek(timestamp: Double) {
|
||||
self.player.seek(timestamp: timestamp, play: nil)
|
||||
}
|
||||
|
||||
func setSoundMuted(soundMuted: Bool) {
|
||||
self.player.setSoundMuted(soundMuted: soundMuted)
|
||||
}
|
||||
|
||||
func continueWithOverridingAmbientMode(isAmbient: Bool) {
|
||||
self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
|
||||
}
|
||||
|
||||
func setBaseRate(baseRate: Double) {
|
||||
self.player.setBaseRate(baseRate)
|
||||
}
|
||||
|
||||
func update(size: CGSize, transition: ComponentTransition) {
|
||||
transition.containedViewLayoutTransition.updateFrame(node: self.playerNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||
}
|
||||
|
||||
func updateNext(nextVideoView: ModernStoryVideoView?) {
|
||||
self.player.migrateToNextPlayerOnEnd = nextVideoView?.player
|
||||
}
|
||||
}
|
||||
|
||||
final class StoryItemContentComponent: Component {
|
||||
typealias EnvironmentType = StoryContentItem.Environment
|
||||
|
||||
@ -360,11 +91,10 @@ final class StoryItemContentComponent: Component {
final class View: StoryContentItem.View {
private let imageView: StoryItemImageView
private let overlaysView: StoryItemOverlaysView
private var videoNode: UniversalVideoNode?
private var loadingEffectView: StoryItemLoadingEffectView?
private var loadingEffectAppearanceTimer: SwiftSignalKit.Timer?

private var videoView: StoryVideoView?

private var mediaAreasEffectView: StoryItemLoadingEffectView?

private var currentMessageMedia: EngineMedia?

@ -399,8 +129,6 @@ final class StoryItemContentComponent: Component {
private var fetchPriorityResourceId: String?
private var currentFetchPriority: (isMain: Bool, disposable: Disposable)?

private weak var nextItemView: StoryItemContentComponent.View?

override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.imageView = StoryItemImageView()

@ -458,7 +186,10 @@ final class StoryItemContentComponent: Component {
}

private func initializeVideoIfReady(update: Bool) {
if self.videoView != nil {
if self.videoNode != nil {
return
}
if case .pause = self.progressMode {
return
}

@ -466,49 +197,48 @@ final class StoryItemContentComponent: Component {
return
}

var useLegacyImplementation = true
if let data = component.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_video_legacystoryplayer"] as? Double {
useLegacyImplementation = value != 0.0
}

if case .pause = self.progressMode {
if useLegacyImplementation {
if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) {
if self.videoNode == nil {
let videoNode = UniversalVideoNode(
context: component.context,
postbox: component.context.account.postbox,
audioSession: component.context.sharedContext.mediaManager.audioSession,
manager: component.context.sharedContext.mediaManager.universalVideoManager,
decoration: StoryVideoDecoration(),
content: NativeVideoContent(
id: .contextResult(0, "\(UInt64.random(in: 0 ... UInt64.max))"),
userLocation: .peer(peerReference.id),
fileReference: .story(peer: peerReference, id: component.item.id, media: file),
imageReference: nil,
streamVideo: .story,
loopVideo: true,
enableSound: true,
soundMuted: component.audioMode == .off,
beginWithAmbientSound: component.audioMode == .ambient,
mixWithOthers: true,
useLargeThumbnail: false,
autoFetchFullSizeThumbnail: false,
tempFilePath: nil,
captureProtected: component.item.isForwardingDisabled,
hintDimensions: file.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false,
hasSentFramesToDisplay: { [weak self] in
guard let self else {
return
}
self.videoNode?.isHidden = false
}

if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) {
if self.videoView == nil {
let videoView: StoryVideoView
if useLegacyImplementation {
videoView = LegacyStoryVideoView(
context: component.context,
file: .story(peer: peerReference, id: component.item.id, media: file),
audioMode: component.audioMode,
baseRate: component.baseRate,
isCaptureProtected: component.item.isForwardingDisabled
),
priority: .gallery
)
} else {
let audioContext: ChunkMediaPlayerV2.AudioContext
if let current = self.environment?.sharedState.audioContext {
audioContext = current
} else {
audioContext = ChunkMediaPlayerV2.AudioContext(audioSessionManager: component.context.sharedContext.mediaManager.audioSession)
self.environment?.sharedState.audioContext = audioContext
}
videoView = ModernStoryVideoView(
context: component.context,
audioContext: audioContext,
file: .story(peer: peerReference, id: component.item.id, media: file),
audioMode: component.audioMode,
baseRate: component.baseRate,
isCaptureProtected: component.item.isForwardingDisabled
)
}
self.videoView = videoView
self.insertSubview(videoView, aboveSubview: self.imageView)
videoNode.isHidden = true
videoNode.setBaseRate(component.baseRate)

videoView.playbackCompleted = { [weak self] in
self.videoNode = videoNode
self.insertSubview(videoNode.view, aboveSubview: self.imageView)

videoNode.playbackCompleted = { [weak self] in
guard let self else {
return
}

@ -523,24 +253,38 @@ final class StoryItemContentComponent: Component {
if shouldLoop {
self.rewind()

if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.contentLoaded {
videoView.play()
videoNode.play()
}
}
} else {
self.environment?.presentationProgressUpdated(1.0, false, true)
}
}
videoNode.ownsContentNodeUpdated = { [weak self] value in
guard let self, let component = self.component else {
return
}
if value {
self.videoNode?.seek(0.0)
if component.audioMode != .off {
self.videoNode?.playOnceWithSound(playAndRecord: false, actionAtEnd: .stop)
} else {
self.videoNode?.play()
}
}
}
videoNode.canAttachContent = true
if update {
self.state?.updated(transition: .immediate)
}
}
}

if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.videoProgressDisposable == nil {
self.videoProgressDisposable = (videoView.status
self.videoProgressDisposable = (videoNode.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self, let status else {
return

@ -552,19 +296,9 @@ final class StoryItemContentComponent: Component {
}
})
}

let canPlay = self.progressMode != .pause && self.contentLoaded && self.hierarchyTrackingLayer.isInHierarchy

if canPlay {
videoView.play()
} else {
videoView.pause()
}
}

self.updateVideoNextItem()
}

override func setProgressMode(_ progressMode: StoryContentItem.ProgressMode) {
if self.progressMode != progressMode {
self.progressMode = progressMode

@ -576,62 +310,48 @@ final class StoryItemContentComponent: Component {
}
}

func setNextItemView(nextItemView: StoryItemContentComponent.View?) {
if self.nextItemView !== nextItemView {
self.nextItemView = nextItemView
self.updateVideoNextItem()
}
}

private func updateVideoNextItem() {
if let videoView = self.videoView as? ModernStoryVideoView {
let nextVideoView = self.nextItemView?.videoView as? ModernStoryVideoView
videoView.updateNext(nextVideoView: nextVideoView)
}
}

override func rewind() {
self.currentProgressTimerValue = 0.0
if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.contentLoaded {
videoView.seek(timestamp: 0.0)
videoNode.seek(0.0)
}
}
}

override func leaveAmbientMode() {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
videoView.setSoundMuted(soundMuted: false)
videoView.continueWithOverridingAmbientMode(isAmbient: false)
videoNode.setSoundMuted(soundMuted: false)
videoNode.continueWithOverridingAmbientMode(isAmbient: false)
}
}

override func enterAmbientMode(ambient: Bool) {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
if ambient {
videoView.continueWithOverridingAmbientMode(isAmbient: true)
videoNode.continueWithOverridingAmbientMode(isAmbient: true)
} else {
videoView.setSoundMuted(soundMuted: true)
videoNode.setSoundMuted(soundMuted: true)
}
}
}

override func setBaseRate(_ baseRate: Double) {
if let videoView = self.videoView {
videoView.setBaseRate(baseRate: baseRate)
if let videoNode = self.videoNode {
videoNode.setBaseRate(baseRate)
}
}

private func updateProgressMode(update: Bool) {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
let canPlay = self.progressMode != .pause && self.contentLoaded && self.hierarchyTrackingLayer.isInHierarchy

if canPlay {
videoView.play()
videoNode.play()
} else {
videoView.pause()
videoNode.pause()
}
}

@ -846,11 +566,11 @@ final class StoryItemContentComponent: Component {

private var isSeeking = false
func seekTo(_ timestamp: Double, apply: Bool) {
guard let videoView = self.videoView else {
guard let videoNode = self.videoNode else {
return
}
if apply {
videoView.seek(timestamp: min(timestamp, self.effectiveDuration - 0.3))
videoNode.seek(min(timestamp, self.effectiveDuration - 0.3))
}
self.isSeeking = true
self.updateVideoPlaybackProgress(timestamp)

@ -868,10 +588,6 @@ final class StoryItemContentComponent: Component {
let environment = environment[StoryContentItem.Environment.self].value
self.environment = environment

if let videoView = self.videoView {
videoView.audioMode = component.audioMode
}

var synchronousLoad = false
if let hint = transition.userData(Hint.self) {
synchronousLoad = hint.synchronousLoad

@ -916,12 +632,12 @@ final class StoryItemContentComponent: Component {
self.currentMessageMedia = messageMedia
reloadMedia = true

if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.videoProgressDisposable?.dispose()
self.videoProgressDisposable = nil

self.videoView = nil
videoView.removeFromSuperview()
self.videoNode = nil
videoNode.view.removeFromSuperview()
}
}
self.currentMessageMetadataMedia = component.item.media

@ -1051,10 +767,10 @@ final class StoryItemContentComponent: Component {
}
let _ = imageSize

if let videoView = self.videoView {
if let videoNode = self.videoNode {
let videoSize = dimensions.aspectFilled(availableSize)
videoView.frame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) * 0.5), y: floor((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
videoView.update(size: videoSize, transition: .immediate)
videoNode.frame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) * 0.5), y: floor((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
videoNode.updateLayout(size: videoSize, transition: .immediate)
}
}
}
@ -1478,7 +1478,7 @@ public final class StoryItemSetContainerComponent: Component {
}

if itemLayout.contentScaleFraction <= 0.0001 && !self.preparingToDisplayViewList {
if index != centralIndex && index != centralIndex + 1 {
if index != centralIndex {
itemVisible = false
}
}

@ -1870,19 +1870,6 @@ public final class StoryItemSetContainerComponent: Component {
}
}

for i in 0 ..< component.slice.allItems.count {
guard let visibleItem = self.visibleItems[component.slice.allItems[i].id] else {
continue
}
var nextVisibleItem: VisibleItem?
if i != component.slice.allItems.count - 1 {
nextVisibleItem = self.visibleItems[component.slice.allItems[i + 1].id]
}
if let itemView = visibleItem.view.view as? StoryItemContentComponent.View {
itemView.setNextItemView(nextItemView: nextVisibleItem?.view.view as? StoryItemContentComponent.View)
}
}

self.trulyValidIds = trulyValidIds

var removeIds: [StoryId] = []
@ -117,6 +117,8 @@ public final class TabSelectorComponent: Component {
private let selectionView: UIImageView
private var visibleItems: [AnyHashable: VisibleItem] = [:]

private var didInitiallyScroll = false

override init(frame: CGRect) {
self.selectionView = UIImageView()

@ -238,11 +240,15 @@ public final class TabSelectorComponent: Component {
)),
effectAlignment: .center,
minSize: nil,
action: { [weak self] in
action: { [weak self, weak itemView] in
guard let self, let component = self.component else {
return
}
component.setSelectedId(itemId)

if let view = itemView?.title.view, allowScroll && self.contentSize.width > self.bounds.width {
self.scrollRectToVisible(view.frame.insetBy(dx: -64.0, dy: 0.0), animated: true)
}
},
animateScale: !isLineSelection
)),

@ -336,11 +342,15 @@ public final class TabSelectorComponent: Component {
self.selectionView.alpha = 0.0
}

self.contentSize = CGSize(width: contentWidth, height: baseHeight + verticalInset * 2.0)
let contentSize = CGSize(width: contentWidth, height: baseHeight + verticalInset * 2.0)
if self.contentSize != contentSize {
self.contentSize = contentSize
}
self.disablesInteractiveTransitionGestureRecognizer = contentWidth > availableSize.width

if let selectedBackgroundRect, self.bounds.width > 0.0 {
if let selectedBackgroundRect, self.bounds.width > 0.0 && !self.didInitiallyScroll {
self.scrollRectToVisible(selectedBackgroundRect.insetBy(dx: -spacing, dy: 0.0), animated: false)
self.didInitiallyScroll = true
}

return CGSize(width: min(contentWidth, availableSize.width), height: baseHeight + verticalInset * 2.0)
@ -1,12 +0,0 @@
{
"images" : [
{
"filename" : "ic_qrcode.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
Binary file not shown.
@ -1,4 +1,5 @@
import Foundation
import UniformTypeIdentifiers
import UIKit
import Display
import AsyncDisplayKit

@ -44,6 +45,7 @@ import TelegramNotices
import AnimatedCountLabelNode
import TelegramStringFormatting
import TextNodeWithEntities
import DeviceModel

private let accessoryButtonFont = Font.medium(14.0)
private let counterFont = Font.with(size: 14.0, design: .regular, traits: [.monospacedNumbers])

@ -537,6 +539,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
var customEmojiContainerView: CustomEmojiContainerView?

let textInputBackgroundNode: ASImageNode
var textInputBackgroundTapRecognizer: TouchDownGestureRecognizer?
private var transparentTextInputBackgroundImage: UIImage?
let actionButtons: ChatTextInputActionButtonsNode
private let slowModeButton: BoostSlowModeButton

@ -1087,6 +1090,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
return false
}
}
self.textInputBackgroundTapRecognizer = recognizer
self.textInputBackgroundNode.isUserInteractionEnabled = true
self.textInputBackgroundNode.view.addGestureRecognizer(recognizer)

@ -1164,6 +1168,11 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
textInputNode.isUserInteractionEnabled = !self.sendingTextDisabled
self.textInputNode = textInputNode

if let textInputBackgroundTapRecognizer = self.textInputBackgroundTapRecognizer {
self.textInputBackgroundTapRecognizer = nil
self.textInputBackgroundNode.view.removeGestureRecognizer(textInputBackgroundTapRecognizer)
}

var accessoryButtonsWidth: CGFloat = 0.0
var firstButton = true
for (_, button) in self.accessoryItemButtons {

@ -4473,11 +4482,15 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
var attributedString: NSAttributedString?
if let data = pasteboard.data(forPasteboardType: "private.telegramtext"), let value = chatInputStateStringFromAppSpecificString(data: data) {
attributedString = value
} else if let data = pasteboard.data(forPasteboardType: kUTTypeRTF as String) {
} else if let data = pasteboard.data(forPasteboardType: "public.rtf") {
attributedString = chatInputStateStringFromRTF(data, type: NSAttributedString.DocumentType.rtf)
} else if let data = pasteboard.data(forPasteboardType: "com.apple.flat-rtfd") {
if let _ = pasteboard.data(forPasteboardType: "com.apple.notes.richtext"), DeviceModel.current.isIpad, let htmlData = pasteboard.data(forPasteboardType: "public.html") {
attributedString = chatInputStateStringFromRTF(htmlData, type: NSAttributedString.DocumentType.html)
} else {
attributedString = chatInputStateStringFromRTF(data, type: NSAttributedString.DocumentType.rtfd)
}
}

if let attributedString = attributedString {
self.interfaceInteraction?.updateTextInputStateAndMode { current, inputMode in
@ -1093,7 +1093,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
audioSessionManager: audioSessionManager,
source: .externalParts(self.chunkPlayerPartsState.get()),
video: true,
enableSound: self.enableSound,

@ -520,7 +520,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
} else {
let mediaPlayer = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
audioSessionManager: audioSessionManager,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: postbox,
size: selectedFile.size ?? 0,
@ -78,7 +78,7 @@ public struct ChatTranslationState: Codable {
private func cachedChatTranslationState(engine: TelegramEngine, peerId: EnginePeer.Id, threadId: Int64?) -> Signal<ChatTranslationState?, NoError> {
let key: EngineDataBuffer
if let threadId {
key = EngineDataBuffer(length: 8 + 8)
key = EngineDataBuffer(length: 16)
key.setInt64(0, value: peerId.id._internalGetInt64Value())
key.setInt64(8, value: threadId)
} else {

@ -95,7 +95,7 @@ private func cachedChatTranslationState(engine: TelegramEngine, peerId: EnginePe
private func updateChatTranslationState(engine: TelegramEngine, peerId: EnginePeer.Id, threadId: Int64?, state: ChatTranslationState?) -> Signal<Never, NoError> {
let key: EngineDataBuffer
if let threadId {
key = EngineDataBuffer(length: 8 + 8)
key = EngineDataBuffer(length: 16)
key.setInt64(0, value: peerId.id._internalGetInt64Value())
key.setInt64(8, value: threadId)
} else {

@ -111,10 +111,14 @@ private func updateChatTranslationState(engine: TelegramEngine, peerId: EnginePe
}

public func updateChatTranslationStateInteractively(engine: TelegramEngine, peerId: EnginePeer.Id, threadId: Int64?, _ f: @escaping (ChatTranslationState?) -> ChatTranslationState?) -> Signal<Never, NoError> {
let key = EngineDataBuffer(length: 8)
key.setInt64(0, value: peerId.id._internalGetInt64Value())
let key: EngineDataBuffer
if let threadId {
key = EngineDataBuffer(length: 16)
key.setInt64(0, value: peerId.id._internalGetInt64Value())
key.setInt64(8, value: threadId)
} else {
key = EngineDataBuffer(length: 8)
key.setInt64(0, value: peerId.id._internalGetInt64Value())
}

return engine.data.get(TelegramEngine.EngineData.Item.ItemCache.Item(collectionId: ApplicationSpecificItemCacheCollectionId.translationState, id: key))
@ -828,7 +828,11 @@ public final class WebAppController: ViewController, AttachmentContainable {
}

if let webView = self.webView {
var scrollInset = UIEdgeInsets(top: 0.0, left: 0.0, bottom: layout.intrinsicInsets.bottom, right: 0.0)
let inputHeight = self.validLayout?.0.inputHeight ?? 0.0

let intrinsicBottomInset = layout.intrinsicInsets.bottom > 40.0 ? layout.intrinsicInsets.bottom : 0.0

var scrollInset = UIEdgeInsets(top: 0.0, left: 0.0, bottom: max(inputHeight, intrinsicBottomInset), right: 0.0)
var frameBottomInset: CGFloat = 0.0
if scrollInset.bottom > 40.0 {
frameBottomInset = scrollInset.bottom

@ -842,11 +846,11 @@ public final class WebAppController: ViewController, AttachmentContainable {
self.updateWebViewWhenStable = true
}

var bottomInset = layout.intrinsicInsets.bottom + layout.additionalInsets.bottom
if let inputHeight = self.validLayout?.0.inputHeight, inputHeight > 44.0 {
bottomInset = max(bottomInset, inputHeight)
var viewportBottomInset = max(frameBottomInset, scrollInset.bottom)
if (self.validLayout?.0.inputHeight ?? 0.0) < 44.0 {
viewportBottomInset += layout.additionalInsets.bottom
}
let viewportFrame = CGRect(origin: CGPoint(x: layout.safeInsets.left, y: topInset), size: CGSize(width: layout.size.width - layout.safeInsets.left - layout.safeInsets.right, height: max(1.0, layout.size.height - topInset - bottomInset)))
let viewportFrame = CGRect(origin: CGPoint(x: layout.safeInsets.left, y: topInset), size: CGSize(width: layout.size.width - layout.safeInsets.left - layout.safeInsets.right, height: max(1.0, layout.size.height - topInset - viewportBottomInset)))

if webView.scrollView.contentInset != scrollInset {
webView.scrollView.contentInset = scrollInset

@ -1061,6 +1065,10 @@ public final class WebAppController: ViewController, AttachmentContainable {
} else {
self.lastExpansionTimestamp = currentTimestamp
controller.requestAttachmentMenuExpansion()

Queue.mainQueue().after(0.4) {
self.webView?.setNeedsLayout()
}
}
case "web_app_close":
controller.dismiss()