Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00

Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

This commit is contained in: commit 56955e5c24

@@ -6,26 +6,60 @@ import SwiftSignalKit
import BuildConfig
import BroadcastUploadHelpers
import AudioToolbox
import Postbox
import CoreMedia
import AVFoundation

private func rootPathForBasePath(_ appGroupPath: String) -> String {
return appGroupPath + "/telegram-data"
}
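
Review note: every IPC coordination path below hangs off this root. A minimal sketch of how the root is resolved in practice (the group-name derivation is partly elided by the hunks; containerURL(forSecurityApplicationGroupIdentifier:) is the real FileManager API that broadcastStarted uses further down):

import Foundation

func telegramDataRoot(appGroupName: String) -> String? {
    // Resolve the shared app-group container; nil if the group is not configured.
    guard let url = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGroupName) else {
        return nil
    }
    return rootPathForBasePath(url.path)
}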

@available(iOS 10.0, *)
@objc(BroadcastUploadSampleHandler) class BroadcastUploadSampleHandler: RPBroadcastSampleHandler {
private protocol BroadcastUploadImpl: AnyObject {
func initialize(rootPath: String)
func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer)
func processAudioSampleBuffer(data: Data)
}

private final class InProcessBroadcastUploadImpl: BroadcastUploadImpl {
private weak var extensionContext: RPBroadcastSampleHandler?
private var screencastBufferClientContext: IpcGroupCallBufferBroadcastContext?
private var statusDisposable: Disposable?
private var audioConverter: CustomAudioConverter?


init(extensionContext: RPBroadcastSampleHandler) {
self.extensionContext = extensionContext
}

deinit {
self.statusDisposable?.dispose()
}

func initialize(rootPath: String) {
let screencastBufferClientContext = IpcGroupCallBufferBroadcastContext(basePath: rootPath + "/broadcast-coordination")
self.screencastBufferClientContext = screencastBufferClientContext

public override func beginRequest(with context: NSExtensionContext) {
super.beginRequest(with: context)
var wasRunning = false
self.statusDisposable = (screencastBufferClientContext.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self else {
return
}
switch status {
case .active:
wasRunning = true
case let .finished(reason):
if wasRunning {
self.finish(with: .screencastEnded)
} else {
self.finish(with: reason)
}
}
})
}


private func finish(with reason: IpcGroupCallBufferBroadcastContext.Status.FinishReason) {
guard let extensionContext = self.extensionContext else {
return
}
var errorString: String?
switch reason {
case .callEnded:

@@ -39,16 +73,247 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [
NSLocalizedDescriptionKey: errorString
])
finishBroadcastWithError(error)
extensionContext.finishBroadcastWithError(error)
} else {
finishBroadcastGracefully(self)
finishBroadcastGracefully(extensionContext)
}
}
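
Review note: the paired old/new lines above show the actual change in this hunk: finish(with:) now lives outside the RPBroadcastSampleHandler subclass, so it must call finishBroadcastWithError(_:) on the weakly held handler instead of implicitly on self. A self-contained sketch of that reporting pattern (the ReplayKit call is real; the helper type and names are illustrative):

import ReplayKit

final class ExampleUploadImpl {
    private weak var extensionContext: RPBroadcastSampleHandler?

    init(extensionContext: RPBroadcastSampleHandler) {
        self.extensionContext = extensionContext
    }

    func fail(message: String) {
        guard let extensionContext else { return }
        // finishBroadcastWithError(_:) ends the broadcast and surfaces the
        // error's localizedDescription to the user.
        extensionContext.finishBroadcastWithError(NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [NSLocalizedDescriptionKey: message]))
    }
}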

func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) {
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
var orientation = CGImagePropertyOrientation.up
if #available(iOS 11.0, *) {
if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber {
orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up
}
}
if let data = serializePixelBuffer(buffer: pixelBuffer) {
self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation)
}
}

func processAudioSampleBuffer(data: Data) {
self.screencastBufferClientContext?.writeAudioData(data: data)
}
}

private final class EmbeddedBroadcastUploadImpl: BroadcastUploadImpl {
private weak var extensionContext: RPBroadcastSampleHandler?

private var clientContext: IpcGroupCallEmbeddedBroadcastContext?
private var statusDisposable: Disposable?

private var callContextId: UInt32?
private var callContextDidSetJoinResponse: Bool = false
private var callContext: OngoingGroupCallContext?
private let screencastCapturer: OngoingCallVideoCapturer

private var joinPayloadDisposable: Disposable?

private var sampleBuffers: [CMSampleBuffer] = []
private var lastAcceptedTimestamp: Double?

init(extensionContext: RPBroadcastSampleHandler) {
self.extensionContext = extensionContext

self.screencastCapturer = OngoingCallVideoCapturer(isCustom: true)
}

deinit {
self.joinPayloadDisposable?.dispose()
}

func initialize(rootPath: String) {
let clientContext = IpcGroupCallEmbeddedBroadcastContext(basePath: rootPath + "/embedded-broadcast-coordination")
self.clientContext = clientContext

var wasRunning = false
self.statusDisposable = (clientContext.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self else {
return
}
switch status {
case let .active(id, joinResponse):
wasRunning = true

if self.callContextId != id {
if let callContext = self.callContext {
self.callContext = nil
self.callContextId = nil
self.callContextDidSetJoinResponse = false
self.joinPayloadDisposable?.dispose()
self.joinPayloadDisposable = nil
callContext.stop(account: nil, reportCallId: nil, debugLog: Promise())
}
}

if let id {
if self.callContext == nil {
self.callContextId = id
let callContext = OngoingGroupCallContext(
audioSessionActive: .single(true),
video: self.screencastCapturer,
requestMediaChannelDescriptions: { _, _ in EmptyDisposable },
rejoinNeeded: { },
outgoingAudioBitrateKbit: nil,
videoContentType: .screencast,
enableNoiseSuppression: false,
disableAudioInput: true,
enableSystemMute: false,
preferX264: false,
logPath: "",
onMutedSpeechActivityDetected: { _ in },
encryptionKey: nil,
isConference: false,
sharedAudioDevice: nil
)
self.callContext = callContext
self.joinPayloadDisposable = (callContext.joinPayload
|> deliverOnMainQueue).start(next: { [weak self] joinPayload in
guard let self else {
return
}
if self.callContextId != id {
return
}
self.clientContext?.joinPayload = IpcGroupCallEmbeddedAppContext.JoinPayload(
id: id,
data: joinPayload.0,
ssrc: joinPayload.1
)
})
}

if let callContext = self.callContext {
if let joinResponse, !self.callContextDidSetJoinResponse {
self.callContextDidSetJoinResponse = true
callContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false)
callContext.setJoinResponse(payload: joinResponse.data)
}
}
}
case let .finished(reason):
if wasRunning {
self.finish(with: .screencastEnded)
} else {
self.finish(with: reason)
}
}
})
}

private func finish(with reason: IpcGroupCallEmbeddedBroadcastContext.Status.FinishReason) {
guard let extensionContext = self.extensionContext else {
return
}
var errorString: String?
switch reason {
case .callEnded:
errorString = "You're not in a voice chat"
case .error:
errorString = "Finished"
case .screencastEnded:
break
}
if let errorString = errorString {
let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [
NSLocalizedDescriptionKey: errorString
])
extensionContext.finishBroadcastWithError(error)
} else {
finishBroadcastGracefully(extensionContext)
}
}

func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) {
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
if let lastAcceptedTimestamp = self.lastAcceptedTimestamp {
if lastAcceptedTimestamp + 1.0 / 30.0 > timestamp {
return
}
}
self.lastAcceptedTimestamp = timestamp

guard let sourceImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
let sourcePixelBuffer: CVPixelBuffer = sourceImageBuffer as CVPixelBuffer

let width = CVPixelBufferGetWidth(sourcePixelBuffer)
let height = CVPixelBufferGetHeight(sourcePixelBuffer)
let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer)

var outputPixelBuffer: CVPixelBuffer?
let pixelFormat = CVPixelBufferGetPixelFormatType(sourcePixelBuffer)
CVPixelBufferCreate(nil, width, height, pixelFormat, nil, &outputPixelBuffer)
guard let outputPixelBuffer else {
return
}
CVPixelBufferLockBaseAddress(sourcePixelBuffer, [])
CVPixelBufferLockBaseAddress(outputPixelBuffer, [])

let outputBytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer)

let sourceBaseAddress = CVPixelBufferGetBaseAddress(sourcePixelBuffer)
let outputBaseAddress = CVPixelBufferGetBaseAddress(outputPixelBuffer)

if outputBytesPerRow == sourceBytesPerRow {
memcpy(outputBaseAddress!, sourceBaseAddress!, height * outputBytesPerRow)
} else {
for y in 0 ..< height {
memcpy(outputBaseAddress!.advanced(by: y * outputBytesPerRow), sourceBaseAddress!.advanced(by: y * sourceBytesPerRow), min(sourceBytesPerRow, outputBytesPerRow))
}
}

defer {
CVPixelBufferUnlockBaseAddress(sourcePixelBuffer, [])
CVPixelBufferUnlockBaseAddress(outputPixelBuffer, [])
}

var orientation = CGImagePropertyOrientation.up
if #available(iOS 11.0, *) {
if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber {
orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up
}
}

if let outputSampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: outputPixelBuffer) {
let semaphore = DispatchSemaphore(value: 0)
self.screencastCapturer.injectSampleBuffer(outputSampleBuffer, rotation: orientation, completion: {
//semaphore.signal()
})
let _ = semaphore.wait(timeout: DispatchTime.now() + 1.0 / 30.0)
}
}
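
Review note: two details in the method above are worth calling out. Bytes-per-row can legitimately differ between the source and the freshly created buffer (CoreVideo may pad rows for alignment), hence the row-by-row copy fallback; and because the completion's semaphore.signal() is commented out, the wait always times out, acting as a fixed 1/30 s pacing delay rather than real backpressure. The timestamp throttle at the top is separable; a minimal sketch of the same logic:

import CoreMedia

struct FrameThrottle {
    // Accept at most one frame per interval, keyed by presentation time.
    let minInterval: Double = 1.0 / 30.0
    private var lastAcceptedTimestamp: Double?

    mutating func shouldAccept(_ sampleBuffer: CMSampleBuffer) -> Bool {
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
        if let last = lastAcceptedTimestamp, last + minInterval > timestamp {
            return false
        }
        lastAcceptedTimestamp = timestamp
        return true
    }
}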

func processAudioSampleBuffer(data: Data) {
self.callContext?.addExternalAudioData(data: data)
}
}

@available(iOS 10.0, *)
@objc(BroadcastUploadSampleHandler) class BroadcastUploadSampleHandler: RPBroadcastSampleHandler {
private var impl: BroadcastUploadImpl?
private var audioConverter: CustomAudioConverter?

public override func beginRequest(with context: NSExtensionContext) {
super.beginRequest(with: context)
}

private func finishWithError() {
let errorString = "Finished"
let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [
NSLocalizedDescriptionKey: errorString
])
self.finishBroadcastWithError(error)
}

override public func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
guard let appBundleIdentifier = Bundle.main.bundleIdentifier, let lastDotRange = appBundleIdentifier.range(of: ".", options: [.backwards]) else {
self.finish(with: .error)
self.finishWithError()
return
}

@@ -58,35 +323,32 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
let maybeAppGroupUrl = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGroupName)

guard let appGroupUrl = maybeAppGroupUrl else {
self.finish(with: .error)
self.finishWithError()
return
}

let rootPath = rootPathForBasePath(appGroupUrl.path)

TempBox.initializeShared(basePath: rootPath, processType: "share", launchSpecificId: Int64.random(in: Int64.min ... Int64.max))

let logsPath = rootPath + "/logs/broadcast-logs"
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)

let screencastBufferClientContext = IpcGroupCallBufferBroadcastContext(basePath: rootPath + "/broadcast-coordination")
self.screencastBufferClientContext = screencastBufferClientContext

var wasRunning = false
self.statusDisposable = (screencastBufferClientContext.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let strongSelf = self else {
return
}
switch status {
case .active:
wasRunning = true
case let .finished(reason):
if wasRunning {
strongSelf.finish(with: .screencastEnded)
} else {
strongSelf.finish(with: reason)
}
}
})
let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type"

var useIPCContext = false
if let typeData = try? Data(contentsOf: URL(fileURLWithPath: embeddedBroadcastImplementationTypePath)), let type = String(data: typeData, encoding: .utf8) {
useIPCContext = type == "ipc"
}

let impl: BroadcastUploadImpl
if useIPCContext {
impl = EmbeddedBroadcastUploadImpl(extensionContext: self)
} else {
impl = InProcessBroadcastUploadImpl(extensionContext: self)
}
self.impl = impl
impl.initialize(rootPath: rootPath)
}

override public func broadcastPaused() {

@@ -112,18 +374,7 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
}

private func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) {
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
var orientation = CGImagePropertyOrientation.up
if #available(iOS 11.0, *) {
if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber {
orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up
}
}
if let data = serializePixelBuffer(buffer: pixelBuffer) {
self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation)
}
self.impl?.processVideoSampleBuffer(sampleBuffer: sampleBuffer)
}

private func processAudioSampleBuffer(sampleBuffer: CMSampleBuffer) {

@@ -133,9 +384,6 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
guard let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) else {
return
}
/*guard let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else {
return
}*/

let format = CustomAudioConverter.Format(
numChannels: Int(asbd.pointee.mChannelsPerFrame),

@@ -146,7 +394,7 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
}
if let audioConverter = self.audioConverter {
if let data = audioConverter.convert(sampleBuffer: sampleBuffer), !data.isEmpty {
self.screencastBufferClientContext?.writeAudioData(data: data)
self.impl?.processAudioSampleBuffer(data: data)
}
}
}

@@ -287,3 +535,36 @@ private func converterComplexInputDataProc(inAudioConverter: AudioConverterRef,

return 0
}

private func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? {
var maybeFormat: CMVideoFormatDescription?
let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat)
if status != noErr {
return nil
}
guard let format = maybeFormat else {
return nil
}

var timingInfo = CMSampleTimingInfo(
duration: CMTimeMake(value: 1, timescale: 30),
presentationTimeStamp: CMTimeMake(value: 0, timescale: 30),
decodeTimeStamp: CMTimeMake(value: 0, timescale: 30)
)

var maybeSampleBuffer: CMSampleBuffer?
let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer)

if (bufferStatus != noErr) {
return nil
}
guard let sampleBuffer = maybeSampleBuffer else {
return nil
}

let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray
let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary
dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber

return sampleBuffer
}
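
Review note: the fixed 0/30 timing plus the kCMSampleAttachmentKey_DisplayImmediately attachment means a wrapped frame is rendered as soon as it is enqueued, regardless of its nominal timestamp. A hedged usage sketch (the blank 32BGRA buffer stands in for a captured frame and is purely illustrative):

import CoreMedia
import CoreVideo

func makeTestSampleBuffer(width: Int = 640, height: Int = 480) -> CMSampleBuffer? {
    var pixelBuffer: CVPixelBuffer?
    // Create an empty BGRA pixel buffer to wrap.
    let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
    guard status == kCVReturnSuccess, let pixelBuffer else {
        return nil
    }
    return sampleBufferFromPixelBuffer(pixelBuffer: pixelBuffer)
}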

@@ -13693,3 +13693,6 @@ Sorry for the inconvenience.";
"Gift.Withdraw.Title" = "Manage with Fragment";
"Gift.Withdraw.Text" = "You can use Fragment, a third-party service, to transfer **%@** to your TON account. After that, you can manage it as an NFT with any TON wallet outside Telegram.\n\nYou can also move such NFTs back to your Telegram account via Fragment.";
"Gift.Withdraw.Proceed" = "Open Fragment";

"ChatListFilter.NameEnableAnimations" = "Enable Animations";
"ChatListFilter.NameDisableAnimations" = "Disable Animations";

@@ -45,6 +45,12 @@ http_file(
sha256 = "89a287444b5b3e98f88a945afa50ce937b8ffd1dcc59c555ad9b1baf855298c9",
)

http_file(
name = "flatbuffers_zip",
urls = ["https://github.com/google/flatbuffers/archive/refs/tags/v24.12.23.zip"],
sha256 = "c5cd6a605ff20350c7faa19d8eeb599df6117ea4aabd16ac58a7eb5ba82df4e7",
)

http_archive(
name = "appcenter_sdk",
urls = ["https://github.com/microsoft/appcenter-sdk-apple/releases/download/4.1.1/AppCenter-SDK-Apple-4.1.1.zip"],

@@ -806,7 +806,8 @@ public protocol TelegramRootControllerInterface: NavigationController {
func getPrivacySettings() -> Promise<AccountPrivacySettings?>?
func openSettings()
func openBirthdaySetup()
func openPhotoSetup()
func openPhotoSetup(completedWithUploadingImage: @escaping (UIImage, Signal<PeerInfoAvatarUploadStatus, NoError>) -> UIView?)
func openAvatars()
}

public protocol QuickReplySetupScreenInitialData: AnyObject {

@@ -953,6 +953,11 @@ public final class PeerInfoNavigationSourceTag {
}
}

public enum PeerInfoAvatarUploadStatus {
case progress(Float)
case done
}

public protocol PeerInfoScreen: ViewController {
var peerId: PeerId { get }
var privacySettings: Promise<AccountPrivacySettings?> { get }

@@ -961,7 +966,8 @@ public protocol PeerInfoScreen: ViewController {
func toggleStorySelection(ids: [Int32], isSelected: Bool)
func togglePaneIsReordering(isReordering: Bool)
func cancelItemSelection()
func openAvatarSetup()
func openAvatarSetup(completedWithUploadingImage: @escaping (UIImage, Signal<PeerInfoAvatarUploadStatus, NoError>) -> UIView?)
func openAvatars()
}

public extension Peer {

@@ -112,6 +112,7 @@ swift_library(
"//submodules/ChatPresentationInterfaceState",
"//submodules/ShimmerEffect:ShimmerEffect",
"//submodules/TelegramUI/Components/LottieComponent",
"//submodules/TelegramUI/Components/AvatarUploadToastScreen",
],
visibility = [
"//visibility:public",

@@ -52,6 +52,7 @@ import ArchiveInfoScreen
import BirthdayPickerScreen
import OldChannelsController
import TextFormat
import AvatarUploadToastScreen

private final class ContextControllerContentSourceImpl: ContextControllerContentSource {
let controller: ViewController

@@ -1208,7 +1209,54 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
return
}
if let rootController = self.navigationController as? TelegramRootControllerInterface {
rootController.openPhotoSetup()
rootController.openPhotoSetup(completedWithUploadingImage: { [weak self] image, uploadStatus in
guard let self else {
return nil
}

let toastScreen = AvatarUploadToastScreen(
context: self.context,
image: image,
uploadStatus: uploadStatus,
arrowTarget: { [weak self] in
guard let self else {
return nil
}
guard let tabController = self.parent as? TabBarController else {
return nil
}
guard let settingsController = tabController.controllers.first(where: { $0 is PeerInfoScreen }) as? PeerInfoScreen else {
return nil
}
guard let tabFrame = tabController.frameForControllerTab(controller: settingsController) else {
return nil
}
return (tabController.view, tabFrame)
},
viewUploadedAvatar: { [weak self] in
guard let self else {
return
}
if let rootController = self.navigationController as? TelegramRootControllerInterface {
rootController.openAvatars()
}
}
)

if let navigationController = self.navigationController as? NavigationController {
var viewControllers = navigationController.viewControllers
if let index = viewControllers.firstIndex(where: { $0 is TabBarController }) {
viewControllers.insert(toastScreen, at: index + 1)
} else {
viewControllers.append(toastScreen)
}
navigationController.setViewControllers(viewControllers, animated: true)
} else {
self.push(toastScreen)
}

return toastScreen.targetAvatarView
})
}
}
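
Review note: the toast is slotted into the navigation stack directly above the tab container rather than pushed on top, so later pushes still land above it in the expected order. NavigationController and TabBarController here appear to be Telegram's own Display-framework classes; a plain-UIKit analog of the same insertion, as a sketch:

import UIKit

func insertAboveTabs(_ controller: UIViewController, in navigationController: UINavigationController) {
    var stack = navigationController.viewControllers
    if let index = stack.firstIndex(where: { $0 is UITabBarController }) {
        // Slot the new controller immediately above the tab container.
        stack.insert(controller, at: index + 1)
    } else {
        stack.append(controller)
    }
    navigationController.setViewControllers(stack, animated: true)
}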

@@ -378,10 +378,9 @@ private enum ChatListFilterPresetEntry: ItemListNodeEntry {
case .screenHeader:
return ChatListFilterSettingsHeaderItem(context: arguments.context, theme: presentationData.theme, text: "", animation: .newFolder, sectionId: self.section)
case let .nameHeader(title, enableAnimations):
//TODO:localize
var actionText: String?
if let enableAnimations {
actionText = enableAnimations ? "Disable Animations" : "Enable Animations"
actionText = enableAnimations ? presentationData.strings.ChatListFilter_NameDisableAnimations : presentationData.strings.ChatListFilter_NameEnableAnimations
}
return ItemListSectionHeaderItem(presentationData: presentationData, text: title, actionText: actionText, action: {
arguments.toggleNameAnimations()

@@ -3218,7 +3218,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {

var actionButtonTitleNodeLayoutAndApply: (TextNodeLayout, () -> TextNode)?
if case .none = badgeContent, case .none = mentionBadgeContent, case let .chat(itemPeer) = contentPeer, case let .user(user) = itemPeer.chatMainPeer, let botInfo = user.botInfo, botInfo.flags.contains(.hasWebApp) {
actionButtonTitleNodeLayoutAndApply = makeActionButtonTitleNodeLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.ChatList_InlineButtonOpenApp, font: Font.semibold(15.0), textColor: theme.unreadBadgeActiveTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: rawContentWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
actionButtonTitleNodeLayoutAndApply = makeActionButtonTitleNodeLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.ChatList_InlineButtonOpenApp, font: Font.semibold(floor(item.presentationData.fontSize.itemListBaseFontSize * 15.0 / 17.0)), textColor: theme.unreadBadgeActiveTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: rawContentWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
}

var badgeSize: CGFloat = 0.0

@@ -4017,8 +4017,13 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
}

if let (actionButtonTitleNodeLayout, apply) = actionButtonTitleNodeLayoutAndApply {
let actionButtonSize = CGSize(width: actionButtonTitleNodeLayout.size.width + 12.0 * 2.0, height: actionButtonTitleNodeLayout.size.height + 5.0 + 4.0)
let actionButtonFrame = CGRect(x: nextBadgeX - actionButtonSize.width, y: contentRect.maxY - actionButtonSize.height, width: actionButtonSize.width, height: actionButtonSize.height)
let actionButtonSideInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 12.0 / 17.0)
let actionButtonTopInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 5.0 / 17.0)
let actionButtonBottomInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 4.0 / 17.0)

let actionButtonSize = CGSize(width: actionButtonTitleNodeLayout.size.width + actionButtonSideInset * 2.0, height: actionButtonTitleNodeLayout.size.height + actionButtonTopInset + actionButtonBottomInset)
var actionButtonFrame = CGRect(x: nextBadgeX - actionButtonSize.width, y: contentRect.minY + floor((contentRect.height - actionButtonSize.height) * 0.5), width: actionButtonSize.width, height: actionButtonSize.height)
actionButtonFrame.origin.y = max(actionButtonFrame.origin.y, dateFrame.maxY + floor(item.presentationData.fontSize.itemListBaseFontSize * 4.0 / 17.0))

let actionButtonNode: HighlightableButtonNode
if let current = strongSelf.actionButtonNode {

@@ -4037,10 +4042,10 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
actionButtonBackgroundView = UIImageView()
strongSelf.actionButtonBackgroundView = actionButtonBackgroundView
actionButtonNode.view.addSubview(actionButtonBackgroundView)

if actionButtonBackgroundView.image?.size.height != actionButtonSize.height {
actionButtonBackgroundView.image = generateStretchableFilledCircleImage(diameter: actionButtonSize.height, color: .white)?.withRenderingMode(.alwaysTemplate)
}
}

if actionButtonBackgroundView.image?.size.height != actionButtonSize.height {
actionButtonBackgroundView.image = generateStretchableFilledCircleImage(diameter: actionButtonSize.height, color: .white)?.withRenderingMode(.alwaysTemplate)
}

actionButtonBackgroundView.tintColor = theme.unreadBadgeActiveBackgroundColor

@@ -4054,7 +4059,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {

actionButtonNode.frame = actionButtonFrame
actionButtonBackgroundView.frame = CGRect(origin: CGPoint(), size: actionButtonFrame.size)
actionButtonTitleNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((actionButtonFrame.width - actionButtonTitleNodeLayout.size.width) * 0.5), y: 5.0), size: actionButtonTitleNodeLayout.size)
actionButtonTitleNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((actionButtonFrame.width - actionButtonTitleNodeLayout.size.width) * 0.5), y: actionButtonTopInset), size: actionButtonTitleNodeLayout.size)

nextBadgeX -= actionButtonSize.width + 6.0
} else {

@@ -1994,6 +1994,11 @@ public final class ChatListNode: ListView {
starsSubscriptionsContextPromise.get()
)
|> mapToSignal { suggestions, dismissedSuggestions, configuration, newSessionReviews, data, birthdays, starsSubscriptionsContext -> Signal<ChatListNotice?, NoError> in
#if DEBUG
var suggestions = suggestions
suggestions.insert(.setupPhoto, at: 0)
#endif

let (accountPeer, birthday) = data

if let newSessionReview = newSessionReviews.first {

@@ -218,6 +218,16 @@ open class ViewControllerComponentContainer: ViewController {
}
self.containerLayoutUpdated(layout: currentLayout.layout, navigationHeight: currentLayout.navigationHeight, transition: transition)
}

override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
if let result = super.hitTest(point, with: event) {
if result === self.view {
return nil
}
return result
}
return nil
}
}
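
Review note: this hitTest override makes the container transparent to touches that land on its own backing view while still delivering touches to its subviews. The same pattern in plain UIKit, as a minimal sketch:

import UIKit

final class PassthroughContainerView: UIView {
    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        let result = super.hitTest(point, with: event)
        // Swallow nothing ourselves: only subviews may claim the touch.
        return result === self ? nil : result
    }
}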

public var node: Node {

@@ -107,7 +107,8 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case experimentalCallMute(Bool)
case conferenceCalls(Bool)
case playerV2(Bool)
case benchmarkReflectors
case devRequests(Bool)
case fakeAds(Bool)
case enableLocalTranslation(Bool)
case preferredVideoCodec(Int, String, String?, Bool)
case disableVideoAspectScaling(Bool)

@@ -133,7 +134,7 @@
return DebugControllerSection.web.rawValue
case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .devRequests, .fakeAds, .enableLocalTranslation:
return DebugControllerSection.experiments.rawValue
case .logTranslationRecognition, .resetTranslationStates:
return DebugControllerSection.translation.rawValue

@@ -254,12 +255,14 @@
return 53
case .playerV2:
return 54
case .benchmarkReflectors:
case .devRequests:
return 55
case .enableLocalTranslation:
case .fakeAds:
return 56
case .enableLocalTranslation:
return 57
case let .preferredVideoCodec(index, _, _, _):
return 57 + index
return 58 + index
case .disableVideoAspectScaling:
return 100
case .enableNetworkFramework:

@@ -1368,60 +1371,25 @@
})
}).start()
})
case .benchmarkReflectors:
return ItemListActionItem(presentationData: presentationData, title: "Benchmark Reflectors", kind: .generic, alignment: .natural, sectionId: self.section, style: .blocks, action: {
guard let context = arguments.context else {
return
}

var signal: Signal<ReflectorBenchmark.Results, NoError> = Signal { subscriber in
var reflectorBenchmark: ReflectorBenchmark? = ReflectorBenchmark(address: "91.108.13.35", port: 599)
reflectorBenchmark?.start(completion: { results in
subscriber.putNext(results)
subscriber.putCompletion()
case let .devRequests(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Dev Requests", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
settings.devRequests = value
return PreferencesEntry(settings)
})

return ActionDisposable {
reflectorBenchmark = nil
}
}
|> runOn(.mainQueue())

var cancelImpl: (() -> Void)?
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let progressSignal = Signal<Never, NoError> { subscriber in
let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
cancelImpl?()
}))
arguments.presentController(controller, nil)
return ActionDisposable { [weak controller] in
Queue.mainQueue().async() {
controller?.dismiss()
}
}
}
|> runOn(Queue.mainQueue())
|> delay(0.15, queue: Queue.mainQueue())
let progressDisposable = progressSignal.start()

let reindexDisposable = MetaDisposable()

signal = signal
|> afterDisposed {
Queue.mainQueue().async {
progressDisposable.dispose()
}
}
cancelImpl = {
reindexDisposable.set(nil)
}
reindexDisposable.set((signal
|> deliverOnMainQueue).start(next: { results in
if let context = arguments.context {
let controller = textAlertController(context: context, title: nil, text: "Bandwidth: \(results.bandwidthBytesPerSecond * 8 / 1024) kbit/s (expected \(results.expectedBandwidthBytesPerSecond * 8 / 1024) kbit/s)\nAvg latency: \(Int(results.averageDelay * 1000.0)) ms", actions: [TextAlertAction(type: .genericAction, title: "OK", action: {})])
arguments.presentController(controller, nil)
}
}))
}).start()
})
case let .fakeAds(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Fake Ads", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
settings.fakeAds = value
return PreferencesEntry(settings)
})
}).start()
})
case let .enableLocalTranslation(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Local Translation", value: value, sectionId: self.section, style: .blocks, updated: { value in

@@ -1593,21 +1561,10 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
entries.append(.conferenceCalls(experimentalSettings.conferenceCalls))
entries.append(.playerV2(experimentalSettings.playerV2))

entries.append(.benchmarkReflectors)
entries.append(.devRequests(experimentalSettings.devRequests))
entries.append(.fakeAds(experimentalSettings.fakeAds))
entries.append(.enableLocalTranslation(experimentalSettings.enableLocalTranslation))
}

/*let codecs: [(String, String?)] = [
("No Preference", nil),
("H265", "H265"),
("H264", "H264"),
("VP8", "VP8"),
("VP9", "VP9")
]

for i in 0 ..< codecs.count {
entries.append(.preferredVideoCodec(i, codecs[i].0, codecs[i].1, experimentalSettings.preferredVideoCodec == codecs[i].1))
}*/

if isMainApp {
entries.append(.disableVideoAspectScaling(experimentalSettings.disableVideoAspectScaling))

@@ -535,6 +535,9 @@ final class NavigationModalContainer: ASDisplayNode, ASScrollViewDelegate, ASGes
return self.dim.view
}
if self.isFlat {
if result === self.container.view {
return nil
}
return result
}
var currentParent: UIView? = result

@@ -36,6 +36,40 @@
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
} else if (preferHardwareAccelerationCapable && codecId == AV_CODEC_ID_H264) {
void *codecIterationState = nil;
while (true) {
AVCodec const *codec = av_codec_iterate(&codecIterationState);
if (!codec) {
break;
}
if (!av_codec_is_decoder(codec)) {
continue;
}
if (codec->id != codecId) {
continue;
}
if (strncmp(codec->name, "h264", 2) == 0) {
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
} else if (preferHardwareAccelerationCapable && codecId == AV_CODEC_ID_HEVC) {
void *codecIterationState = nil;
while (true) {
AVCodec const *codec = av_codec_iterate(&codecIterationState);
if (!codec) {
break;
}
if (!av_codec_is_decoder(codec)) {
continue;
}
if (codec->id != codecId) {
continue;
}
if (strncmp(codec->name, "hevc", 2) == 0) {
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
}

AVCodec const *codec = avcodec_find_decoder(codecId);

@@ -1738,8 +1738,25 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
} else if let _ = item.content as? PlatformVideoContent {
disablePlayerControls = true
forceEnablePiP = true
} else if let _ = item.content as? HLSVideoContent {
} else if let content = item.content as? HLSVideoContent {
isAdaptive = true

if let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(isHardwareAv1Supported: false, isSoftwareAv1Supported: true)), let (quality, playlistFile) = qualitySet.playlistFiles.sorted(by: { $0.key < $1.key }).first, let dataFile = qualitySet.qualityFiles[quality] {
var alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)] = []
for (otherQuality, otherPlaylistFile) in qualitySet.playlistFiles {
if otherQuality != quality, let otherDataFile = qualitySet.qualityFiles[otherQuality] {
alternativeQualities.append((otherPlaylistFile, dataFile: otherDataFile))
}
}
self.videoFramePreview = MediaPlayerFramePreviewHLS(
postbox: item.context.account.postbox,
userLocation: content.userLocation,
userContentType: .video,
playlistFile: playlistFile,
mainDataFile: dataFile,
alternativeQualities: alternativeQualities
)
}
}

let _ = isAdaptive

@@ -423,7 +423,12 @@ public final class LocationMapNode: ASDisplayNode, MKMapViewDelegateTarget {
self.mapView?.setRegion(region, animated: animated)
} else {
let mapRect = MKMapRect(region: region)
self.mapView?.setVisibleMapRect(mapRect, edgePadding: UIEdgeInsets(top: offset.y + self.topPadding, left: offset.x, bottom: 0.0, right: 0.0), animated: animated)
var effectiveTopOffset: CGFloat = offset.y
if #available(iOS 18.0, *) {
} else {
effectiveTopOffset += self.topPadding
}
self.mapView?.setVisibleMapRect(mapRect, edgePadding: UIEdgeInsets(top: effectiveTopOffset, left: offset.x, bottom: 0.0, right: 0.0), animated: animated)
}
self.ignoreRegionChanges = false

@@ -651,6 +651,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
self.gridNode.scrollView.addSubview(cameraView)
self.gridNode.addSubnode(self.cameraActivateAreaNode)
} else if useModernCamera, !Camera.isIpad {
#if !targetEnvironment(simulator)
var cameraPosition: Camera.Position = .back
if case .assets(nil, .createAvatar) = controller.subject {
cameraPosition = .front

@@ -703,6 +704,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
} else {
setupCamera()
}
#endif
} else {
self.containerNode.clipsToBounds = true
}

@@ -45,6 +45,7 @@ private func FFMpegLookaheadReader_readPacketCallback(userData: UnsafeMutableRaw
memcpy(buffer, bytes, fetchedData.count)
}
let fetchedCount = Int32(fetchedData.count)
//print("Fetched from \(context.readingOffset) (\(fetchedCount) bytes)")
context.setReadingOffset(offset: context.readingOffset + Int64(fetchedCount))
if fetchedCount == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF

@@ -79,12 +80,12 @@ private final class FFMpegLookaheadReader {
var audioStream: FFMpegFileReader.StreamInfo?
var videoStream: FFMpegFileReader.StreamInfo?

var seekInfo: FFMpegLookaheadThread.State.Seek?
var maxReadPts: FFMpegLookaheadThread.State.Seek?
var audioStreamState: FFMpegLookaheadThread.StreamState?
var videoStreamState: FFMpegLookaheadThread.StreamState?
var seekInfo: FFMpegLookahead.State.Seek?
var maxReadPts: FFMpegLookahead.State.Seek?
var audioStreamState: FFMpegLookahead.StreamState?
var videoStreamState: FFMpegLookahead.StreamState?

var reportedState: FFMpegLookaheadThread.State?
var reportedState: FFMpegLookahead.State?

var readingOffset: Int64 = 0
var isCancelled: Bool = false

@@ -108,6 +109,8 @@ private final class FFMpegLookaheadReader {
let avFormatContext = FFMpegAVFormatContext()
avFormatContext.setIO(avIoContext)

self.setReadingOffset(offset: 0)

if !avFormatContext.openInput(withDirectFilePath: nil) {
return nil
}

@@ -170,7 +173,7 @@ private final class FFMpegLookaheadReader {

if let preferredStream = self.videoStream ?? self.audioStream {
let pts = CMTimeMakeWithSeconds(params.seekToTimestamp, preferredTimescale: preferredStream.timeScale)
self.seekInfo = FFMpegLookaheadThread.State.Seek(streamIndex: preferredStream.index, pts: pts.value)
self.seekInfo = FFMpegLookahead.State.Seek(streamIndex: preferredStream.index, pts: pts.value)
avFormatContext.seekFrame(forStreamIndex: Int32(preferredStream.index), pts: pts.value, positionOnKeyframe: true)
}

@@ -223,7 +226,7 @@ private final class FFMpegLookaheadReader {
return
}

let maxPtsSeconds = max(self.params.seekToTimestamp, currentTimestamp) + 10.0
let maxPtsSeconds = max(self.params.seekToTimestamp, currentTimestamp) + self.params.lookaheadDuration

var currentAudioPtsSecondsAdvanced: Double = 0.0
var currentVideoPtsSecondsAdvanced: Double = 0.0

@@ -258,14 +261,14 @@ private final class FFMpegLookaheadReader {
break
}

self.maxReadPts = FFMpegLookaheadThread.State.Seek(streamIndex: Int(packet.streamIndex), pts: packet.pts)
self.maxReadPts = FFMpegLookahead.State.Seek(streamIndex: Int(packet.streamIndex), pts: packet.pts)

if let audioStream = self.audioStream, Int(packet.streamIndex) == audioStream.index {
let pts = CMTimeMake(value: packet.pts, timescale: audioStream.timeScale)
if let audioStreamState = self.audioStreamState {
currentAudioPtsSecondsAdvanced += pts.seconds - audioStreamState.readableToTime.seconds
}
self.audioStreamState = FFMpegLookaheadThread.StreamState(
self.audioStreamState = FFMpegLookahead.StreamState(
info: audioStream,
readableToTime: pts
)

@@ -274,7 +277,7 @@ private final class FFMpegLookaheadReader {
if let videoStreamState = self.videoStreamState {
currentVideoPtsSecondsAdvanced += pts.seconds - videoStreamState.readableToTime.seconds
}
self.videoStreamState = FFMpegLookaheadThread.StreamState(
self.videoStreamState = FFMpegLookahead.StreamState(
info: videoStream,
readableToTime: pts
)

@@ -300,7 +303,7 @@ private final class FFMpegLookaheadReader {
stateIsFullyInitialised = false
}

let state = FFMpegLookaheadThread.State(
let state = FFMpegLookahead.State(
seek: seekInfo,
maxReadablePts: self.maxReadPts,
audio: (stateIsFullyInitialised && self.maxReadPts != nil) ? self.audioStreamState : nil,

@@ -315,45 +318,10 @@ private final class FFMpegLookaheadReader {
}

private final class FFMpegLookaheadThread: NSObject {
struct StreamState: Equatable {
let info: FFMpegFileReader.StreamInfo
let readableToTime: CMTime

init(info: FFMpegFileReader.StreamInfo, readableToTime: CMTime) {
self.info = info
self.readableToTime = readableToTime
}
}

struct State: Equatable {
struct Seek: Equatable {
var streamIndex: Int
var pts: Int64

init(streamIndex: Int, pts: Int64) {
self.streamIndex = streamIndex
self.pts = pts
}
}

let seek: Seek
let maxReadablePts: Seek?
let audio: StreamState?
let video: StreamState?
let isEnded: Bool

init(seek: Seek, maxReadablePts: Seek?, audio: StreamState?, video: StreamState?, isEnded: Bool) {
self.seek = seek
self.maxReadablePts = maxReadablePts
self.audio = audio
self.video = video
self.isEnded = isEnded
}
}

final class Params: NSObject {
let seekToTimestamp: Double
let updateState: (State) -> Void
let lookaheadDuration: Double
let updateState: (FFMpegLookahead.State) -> Void
let fetchInRange: (Range<Int64>) -> Disposable
let getDataInRange: (Range<Int64>, @escaping (Data?) -> Void) -> Disposable
let isDataCachedInRange: (Range<Int64>) -> Bool

@@ -363,7 +331,8 @@ private final class FFMpegLookaheadThread: NSObject {

init(
seekToTimestamp: Double,
updateState: @escaping (State) -> Void,
lookaheadDuration: Double,
updateState: @escaping (FFMpegLookahead.State) -> Void,
fetchInRange: @escaping (Range<Int64>) -> Disposable,
getDataInRange: @escaping (Range<Int64>, @escaping (Data?) -> Void) -> Disposable,
isDataCachedInRange: @escaping (Range<Int64>) -> Bool,

@@ -372,6 +341,7 @@ private final class FFMpegLookaheadThread: NSObject {
currentTimestamp: Atomic<Double?>
) {
self.seekToTimestamp = seekToTimestamp
self.lookaheadDuration = lookaheadDuration
self.updateState = updateState
self.fetchInRange = fetchInRange
self.getDataInRange = getDataInRange

@@ -414,14 +384,51 @@ private final class FFMpegLookaheadThread: NSObject {
}
}

private final class FFMpegLookahead {
final class FFMpegLookahead {
struct StreamState: Equatable {
let info: FFMpegFileReader.StreamInfo
let readableToTime: CMTime

init(info: FFMpegFileReader.StreamInfo, readableToTime: CMTime) {
self.info = info
self.readableToTime = readableToTime
}
}

struct State: Equatable {
struct Seek: Equatable {
var streamIndex: Int
var pts: Int64

init(streamIndex: Int, pts: Int64) {
self.streamIndex = streamIndex
self.pts = pts
}
}

let seek: Seek
let maxReadablePts: Seek?
let audio: StreamState?
let video: StreamState?
let isEnded: Bool

init(seek: Seek, maxReadablePts: Seek?, audio: StreamState?, video: StreamState?, isEnded: Bool) {
self.seek = seek
self.maxReadablePts = maxReadablePts
self.audio = audio
self.video = video
self.isEnded = isEnded
}
}

private let cancel = Promise<Void>()
private let currentTimestamp = Atomic<Double?>(value: nil)
private let thread: Thread

init(
seekToTimestamp: Double,
updateState: @escaping (FFMpegLookaheadThread.State) -> Void,
lookaheadDuration: Double,
updateState: @escaping (FFMpegLookahead.State) -> Void,
fetchInRange: @escaping (Range<Int64>) -> Disposable,
getDataInRange: @escaping (Range<Int64>, @escaping (Data?) -> Void) -> Disposable,
isDataCachedInRange: @escaping (Range<Int64>) -> Bool,

@@ -432,6 +439,7 @@ private final class FFMpegLookahead {
selector: #selector(FFMpegLookaheadThread.entryPoint(_:)),
object: FFMpegLookaheadThread.Params(
seekToTimestamp: seekToTimestamp,
lookaheadDuration: lookaheadDuration,
updateState: updateState,
fetchInRange: fetchInRange,
getDataInRange: getDataInRange,

@@ -496,7 +504,7 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
let lookaheadId = self.currentLookaheadId

let resource = self.resource
let updateState: (FFMpegLookaheadThread.State) -> Void = { [weak self] state in
let updateState: (FFMpegLookahead.State) -> Void = { [weak self] state in
Queue.mainQueue().async {
guard let self else {
return

@@ -580,6 +588,7 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {

self.lookahead = FFMpegLookahead(
seekToTimestamp: position,
lookaheadDuration: 10.0,
updateState: updateState,
fetchInRange: { range in
return fetchedMediaResource(

@@ -201,6 +201,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
private var baseRate: Double = 1.0
private var isSoundEnabled: Bool
private var isMuted: Bool
private var isAmbientMode: Bool

private var seekId: Int = 0
private var seekTimestamp: Double = 0.0

@@ -251,6 +252,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {

self.isSoundEnabled = enableSound
self.isMuted = soundMuted
self.isAmbientMode = ambient
self.baseRate = baseRate

self.renderSynchronizer = AVSampleBufferRenderSynchronizer()

@@ -317,7 +319,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
if self.isSoundEnabled && self.hasSound {
if self.audioSessionDisposable == nil {
self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
audioSessionType: .play(mixWithOthers: false),
audioSessionType: self.isAmbientMode ? .ambient : .play(mixWithOthers: false),
activateImmediately: false,
manualActivate: { [weak self] control in
control.setupAndActivate(synchronous: false, { state in

@@ -775,6 +777,22 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}

public func continueWithOverridingAmbientMode(isAmbient: Bool) {
if self.isAmbientMode != isAmbient {
self.isAmbientMode = isAmbient

self.hasAudioSession = false
self.updateInternalState()
self.audioSessionDisposable?.dispose()
self.audioSessionDisposable = nil

let currentTimestamp: CMTime
if let pendingSeekTimestamp = self.pendingSeekTimestamp {
currentTimestamp = CMTimeMakeWithSeconds(pendingSeekTimestamp, preferredTimescale: 44000)
} else {
currentTimestamp = self.renderSynchronizer.currentTime()
}
self.seek(timestamp: currentTimestamp.seconds, play: nil)
}
}

public func continuePlayingWithoutSound(seek: MediaPlayerSeek) {

@@ -877,6 +895,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.loadedPartsMediaData.with { [weak self] loadedPartsMediaData in
loadedPartsMediaData.parts.removeAll()
loadedPartsMediaData.seekFromMinTimestamp = timestamp
loadedPartsMediaData.directMediaData = nil
loadedPartsMediaData.directReaderId = nil

Queue.mainQueue().async {
guard let self else {

@@ -1050,6 +1070,9 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
continue outer
}
}
/*if isVideo {
print("Enqueue video \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value)")
}*/
/*if !isVideo {
print("Enqueue audio \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value) next: \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + 1024)")
}*/

@@ -26,16 +26,41 @@ private func FFMpegFileReader_readPacketCallback(userData: UnsafeMutableRawPoint
return Int32(result)
case let .resource(resource):
let readCount = min(256 * 1024, Int64(bufferSize))
let requestRange: Range<Int64> = resource.readingPosition ..< (resource.readingPosition + readCount)

//TODO:improve thread safe read if incomplete
if let (file, readSize) = resource.mediaBox.internal_resourceData(id: resource.resource.id, size: resource.size, in: requestRange) {
let result = file.read(buffer, readSize)
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
var bufferOffset = 0
let doRead: (Range<Int64>) -> Void = { range in
//TODO:improve thread safe read if incomplete
if let (file, readSize) = resource.mediaBox.internal_resourceData(id: resource.resource.id, size: resource.resourceSize, in: range) {
let effectiveReadSize = max(0, min(Int(readCount) - bufferOffset, readSize))
let count = file.read(buffer.advanced(by: bufferOffset), effectiveReadSize)
bufferOffset += count
resource.readingPosition += Int64(count)
}
resource.readingPosition += Int64(result)
return Int32(result)
}

var mappedRangePosition: Int64 = 0
for mappedRange in resource.mappedRanges {
let bytesToRead = readCount - Int64(bufferOffset)
if bytesToRead <= 0 {
break
}

let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound
let mappedRangeReadingPosition = resource.readingPosition - mappedRangePosition

if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize {
let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition
let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead)
if mappedRangeBytesToRead > 0 {
let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead)
doRead(mappedReadRange)
}
}

mappedRangePosition += mappedRangeSize
}
if bufferOffset != 0 {
return Int32(bufferOffset)
} else {
return FFMPEG_CONSTANT_AVERROR_EOF
}
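
Review note: the new read callback treats the resource as the concatenation of mappedRanges and translates the linear readingPosition into per-range reads. The offset math in isolation, as a hedged sketch (a pure function with no MediaBox I/O; names are illustrative):

func mappedReadRanges(position: Int64, count: Int64, ranges: [Range<Int64>]) -> [Range<Int64>] {
    var result: [Range<Int64>] = []
    var scheduled: Int64 = 0
    var rangeStart: Int64 = 0
    for range in ranges {
        if scheduled >= count {
            break
        }
        let size = range.upperBound - range.lowerBound
        // Position of the next unread byte, relative to this mapped range.
        let localPosition = position + scheduled - rangeStart
        if localPosition >= 0 && localPosition < size {
            let take = min(count - scheduled, size - localPosition)
            result.append((range.lowerBound + localPosition) ..< (range.lowerBound + localPosition + take))
            scheduled += take
        }
        rangeStart += size
    }
    return result
}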
|
||||
@ -65,7 +90,7 @@ private func FFMpegFileReader_seekCallback(userData: UnsafeMutableRawPointer?, o
|
||||
final class FFMpegFileReader {
|
||||
enum SourceDescription {
|
||||
case file(String)
|
||||
case resource(mediaBox: MediaBox, resource: MediaResource, size: Int64)
|
||||
case resource(mediaBox: MediaBox, resource: MediaResource, resourceSize: Int64, mappedRanges: [Range<Int64>])
|
||||
}
|
||||
|
||||
final class StreamInfo: Equatable {
|
||||
@ -117,12 +142,21 @@ final class FFMpegFileReader {
|
||||
final class Resource {
|
||||
let mediaBox: MediaBox
|
||||
let resource: MediaResource
|
||||
let resourceSize: Int64
|
||||
let mappedRanges: [Range<Int64>]
|
||||
let size: Int64
|
||||
var readingPosition: Int64 = 0
|
||||
|
||||
init(mediaBox: MediaBox, resource: MediaResource, size: Int64) {
|
||||
init(mediaBox: MediaBox, resource: MediaResource, resourceSize: Int64, mappedRanges: [Range<Int64>]) {
|
||||
self.mediaBox = mediaBox
|
||||
self.resource = resource
|
||||
self.resourceSize = resourceSize
|
||||
self.mappedRanges = mappedRanges
|
||||
|
||||
var size: Int64 = 0
|
||||
for range in mappedRanges {
|
||||
size += range.upperBound - range.lowerBound
|
||||
}
|
||||
self.size = size
|
||||
}
|
||||
}
|
||||
@ -179,6 +213,11 @@ final class FFMpegFileReader {
|
||||
case index(Int)
|
||||
}
|
||||
|
||||
enum Seek {
|
||||
case stream(streamIndex: Int, pts: Int64)
|
||||
case direct(position: Double)
|
||||
}
|
||||
|
||||
enum ReadFrameResult {
|
||||
case frame(MediaTrackFrame)
|
||||
case waitingForMoreData
|
||||
@ -200,7 +239,7 @@ final class FFMpegFileReader {
|
||||
private var lastReadPts: (streamIndex: Int, pts: Int64)?
|
||||
private var isWaitingForMoreData: Bool = false
|
||||
|
||||
public init?(source: SourceDescription, passthroughDecoder: Bool = false, useHardwareAcceleration: Bool, selectedStream: SelectedStream, seek: (streamIndex: Int, pts: Int64)?, maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)?) {
|
||||
public init?(source: SourceDescription, passthroughDecoder: Bool = false, useHardwareAcceleration: Bool, selectedStream: SelectedStream, seek: Seek?, maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)?) {
|
||||
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
|
||||
|
||||
switch source {
|
||||
@ -209,8 +248,8 @@ final class FFMpegFileReader {
|
||||
return nil
|
||||
}
|
||||
self.source = .file(file)
|
||||
case let .resource(mediaBox, resource, size):
|
||||
self.source = .resource(Source.Resource(mediaBox: mediaBox, resource: resource, size: size))
|
||||
case let .resource(mediaBox, resource, resourceSize, mappedRanges):
|
||||
self.source = .resource(Source.Resource(mediaBox: mediaBox, resource: resource, resourceSize: resourceSize, mappedRanges: mappedRanges))
|
||||
}
|
||||
|
||||
self.maxReadablePts = maxReadablePts
|
||||
@ -350,7 +389,12 @@ final class FFMpegFileReader {
|
||||
self.stream = stream
|
||||
|
||||
if let seek {
|
||||
avFormatContext.seekFrame(forStreamIndex: Int32(seek.streamIndex), pts: seek.pts, positionOnKeyframe: true)
|
||||
switch seek {
|
||||
case let .stream(streamIndex, pts):
|
||||
avFormatContext.seekFrame(forStreamIndex: Int32(streamIndex), pts: pts, positionOnKeyframe: true)
|
||||
case let .direct(position):
|
||||
avFormatContext.seekFrame(forStreamIndex: Int32(stream.info.index), pts: CMTimeMakeWithSeconds(Float64(position), preferredTimescale: stream.info.timeScale).value, positionOnKeyframe: true)
|
||||
}
|
||||
} else {
|
||||
avFormatContext.seekFrame(forStreamIndex: Int32(stream.info.index), pts: 0, positionOnKeyframe: true)
|
||||
}
|
||||
|
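Editorial note: the new read path above makes FFmpeg see one contiguous virtual stream that is actually stitched together from the sparse byte ranges in `mappedRanges`; the running `mappedRangePosition` translates the virtual `readingPosition` into physical resource offsets. A minimal standalone sketch of that translation, assuming only the Swift standard library (the function name is illustrative, not part of the commit):

// Sketch: map a virtual read (position, count) onto the physical ranges that
// back it. Mirrors the loop in FFMpegFileReader_readPacketCallback above.
func physicalReadRanges(mappedRanges: [Range<Int64>], virtualPosition: Int64, count: Int64) -> [Range<Int64>] {
    var result: [Range<Int64>] = []
    var remaining = count
    var position = virtualPosition
    var mappedRangePosition: Int64 = 0
    for mappedRange in mappedRanges {
        if remaining <= 0 {
            break
        }
        let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound
        let offsetInRange = position - mappedRangePosition
        if offsetInRange >= 0 && offsetInRange < mappedRangeSize {
            // The virtual position falls inside this mapped range: read as much
            // of it as is available here, then continue into the next range.
            let bytesToRead = min(remaining, mappedRangeSize - offsetInRange)
            let physicalStart = mappedRange.lowerBound + offsetInRange
            result.append(physicalStart ..< (physicalStart + bytesToRead))
            remaining -= bytesToRead
            position += bytesToRead
        }
        mappedRangePosition += mappedRangeSize
    }
    return result
}
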
@ -135,14 +135,27 @@ public final class FFMpegMediaDataReaderV2: MediaDataReader {
self.isVideo = isVideo

let source: FFMpegFileReader.SourceDescription
var seek: (streamIndex: Int, pts: Int64)?
var seek: FFMpegFileReader.Seek?
var maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)?
switch content {
case let .tempFile(tempFile):
source = .file(tempFile.file.path)
case let .directStream(directStream):
source = .resource(mediaBox: directStream.mediaBox, resource: directStream.resource, size: directStream.size)
seek = (directStream.seek.streamIndex, directStream.seek.pts)
let mappedRanges: [Range<Int64>]
#if DEBUG && false
var mappedRangesValue: [Range<Int64>] = []
var testOffset: Int64 = 0
while testOffset < directStream.size {
let testBlock: Int64 = min(3 * 1024 + 1, directStream.size - testOffset)
mappedRangesValue.append(testOffset ..< (testOffset + testBlock))
testOffset += testBlock
}
mappedRanges = mappedRangesValue
#else
mappedRanges = [0 ..< directStream.size]
#endif
source = .resource(mediaBox: directStream.mediaBox, resource: directStream.resource, resourceSize: directStream.size, mappedRanges: mappedRanges)
seek = .stream(streamIndex: directStream.seek.streamIndex, pts: directStream.seek.pts)
maxReadablePts = directStream.maxReadablePts
}

@ -150,6 +163,10 @@ public final class FFMpegMediaDataReaderV2: MediaDataReader {
var passthroughDecoder = true
var useHardwareAcceleration = false

if (codecName == "h264" || codecName == "hevc") {
passthroughDecoder = false
useHardwareAcceleration = true
}
if (codecName == "av1" || codecName == "av01") {
passthroughDecoder = false
useHardwareAcceleration = internal_isHardwareAv1Supported

@ -4,6 +4,7 @@ import SwiftSignalKit
import Postbox
import TelegramCore
import FFMpegBinding
import VideoToolbox

public enum FramePreviewResult {
case image(UIImage)
@ -151,3 +152,534 @@ public final class MediaPlayerFramePreview: FramePreview {
}
}
}

public final class MediaPlayerFramePreviewHLS: FramePreview {
private final class Impl {
private struct Part {
var timestamp: Int
var duration: Int
var range: Range<Int>

init(timestamp: Int, duration: Int, range: Range<Int>) {
self.timestamp = timestamp
self.duration = duration
self.range = range
}
}

private final class Playlist {
let dataFile: FileMediaReference
let initializationPart: Part
let parts: [Part]

init(dataFile: FileMediaReference, initializationPart: Part, parts: [Part]) {
self.dataFile = dataFile
self.initializationPart = initializationPart
self.parts = parts
}
}

let queue: Queue
let postbox: Postbox
let userLocation: MediaResourceUserLocation
let userContentType: MediaResourceUserContentType
let playlistFile: FileMediaReference
let mainDataFile: FileMediaReference
let alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)]

private var playlist: Playlist?
private var alternativePlaylists: [Playlist] = []
private var fetchPlaylistDisposable: Disposable?
private var playlistDisposable: Disposable?

private var pendingFrame: (Int, FFMpegLookahead)?
private let nextRequestedFrame: Atomic<Double?>

let framePipe = ValuePipe<FramePreviewResult>()

init(
queue: Queue,
postbox: Postbox,
userLocation: MediaResourceUserLocation,
userContentType: MediaResourceUserContentType,
playlistFile: FileMediaReference,
mainDataFile: FileMediaReference,
alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)],
nextRequestedFrame: Atomic<Double?>
) {
self.queue = queue
self.postbox = postbox
self.userLocation = userLocation
self.userContentType = userContentType
self.playlistFile = playlistFile
self.mainDataFile = mainDataFile
self.alternativeQualities = alternativeQualities
self.nextRequestedFrame = nextRequestedFrame

self.loadPlaylist()
}

deinit {
self.fetchPlaylistDisposable?.dispose()
self.playlistDisposable?.dispose()
}

func generateFrame() {
if self.pendingFrame != nil {
return
}

self.updateFrameRequest()
}

func cancelPendingFrames() {
self.pendingFrame = nil
}

private func loadPlaylist() {
if self.fetchPlaylistDisposable != nil {
return
}

let loadPlaylist: (FileMediaReference, FileMediaReference) -> Signal<Playlist?, NoError> = { playlistFile, dataFile in
return self.postbox.mediaBox.resourceData(playlistFile.media.resource)
|> mapToSignal { data -> Signal<Playlist?, NoError> in
if !data.complete {
return .never()
}

guard let data = try? Data(contentsOf: URL(fileURLWithPath: data.path)) else {
return .single(nil)
}
guard let playlistString = String(data: data, encoding: .utf8) else {
return .single(nil)
}

var durations: [Int] = []
var byteRanges: [Range<Int>] = []

let extinfRegex = try! NSRegularExpression(pattern: "EXTINF:(\\d+)", options: [])
let byteRangeRegex = try! NSRegularExpression(pattern: "EXT-X-BYTERANGE:(\\d+)@(\\d+)", options: [])

let extinfResults = extinfRegex.matches(in: playlistString, range: NSRange(playlistString.startIndex..., in: playlistString))
for result in extinfResults {
if let durationRange = Range(result.range(at: 1), in: playlistString) {
if let duration = Int(String(playlistString[durationRange])) {
durations.append(duration)
}
}
}

let byteRangeResults = byteRangeRegex.matches(in: playlistString, range: NSRange(playlistString.startIndex..., in: playlistString))
for result in byteRangeResults {
if let lengthRange = Range(result.range(at: 1), in: playlistString), let upperBoundRange = Range(result.range(at: 2), in: playlistString) {
if let length = Int(String(playlistString[lengthRange])), let lowerBound = Int(String(playlistString[upperBoundRange])) {
byteRanges.append(lowerBound ..< (lowerBound + length))
}
}
}

if durations.count != byteRanges.count {
return .single(nil)
}

var durationOffset = 0
var initializationPart: Part?
var parts: [Part] = []
for i in 0 ..< durations.count {
let part = Part(timestamp: durationOffset, duration: durations[i], range: byteRanges[i])
if i == 0 {
initializationPart = Part(timestamp: 0, duration: 0, range: 0 ..< byteRanges[i].lowerBound)
}
parts.append(part)
durationOffset += durations[i]
}

if let initializationPart {
return .single(Playlist(dataFile: dataFile, initializationPart: initializationPart, parts: parts))
} else {
return .single(nil)
}
}
}

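// Editorial note (not part of the commit): loadPlaylist above relies on the
// "#EXT-X-BYTERANGE:<length>@<offset>" convention, where each tag maps to the
// half-open byte range offset ..< (offset + length) inside the single data
// file. For an illustrative playlist containing (values invented):
//
//   #EXTINF:3,
//   #EXT-X-BYTERANGE:1000@200
//   #EXTINF:3,
//   #EXT-X-BYTERANGE:900@1200
//
// byteRanges becomes [200 ..< 1200, 1200 ..< 2100], durations becomes [3, 3],
// part timestamps become [0, 3], and the initialization part covers
// 0 ..< 200 (everything before the first segment's offset).
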
let fetchPlaylist: (FileMediaReference) -> Signal<Never, NoError> = { playlistFile in
return fetchedMediaResource(
mediaBox: self.postbox.mediaBox,
userLocation: self.userLocation,
userContentType: self.userContentType,
reference: playlistFile.resourceReference(playlistFile.media.resource)
)
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}

var fetchSignals: [Signal<Never, NoError>] = []
fetchSignals.append(fetchPlaylist(self.playlistFile))
for quality in self.alternativeQualities {
fetchSignals.append(fetchPlaylist(quality.playlist))
}
self.fetchPlaylistDisposable = combineLatest(fetchSignals).startStrict()

self.playlistDisposable = (combineLatest(queue: self.queue,
loadPlaylist(self.playlistFile, self.mainDataFile),
combineLatest(self.alternativeQualities.map {
return loadPlaylist($0.playlist, $0.dataFile)
})
)
|> deliverOn(self.queue)).startStrict(next: { [weak self] mainPlaylist, alternativePlaylists in
guard let self else {
return
}

self.playlist = mainPlaylist
self.alternativePlaylists = alternativePlaylists.compactMap{ $0 }
})
}

private func updateFrameRequest() {
guard let playlist = self.playlist else {
return
}
if self.pendingFrame != nil {
return
}
guard let nextRequestedFrame = self.nextRequestedFrame.swap(nil) else {
return
}

var allPlaylists: [Playlist] = [playlist]
allPlaylists.append(contentsOf: self.alternativePlaylists)
outer: for playlist in allPlaylists {
if let dataFileSize = playlist.dataFile.media.size, let part = playlist.parts.first(where: { $0.timestamp <= Int(nextRequestedFrame) && ($0.timestamp + $0.duration) > Int(nextRequestedFrame) }) {
let mappedRanges: [Range<Int64>] = [
Int64(playlist.initializationPart.range.lowerBound) ..< Int64(playlist.initializationPart.range.upperBound),
Int64(part.range.lowerBound) ..< Int64(part.range.upperBound)
]
for mappedRange in mappedRanges {
if !self.postbox.mediaBox.internal_resourceDataIsCached(id: playlist.dataFile.media.resource.id, size: dataFileSize, in: mappedRange) {
continue outer
}
}

if let directReader = FFMpegFileReader(
source: .resource(mediaBox: self.postbox.mediaBox, resource: playlist.dataFile.media.resource, resourceSize: dataFileSize, mappedRanges: mappedRanges),
useHardwareAcceleration: false,
selectedStream: .mediaType(.video),
seek: .direct(position: nextRequestedFrame),
maxReadablePts: nil
) {
var lastFrame: CMSampleBuffer?
findFrame: while true {
switch directReader.readFrame() {
case let .frame(frame):
if lastFrame == nil {
lastFrame = frame.sampleBuffer
} else if CMSampleBufferGetPresentationTimeStamp(frame.sampleBuffer).seconds > nextRequestedFrame {
break findFrame
} else {
lastFrame = frame.sampleBuffer
}
default:
break findFrame
}
}
if let lastFrame {
if let imageBuffer = CMSampleBufferGetImageBuffer(lastFrame) {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage)
if let cgImage {
self.framePipe.putNext(.image(UIImage(cgImage: cgImage)))
}
}
}
}

self.updateFrameRequest()
return
}
}

let initializationPart = playlist.initializationPart
guard let part = playlist.parts.first(where: { $0.timestamp <= Int(nextRequestedFrame) && ($0.timestamp + $0.duration) > Int(nextRequestedFrame) }) else {
return
}
guard let dataFileSize = self.mainDataFile.media.size else {
return
}

let resource = self.mainDataFile.media.resource
let postbox = self.postbox
let userLocation = self.userLocation
let userContentType = self.userContentType
let dataFile = self.mainDataFile

let partRange: Range<Int64> = Int64(part.range.lowerBound) ..< Int64(part.range.upperBound)

let mappedRanges: [Range<Int64>] = [
Int64(initializationPart.range.lowerBound) ..< Int64(initializationPart.range.upperBound),
partRange
]
var mappedSize: Int64 = 0
for range in mappedRanges {
mappedSize += range.upperBound - range.lowerBound
}

let queue = self.queue
let updateState: (FFMpegLookahead.State) -> Void = { [weak self] state in
queue.async {
guard let self else {
return
}
if self.pendingFrame?.0 != part.timestamp {
return
}
guard let video = state.video else {
return
}

if let directReader = FFMpegFileReader(
source: .resource(mediaBox: postbox.mediaBox, resource: resource, resourceSize: dataFileSize, mappedRanges: mappedRanges),
useHardwareAcceleration: false,
selectedStream: .index(video.info.index),
seek: .stream(streamIndex: state.seek.streamIndex, pts: state.seek.pts),
maxReadablePts: (video.info.index, video.readableToTime.value, state.isEnded)
) {
switch directReader.readFrame() {
case let .frame(frame):
if let imageBuffer = CMSampleBufferGetImageBuffer(frame.sampleBuffer) {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage)
if let cgImage {
self.framePipe.putNext(.image(UIImage(cgImage: cgImage)))
}
}
default:
break
}
}

self.pendingFrame = nil
self.updateFrameRequest()
}
}

let lookahead = FFMpegLookahead(
seekToTimestamp: 0.0,
lookaheadDuration: 0.0,
updateState: updateState,
fetchInRange: { fetchRange in
let disposable = DisposableSet()

let readCount = fetchRange.upperBound - fetchRange.lowerBound
var readingPosition = fetchRange.lowerBound

var bufferOffset = 0
let doRead: (Range<Int64>) -> Void = { range in
disposable.add(fetchedMediaResource(
mediaBox: postbox.mediaBox,
userLocation: userLocation,
userContentType: userContentType,
reference: dataFile.resourceReference(dataFile.media.resource),
range: (range, .elevated),
statsCategory: .video,
preferBackgroundReferenceRevalidation: false
).startStrict())
let count = Int(range.upperBound - range.lowerBound)
bufferOffset += count
readingPosition += Int64(count)
}

var mappedRangePosition: Int64 = 0
for mappedRange in mappedRanges {
let bytesToRead = readCount - Int64(bufferOffset)
if bytesToRead <= 0 {
break
}

let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound
let mappedRangeReadingPosition = readingPosition - mappedRangePosition

if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize {
let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition
let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead)
if mappedRangeBytesToRead > 0 {
let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead)
doRead(mappedReadRange)
}
}

mappedRangePosition += mappedRangeSize
}

return disposable
},
getDataInRange: { getRange, completion in
var signals: [Signal<(Data, Bool), NoError>] = []

let readCount = getRange.upperBound - getRange.lowerBound
var readingPosition = getRange.lowerBound

var bufferOffset = 0
let doRead: (Range<Int64>) -> Void = { range in
signals.append(postbox.mediaBox.resourceData(resource, size: dataFileSize, in: range, mode: .complete))

let readSize = Int(range.upperBound - range.lowerBound)
let effectiveReadSize = max(0, min(Int(readCount) - bufferOffset, readSize))
let count = effectiveReadSize
bufferOffset += count
readingPosition += Int64(count)
}

var mappedRangePosition: Int64 = 0
for mappedRange in mappedRanges {
let bytesToRead = readCount - Int64(bufferOffset)
if bytesToRead <= 0 {
break
}

let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound
let mappedRangeReadingPosition = readingPosition - mappedRangePosition

if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize {
let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition
let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead)
if mappedRangeBytesToRead > 0 {
let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead)
doRead(mappedReadRange)
}
}

mappedRangePosition += mappedRangeSize
}

let signal = combineLatest(signals)
|> map { results -> Data? in
var result = Data()
for (partData, partIsComplete) in results {
if !partIsComplete {
return nil
}
result.append(partData)
}
return result
}

return signal.start(next: { result in
completion(result)
})
},
isDataCachedInRange: { cachedRange in
let readCount = cachedRange.upperBound - cachedRange.lowerBound
var readingPosition = cachedRange.lowerBound

var allDataIsCached = true

var bufferOffset = 0
let doRead: (Range<Int64>) -> Void = { range in
let isCached = postbox.mediaBox.internal_resourceDataIsCached(
id: resource.id,
size: dataFileSize,
in: range
)
if !isCached {
allDataIsCached = false
}

let effectiveReadSize = Int(range.upperBound - range.lowerBound)
let count = effectiveReadSize
bufferOffset += count
readingPosition += Int64(count)
}

var mappedRangePosition: Int64 = 0
for mappedRange in mappedRanges {
let bytesToRead = readCount - Int64(bufferOffset)
if bytesToRead <= 0 {
break
}

let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound
let mappedRangeReadingPosition = readingPosition - mappedRangePosition

if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize {
let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition
let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead)
if mappedRangeBytesToRead > 0 {
let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead)
doRead(mappedReadRange)
}
}

mappedRangePosition += mappedRangeSize
}

return allDataIsCached
},
size: mappedSize
)

self.pendingFrame = (part.timestamp, lookahead)

lookahead.updateCurrentTimestamp(timestamp: 0.0)
}
}

private let queue: Queue
private let impl: QueueLocalObject<Impl>

public var generatedFrames: Signal<FramePreviewResult, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.impl.with { impl in
disposable.set(impl.framePipe.signal().start(next: { result in
subscriber.putNext(result)
}))
}
return disposable
}
}

private let nextRequestedFrame = Atomic<Double?>(value: nil)

public init(
postbox: Postbox,
userLocation: MediaResourceUserLocation,
userContentType: MediaResourceUserContentType,
playlistFile: FileMediaReference,
mainDataFile: FileMediaReference,
alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)]
) {
let queue = Queue()
self.queue = queue
let nextRequestedFrame = self.nextRequestedFrame
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(
queue: queue,
postbox: postbox,
userLocation: userLocation,
userContentType: userContentType,
playlistFile: playlistFile,
mainDataFile: mainDataFile,
alternativeQualities: alternativeQualities,
nextRequestedFrame: nextRequestedFrame
)
})
}

public func generateFrame(at timestamp: Double) {
let _ = self.nextRequestedFrame.swap(timestamp)
self.impl.with { impl in
impl.generateFrame()
}
}

public func cancelPendingFrames() {
self.impl.with { impl in
impl.cancelPendingFrames()
}
}
}

@ -918,8 +918,12 @@ public final class PresentationCallImpl: PresentationCall {
self.audioSessionShouldBeActive.set(false)
if wasActive {
let debugLogValue = Promise<String?>()
self.ongoingContext?.stop(debugLogValue: debugLogValue)
let _ = self.conferenceCall?.leave(terminateIfPossible: false).start()
if let conferenceCall = self.conferenceCall {
debugLogValue.set(conferenceCall.debugLog.get())
let _ = conferenceCall.leave(terminateIfPossible: false).start()
} else {
self.ongoingContext?.stop(debugLogValue: debugLogValue)
}
}
}
var terminating = false
@ -1198,8 +1202,12 @@ public final class PresentationCallImpl: PresentationCall {
public func hangUp() -> Signal<Bool, NoError> {
let debugLogValue = Promise<String?>()
self.callSessionManager.drop(internalId: self.internalId, reason: .hangUp, debugLog: debugLogValue.get())
self.ongoingContext?.stop(debugLogValue: debugLogValue)
let _ = self.conferenceCall?.leave(terminateIfPossible: false).start()
if let conferenceCall = self.conferenceCall {
debugLogValue.set(conferenceCall.debugLog.get())
let _ = conferenceCall.leave(terminateIfPossible: false).start()
} else {
self.ongoingContext?.stop(debugLogValue: debugLogValue)
}

return self.hungUpPromise.get()
}
@ -1207,8 +1215,12 @@ public final class PresentationCallImpl: PresentationCall {
public func rejectBusy() {
self.callSessionManager.drop(internalId: self.internalId, reason: .busy, debugLog: .single(nil))
let debugLog = Promise<String?>()
self.ongoingContext?.stop(debugLogValue: debugLog)
let _ = self.conferenceCall?.leave(terminateIfPossible: false).start()
if let conferenceCall = self.conferenceCall {
debugLog.set(conferenceCall.debugLog.get())
let _ = conferenceCall.leave(terminateIfPossible: false).start()
} else {
self.ongoingContext?.stop(debugLogValue: debugLog)
}
}

public func toggleIsMuted() {
@ -1262,7 +1274,11 @@ public final class PresentationCallImpl: PresentationCall {
guard let screencastCapturer = screencastCapturer else {
return
}
screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1)
guard let sampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: screencastFrame.0) else {
return
}

screencastCapturer.injectSampleBuffer(sampleBuffer, rotation: screencastFrame.1, completion: {})
}))
self.screencastAudioDataDisposable.set((screencastBufferServerContext.audioData
|> deliverOnMainQueue).start(next: { [weak self] data in
@ -1467,3 +1483,36 @@ public final class PresentationCallImpl: PresentationCall {
self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera)
}
}

func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? {
var maybeFormat: CMVideoFormatDescription?
let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat)
if status != noErr {
return nil
}
guard let format = maybeFormat else {
return nil
}

var timingInfo = CMSampleTimingInfo(
duration: CMTimeMake(value: 1, timescale: 30),
presentationTimeStamp: CMTimeMake(value: 0, timescale: 30),
decodeTimeStamp: CMTimeMake(value: 0, timescale: 30)
)

var maybeSampleBuffer: CMSampleBuffer?
let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer)

if (bufferStatus != noErr) {
return nil
}
guard let sampleBuffer = maybeSampleBuffer else {
return nil
}

let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray
let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary
dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber

return sampleBuffer
}

@ -321,12 +321,12 @@ private extension CurrentImpl {
}
}

func stop(account: Account, reportCallId: CallId?) {
func stop(account: Account, reportCallId: CallId?, debugLog: Promise<String?>) {
switch self {
case let .call(callContext):
callContext.stop(account: account, reportCallId: reportCallId)
callContext.stop(account: account, reportCallId: reportCallId, debugLog: debugLog)
case .mediaStream, .externalMediaStream:
break
debugLog.set(.single(nil))
}
}

@ -466,6 +466,138 @@ public func allocateCallLogPath(account: Account) -> String {
return "\(path)/\(name).log"
}

private protocol ScreencastIPCContext: AnyObject {
var isActive: Signal<Bool, NoError> { get }

func requestScreencast() -> Signal<(String, UInt32), NoError>?
func setJoinResponse(clientParams: String)
func disableScreencast(account: Account)
}

private final class ScreencastInProcessIPCContext: ScreencastIPCContext {
private let isConference: Bool

private let screencastBufferServerContext: IpcGroupCallBufferAppContext
private var screencastCallContext: ScreencastContext?
private let screencastCapturer: OngoingCallVideoCapturer
private var screencastFramesDisposable: Disposable?
private var screencastAudioDataDisposable: Disposable?

var isActive: Signal<Bool, NoError> {
return self.screencastBufferServerContext.isActive
}

init(basePath: String, isConference: Bool) {
self.isConference = isConference

let screencastBufferServerContext = IpcGroupCallBufferAppContext(basePath: basePath + "/broadcast-coordination")
self.screencastBufferServerContext = screencastBufferServerContext
let screencastCapturer = OngoingCallVideoCapturer(isCustom: true)
self.screencastCapturer = screencastCapturer
self.screencastFramesDisposable = (screencastBufferServerContext.frames
|> deliverOnMainQueue).start(next: { [weak screencastCapturer] screencastFrame in
guard let screencastCapturer = screencastCapturer else {
return
}
guard let sampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: screencastFrame.0) else {
return
}
screencastCapturer.injectSampleBuffer(sampleBuffer, rotation: screencastFrame.1, completion: {})
})
self.screencastAudioDataDisposable = (screencastBufferServerContext.audioData
|> deliverOnMainQueue).start(next: { [weak self] data in
Queue.mainQueue().async {
guard let self else {
return
}
self.screencastCallContext?.addExternalAudioData(data: data)
}
})
}

deinit {
self.screencastFramesDisposable?.dispose()
self.screencastAudioDataDisposable?.dispose()
}

func requestScreencast() -> Signal<(String, UInt32), NoError>? {
if self.screencastCallContext == nil {
let screencastCallContext = InProcessScreencastContext(
context: OngoingGroupCallContext(
audioSessionActive: .single(true),
video: self.screencastCapturer,
requestMediaChannelDescriptions: { _, _ in EmptyDisposable },
rejoinNeeded: { },
outgoingAudioBitrateKbit: nil,
videoContentType: .screencast,
enableNoiseSuppression: false,
disableAudioInput: true,
enableSystemMute: false,
preferX264: false,
logPath: "",
onMutedSpeechActivityDetected: { _ in },
encryptionKey: nil,
isConference: self.isConference,
sharedAudioDevice: nil
)
)
self.screencastCallContext = screencastCallContext
return screencastCallContext.joinPayload
} else {
return nil
}
}

func setJoinResponse(clientParams: String) {
if let screencastCallContext = self.screencastCallContext {
screencastCallContext.setRTCJoinResponse(clientParams: clientParams)
}
}

func disableScreencast(account: Account) {
if let screencastCallContext = self.screencastCallContext {
self.screencastCallContext = nil
screencastCallContext.stop(account: account, reportCallId: nil)

self.screencastBufferServerContext.stopScreencast()
}
}
}

private final class ScreencastEmbeddedIPCContext: ScreencastIPCContext {
private let serverContext: IpcGroupCallEmbeddedAppContext

var isActive: Signal<Bool, NoError> {
return self.serverContext.isActive
}

init(basePath: String) {
self.serverContext = IpcGroupCallEmbeddedAppContext(basePath: basePath + "/embedded-broadcast-coordination")
}

func requestScreencast() -> Signal<(String, UInt32), NoError>? {
if let id = self.serverContext.startScreencast() {
return self.serverContext.joinPayload
|> filter { joinPayload -> Bool in
return joinPayload.id == id
}
|> map { joinPayload -> (String, UInt32) in
return (joinPayload.data, joinPayload.ssrc)
}
} else {
return nil
}
}

func setJoinResponse(clientParams: String) {
self.serverContext.joinResponse = IpcGroupCallEmbeddedAppContext.JoinResponse(data: clientParams)
}

func disableScreencast(account: Account) {
self.serverContext.stopScreencast()
}
}

public final class PresentationGroupCallImpl: PresentationGroupCall {
private enum InternalState {
case requesting
@ -629,9 +761,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

let externalMediaStream = Promise<DirectMediaStreamingContext>()

private var screencastCallContext: OngoingGroupCallContext?
private var screencastBufferServerContext: IpcGroupCallBufferAppContext?
private var screencastCapturer: OngoingCallVideoCapturer?
private var screencastIPCContext: ScreencastIPCContext?

private struct SsrcMapping {
var peerId: EnginePeer.Id
@ -860,8 +990,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return self.isSpeakingPromise.get()
}

private var screencastFramesDisposable: Disposable?
private var screencastAudioDataDisposable: Disposable?
private var screencastStateDisposable: Disposable?

public let isStream: Bool
@ -876,6 +1004,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

public var onMutedSpeechActivityDetected: ((Bool) -> Void)?

let debugLog = Promise<String?>()

init(
accountContext: AccountContext,
audioSession: ManagedAudioSession,
@ -1149,26 +1279,24 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.requestCall(movingFromBroadcastToRtc: false)
}

let basePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination"
let screencastBufferServerContext = IpcGroupCallBufferAppContext(basePath: basePath)
self.screencastBufferServerContext = screencastBufferServerContext
let screencastCapturer = OngoingCallVideoCapturer(isCustom: true)
self.screencastCapturer = screencastCapturer
self.screencastFramesDisposable = (screencastBufferServerContext.frames
|> deliverOnMainQueue).start(next: { [weak screencastCapturer] screencastFrame in
guard let screencastCapturer = screencastCapturer else {
return
}
screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1)
})
self.screencastAudioDataDisposable = (screencastBufferServerContext.audioData
|> deliverOnMainQueue).start(next: { [weak self] data in
guard let strongSelf = self else {
return
}
strongSelf.screencastCallContext?.addExternalAudioData(data: data)
})
self.screencastStateDisposable = (screencastBufferServerContext.isActive
var useIPCContext = "".isEmpty
if let data = self.accountContext.currentAppConfiguration.with({ $0 }).data, data["ios_killswitch_use_inprocess_screencast"] != nil {
useIPCContext = false
}

let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type"

let screencastIPCContext: ScreencastIPCContext
if useIPCContext {
screencastIPCContext = ScreencastEmbeddedIPCContext(basePath: self.accountContext.sharedContext.basePath)
let _ = try? "ipc".write(toFile: embeddedBroadcastImplementationTypePath, atomically: true, encoding: .utf8)
} else {
screencastIPCContext = ScreencastInProcessIPCContext(basePath: self.accountContext.sharedContext.basePath, isConference: self.isConference)
let _ = try? "legacy".write(toFile: embeddedBroadcastImplementationTypePath, atomically: true, encoding: .utf8)
}
self.screencastIPCContext = screencastIPCContext

self.screencastStateDisposable = (screencastIPCContext.isActive
|> distinctUntilChanged
|> deliverOnMainQueue).start(next: { [weak self] isActive in
guard let strongSelf = self else {
@ -1228,8 +1356,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

self.peerUpdatesSubscription?.dispose()

self.screencastFramesDisposable?.dispose()
self.screencastAudioDataDisposable?.dispose()
self.screencastStateDisposable?.dispose()

self.internal_isRemoteConnectedDisposable?.dispose()
@ -2658,10 +2784,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
self.markedAsCanBeRemoved = true

self.genericCallContext?.stop(account: self.account, reportCallId: self.conferenceFromCallId)

//self.screencastIpcContext = nil
self.screencastCallContext?.stop(account: self.account, reportCallId: nil)
self.genericCallContext?.stop(account: self.account, reportCallId: self.conferenceFromCallId, debugLog: self.debugLog)
self.screencastIPCContext?.disableScreencast(account: self.account)

self._canBeRemoved.set(.single(true))

@ -3106,59 +3230,50 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}

private func requestScreencast() {
guard let callInfo = self.internalState.callInfo, self.screencastCallContext == nil else {
guard let callInfo = self.internalState.callInfo else {
return
}

self.hasScreencast = true

let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil, isConference: self.isConference, sharedAudioDevice: nil)
self.screencastCallContext = screencastCallContext

self.screencastJoinDisposable.set((screencastCallContext.joinPayload
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] joinPayload in
guard let strongSelf = self else {
return
}

strongSelf.requestDisposable.set((strongSelf.accountContext.engine.calls.joinGroupCallAsScreencast(
callId: callInfo.id,
accessHash: callInfo.accessHash,
joinPayload: joinPayload.0
)
|> deliverOnMainQueue).start(next: { joinCallResult in
guard let strongSelf = self, let screencastCallContext = strongSelf.screencastCallContext else {
if let screencastIPCContext = self.screencastIPCContext, let joinPayload = screencastIPCContext.requestScreencast() {
self.screencastJoinDisposable.set((joinPayload
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] joinPayload in
guard let strongSelf = self else {
return
}
let clientParams = joinCallResult.jsonParams

screencastCallContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false)
screencastCallContext.setJoinResponse(payload: clientParams)
}, error: { error in
guard let _ = self else {
return
}
strongSelf.requestDisposable.set((strongSelf.accountContext.engine.calls.joinGroupCallAsScreencast(
callId: callInfo.id,
accessHash: callInfo.accessHash,
joinPayload: joinPayload.0
)
|> deliverOnMainQueue).start(next: { joinCallResult in
guard let strongSelf = self, let screencastIPCContext = strongSelf.screencastIPCContext else {
return
}
screencastIPCContext.setJoinResponse(clientParams: joinCallResult.jsonParams)

}, error: { error in
guard let _ = self else {
return
}
}))
}))
}))
}
}

public func disableScreencast() {
self.hasScreencast = false
if let screencastCallContext = self.screencastCallContext {
self.screencastCallContext = nil
screencastCallContext.stop(account: self.account, reportCallId: nil)
self.screencastIPCContext?.disableScreencast(account: self.account)

let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo

let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo

if let callInfo = maybeCallInfo {
self.screencastJoinDisposable.set(self.accountContext.engine.calls.leaveGroupCallAsScreencast(
callId: callInfo.id,
accessHash: callInfo.accessHash
).start())
}

self.screencastBufferServerContext?.stopScreencast()
if let callInfo = maybeCallInfo {
self.screencastJoinDisposable.set(self.accountContext.engine.calls.leaveGroupCallAsScreencast(
callId: callInfo.id,
accessHash: callInfo.accessHash
).start())
}
}

@ -3608,3 +3723,34 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|> runOn(.mainQueue())
}
}

private protocol ScreencastContext: AnyObject {
func addExternalAudioData(data: Data)
func stop(account: Account, reportCallId: CallId?)
func setRTCJoinResponse(clientParams: String)
}

private final class InProcessScreencastContext: ScreencastContext {
private let context: OngoingGroupCallContext

var joinPayload: Signal<(String, UInt32), NoError> {
return self.context.joinPayload
}

init(context: OngoingGroupCallContext) {
self.context = context
}

func addExternalAudioData(data: Data) {
self.context.addExternalAudioData(data: data)
}

func stop(account: Account, reportCallId: CallId?) {
self.context.stop(account: account, reportCallId: reportCallId, debugLog: Promise())
}

func setRTCJoinResponse(clientParams: String) {
self.context.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false)
self.context.setJoinResponse(payload: clientParams)
}
}

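Editorial note: `ScreencastIPCContext` abstracts the two screencast transports above (in-process WebRTC context vs. the embedded IPC server) behind one request/response flow. A hedged sketch of that flow, written against the protocol as declared in this diff; `joinAsScreencast` stands in for the actual `joinGroupCallAsScreencast` request and is hypothetical:

private func startScreencast(
    ipcContext: ScreencastIPCContext,
    joinAsScreencast: @escaping (String) -> Signal<String, NoError>
) -> Disposable? {
    // requestScreencast() returns nil when a screencast is already running.
    guard let joinPayload = ipcContext.requestScreencast() else {
        return nil
    }
    return (joinPayload
    |> take(1)
    |> mapToSignal { payload -> Signal<String, NoError> in
        // payload.0 is the join payload string, payload.1 the ssrc.
        return joinAsScreencast(payload.0)
    }
    |> deliverOnMainQueue).start(next: { clientParams in
        // Hand the server response back to whichever implementation is active.
        ipcContext.setJoinResponse(clientParams: clientParams)
    })
}
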
@ -8,39 +8,6 @@ import TelegramVoip
import AVFoundation
import LibYuvBinding

private func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? {
var maybeFormat: CMVideoFormatDescription?
let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat)
if status != noErr {
return nil
}
guard let format = maybeFormat else {
return nil
}

var timingInfo = CMSampleTimingInfo(
duration: CMTimeMake(value: 1, timescale: 30),
presentationTimeStamp: CMTimeMake(value: 0, timescale: 30),
decodeTimeStamp: CMTimeMake(value: 0, timescale: 30)
)

var maybeSampleBuffer: CMSampleBuffer?
let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer)

if (bufferStatus != noErr) {
return nil
}
guard let sampleBuffer = maybeSampleBuffer else {
return nil
}

let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray
let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary
dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber

return sampleBuffer
}

private func copyI420BufferToNV12Buffer(buffer: OngoingGroupCallContext.VideoFrameData.I420Buffer, pixelBuffer: CVPixelBuffer) -> Bool {
guard CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange else {
return false

@ -0,0 +1,30 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
name = "AvatarUploadToastScreen",
module_name = "AvatarUploadToastScreen",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/Display",
"//submodules/TelegramPresentationData",
"//submodules/ComponentFlow",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/Postbox",
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/Components/ViewControllerComponent",
"//submodules/Components/MultilineTextComponent",
"//submodules/AccountContext",
"//submodules/RadialStatusNode",
"//submodules/TelegramUI/Components/AnimatedTextComponent",
"//submodules/TelegramUI/Components/PlainButtonComponent",
],
visibility = [
"//visibility:public",
],
)
@ -0,0 +1,466 @@
import Foundation
import UIKit
import Display
import TelegramPresentationData
import ComponentFlow
import ComponentDisplayAdapters
import AppBundle
import ViewControllerComponent
import AccountContext
import MultilineTextComponent
import RadialStatusNode
import SwiftSignalKit
import AnimatedTextComponent
import PlainButtonComponent

private final class AvatarUploadToastScreenComponent: Component {
let context: AccountContext
let image: UIImage
let uploadStatus: Signal<PeerInfoAvatarUploadStatus, NoError>
let arrowTarget: () -> (UIView, CGRect)?
let viewUploadedAvatar: () -> Void

init(context: AccountContext, image: UIImage, uploadStatus: Signal<PeerInfoAvatarUploadStatus, NoError>, arrowTarget: @escaping () -> (UIView, CGRect)?, viewUploadedAvatar: @escaping () -> Void) {
self.context = context
self.image = image
self.uploadStatus = uploadStatus
self.arrowTarget = arrowTarget
self.viewUploadedAvatar = viewUploadedAvatar
}

static func ==(lhs: AvatarUploadToastScreenComponent, rhs: AvatarUploadToastScreenComponent) -> Bool {
return true
}

final class View: UIView {
private let contentView: UIView
private let backgroundView: BlurredBackgroundView

private let backgroundMaskView: UIView
private let backgroundMainMaskView: UIView
private let backgroundArrowMaskView: UIImageView

private let avatarView: UIImageView
private let progressNode: RadialStatusNode
private let content = ComponentView<Empty>()
private let actionButton = ComponentView<Empty>()

private var isUpdating: Bool = false
private var component: AvatarUploadToastScreenComponent?
private var environment: EnvironmentType?
private weak var state: EmptyComponentState?

private var status: PeerInfoAvatarUploadStatus = .progress(0.0)
private var statusDisposable: Disposable?

private var doneTimer: Foundation.Timer?
private var currentIsDone: Bool = false

private var isDisplaying: Bool = false

var targetAvatarView: UIView? {
return self.avatarView
}

override init(frame: CGRect) {
self.contentView = UIView()

self.backgroundView = BlurredBackgroundView(color: .clear, enableBlur: true)

self.backgroundMaskView = UIView()

self.backgroundMainMaskView = UIView()
self.backgroundMainMaskView.backgroundColor = .white

self.backgroundArrowMaskView = UIImageView()

self.avatarView = UIImageView()
self.progressNode = RadialStatusNode(backgroundNodeColor: .clear)

super.init(frame: frame)

self.backgroundView.mask = self.backgroundMaskView
self.backgroundMaskView.addSubview(self.backgroundMainMaskView)
self.backgroundMaskView.addSubview(self.backgroundArrowMaskView)
self.addSubview(self.backgroundView)

self.addSubview(self.contentView)
self.contentView.addSubview(self.avatarView)
self.contentView.addSubview(self.progressNode.view)
}

required init?(coder: NSCoder) {
preconditionFailure()
}

deinit {
self.statusDisposable?.dispose()
self.doneTimer?.invalidate()
}

override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
if !self.contentView.frame.contains(point) {
return nil
}
return super.hitTest(point, with: event)
}

func animateIn() {
func generateParabolicMotionKeyframes(from sourcePoint: CGFloat, elevation: CGFloat) -> [CGFloat] {
let midPoint = sourcePoint - elevation

let y1 = sourcePoint
let y2 = midPoint
let y3 = sourcePoint

let x1 = 0.0
let x2 = 100.0
let x3 = 200.0

var keyframes: [CGFloat] = []
let a = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
let b = (x1 * x1 * (y2 - y3) + x3 * x3 * (y1 - y2) + x2 * x2 * (y3 - y1)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
let c = (x2 * x2 * (x3 * y1 - x1 * y3) + x2 * (x1 * x1 * y3 - x3 * x3 * y1) + x1 * x3 * (x3 - x1) * y2) / ((x1 - x2) * (x1 - x3) * (x2 - x3))

for i in 0 ..< 10 {
let k = listViewAnimationCurveSystem(CGFloat(i) / CGFloat(10 - 1))
let x = x3 * k
let y = a * x * x + b * x + c

keyframes.append(y)
}

return keyframes
}
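// Editorial note (not part of the commit): a, b, c above are the closed-form
// coefficients of the unique parabola y = a*x^2 + b*x + c through the three
// control points (x1, y1), (x2, y2), (x3, y3). With y1 = y3 = sourcePoint and
// y2 = sourcePoint - elevation, the generated keyframes start and end at
// sourcePoint and peak at the elevated midpoint halfway through the animation.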
let offsetValues = generateParabollicMotionKeyframes(from: 0.0, elevation: -10.0)
|
||||
self.layer.animateKeyframes(values: offsetValues.map { $0 as NSNumber }, duration: 0.5, keyPath: "position.y", additive: true)
|
||||
|
||||
self.contentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
|
||||
self.isDisplaying = true
|
||||
if !self.isUpdating {
|
||||
self.state?.updated(transition: .spring(duration: 0.5))
|
||||
}
|
||||
}
|
||||
|
||||
func animateOut(completion: @escaping () -> Void) {
|
||||
self.backgroundView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in
|
||||
})
|
||||
self.contentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in
|
||||
completion()
|
||||
})
|
||||
}
|
||||
|
||||
func update(component: AvatarUploadToastScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<ViewControllerComponentContainer.Environment>, transition: ComponentTransition) -> CGSize {
|
||||
self.isUpdating = true
|
||||
defer {
|
||||
self.isUpdating = false
|
||||
}
|
||||
|
||||
let environment = environment[ViewControllerComponentContainer.Environment.self].value
|
||||
|
||||
if self.component == nil {
|
||||
self.statusDisposable = (component.uploadStatus
|
||||
|> deliverOnMainQueue).startStrict(next: { [weak self] status in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.status = status
|
||||
if !self.isUpdating {
|
||||
self.state?.updated(transition: .spring(duration: 0.4))
|
||||
}
|
||||
|
||||
if case .done = status, self.doneTimer == nil {
|
||||
self.doneTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 4.0, repeats: false, block: { [weak self] _ in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.environment?.controller()?.dismiss()
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.component = component
|
||||
self.environment = environment
|
||||
self.state = state
|
||||
|
||||
var isDone = false
|
||||
let effectiveProgress: CGFloat
|
||||
switch self.status {
|
||||
case let .progress(value):
|
||||
effectiveProgress = CGFloat(value)
|
||||
case .done:
|
||||
isDone = true
|
||||
effectiveProgress = 1.0
|
||||
}
|
||||
let previousIsDone = self.currentIsDone
|
||||
self.currentIsDone = isDone
|
||||
|
||||
let contentInsets = UIEdgeInsets(top: 10.0, left: 12.0, bottom: 10.0, right: 10.0)
|
||||
|
||||
let tabBarHeight: CGFloat
|
||||
if !environment.safeInsets.left.isZero {
|
||||
tabBarHeight = 34.0 + environment.safeInsets.bottom
|
||||
} else {
|
||||
tabBarHeight = 49.0 + environment.safeInsets.bottom
|
||||
}
|
||||
let containerInsets = UIEdgeInsets(
|
||||
top: environment.safeInsets.top,
|
||||
left: environment.safeInsets.left + 12.0,
|
||||
bottom: tabBarHeight + 3.0,
|
||||
right: environment.safeInsets.right + 12.0
|
||||
)
|
||||
|
||||
let availableContentSize = CGSize(width: availableSize.width - containerInsets.left - containerInsets.right, height: availableSize.height - containerInsets.top - containerInsets.bottom)
|
||||
|
||||
let spacing: CGFloat = 12.0
|
||||
|
||||
let iconSize = CGSize(width: 30.0, height: 30.0)
|
||||
let iconProgressInset: CGFloat = 3.0
|
||||
|
||||
var textItems: [AnimatedTextComponent.Item] = []
textItems.append(AnimatedTextComponent.Item(id: AnyHashable(0), isUnbreakable: true, content: .text("Your photo is ")))
if isDone {
textItems.append(AnimatedTextComponent.Item(id: AnyHashable(1), isUnbreakable: true, content: .text("now set.")))
} else {
textItems.append(AnimatedTextComponent.Item(id: AnyHashable(1), isUnbreakable: true, content: .text("uploading.")))
}

let actionButtonSize = self.actionButton.update(
transition: .immediate,
component: AnyComponent(PlainButtonComponent(
content: AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: "View", font: Font.regular(17.0), textColor: environment.theme.list.itemAccentColor.withMultiplied(hue: 0.933, saturation: 0.61, brightness: 1.0)))
)),
effectAlignment: .center,
contentInsets: UIEdgeInsets(top: -8.0, left: -8.0, bottom: -8.0, right: -8.0),
action: { [weak self] in
guard let self, let component = self.component else {
return
}
self.doneTimer?.invalidate()
self.environment?.controller()?.dismiss()
component.viewUploadedAvatar()
},
animateAlpha: true,
animateScale: false,
animateContents: false
)),
environment: {},
containerSize: CGSize(width: availableContentSize.width - contentInsets.left - contentInsets.right - spacing - iconSize.width, height: availableContentSize.height)
)

//TODO:localize
let contentSize = self.content.update(
transition: transition,
component: AnyComponent(AnimatedTextComponent(
font: Font.regular(14.0),
color: .white,
items: textItems
)),
environment: {},
containerSize: CGSize(width: availableContentSize.width - contentInsets.left - contentInsets.right - spacing - iconSize.width - actionButtonSize.width - 16.0 - 4.0, height: availableContentSize.height)
)

var contentHeight: CGFloat = 0.0
contentHeight += contentInsets.top + contentInsets.bottom + max(iconSize.height, contentSize.height)

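// Render the circular avatar thumbnail once and cache it in the image view.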
if self.avatarView.image == nil {
self.avatarView.image = generateImage(iconSize, rotatedContext: { size, context in
UIGraphicsPushContext(context)
defer {
UIGraphicsPopContext()
}

context.clear(CGRect(origin: CGPoint(), size: size))

context.addEllipse(in: CGRect(origin: CGPoint(), size: size))
context.clip()

component.image.draw(in: CGRect(origin: CGPoint(), size: size))
})
}

let avatarFrame = CGRect(origin: CGPoint(x: contentInsets.left, y: floor((contentHeight - iconSize.height) * 0.5)), size: iconSize)

var adjustedAvatarFrame = avatarFrame
if !isDone {
adjustedAvatarFrame = adjustedAvatarFrame.insetBy(dx: iconProgressInset, dy: iconProgressInset)
}
transition.setPosition(view: self.avatarView, position: adjustedAvatarFrame.center)
transition.setBounds(view: self.avatarView, bounds: CGRect(origin: CGPoint(), size: adjustedAvatarFrame.size))
if isDone && !previousIsDone {
let topScale: CGFloat = 1.1
self.avatarView.layer.animateScale(from: 1.0, to: topScale, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in
guard let self else {
return
}
self.avatarView.layer.animateScale(from: topScale, to: 1.0, duration: 0.16)
})
self.progressNode.layer.animateScale(from: 1.0, to: topScale, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in
guard let self else {
return
}
self.progressNode.layer.animateScale(from: topScale, to: 1.0, duration: 0.16)
})
HapticFeedback().success()
}

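// The progress ring overlays the avatar and fades out once the upload is done.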
self.progressNode.frame = avatarFrame
self.progressNode.transitionToState(.progress(color: .white, lineWidth: 1.0 + UIScreenPixel, value: effectiveProgress, cancelEnabled: false, animateRotation: true))
transition.setAlpha(view: self.progressNode.view, alpha: isDone ? 0.0 : 1.0)

if let contentView = self.content.view {
if contentView.superview == nil {
self.contentView.addSubview(contentView)
}
transition.setFrame(view: contentView, frame: CGRect(origin: CGPoint(x: contentInsets.left + iconSize.width + spacing, y: floor((contentHeight - contentSize.height) * 0.5)), size: contentSize))
}

if let actionButtonView = self.actionButton.view {
if actionButtonView.superview == nil {
self.contentView.addSubview(actionButtonView)
}
transition.setFrame(view: actionButtonView, frame: CGRect(origin: CGPoint(x: availableContentSize.width - contentInsets.right - 16.0 - actionButtonSize.width, y: floor((contentHeight - actionButtonSize.height) * 0.5)), size: actionButtonSize))
transition.setAlpha(view: actionButtonView, alpha: isDone ? 1.0 : 0.0)
}

let size = CGSize(width: availableContentSize.width, height: contentHeight)

let contentFrame = CGRect(origin: CGPoint(x: containerInsets.left, y: availableSize.height - containerInsets.bottom - size.height), size: size)

self.backgroundView.updateColor(color: self.isDisplaying ? UIColor(white: 0.0, alpha: 0.7) : UIColor.black, transition: transition.containedViewLayoutTransition)
let backgroundFrame: CGRect
if self.isDisplaying {
backgroundFrame = contentFrame
} else {
backgroundFrame = CGRect(origin: CGPoint(), size: availableSize)
}
if self.backgroundView.bounds.size != contentFrame.size {
self.backgroundView.update(size: availableSize, cornerRadius: 0.0, transition: transition.containedViewLayoutTransition)
}
transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: availableSize))
transition.setFrame(view: self.backgroundMaskView, frame: CGRect(origin: CGPoint(), size: availableSize))

transition.setCornerRadius(layer: self.backgroundMainMaskView.layer, cornerRadius: self.isDisplaying ? 14.0 : 0.0)
transition.setFrame(view: self.backgroundMainMaskView, frame: backgroundFrame)

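// The arrow is drawn as a template image inside the background mask, so it shares the toast's dimmed material.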
if self.backgroundArrowMaskView.image == nil {
let arrowFactor: CGFloat = 0.75
let arrowSize = CGSize(width: floor(29.0 * arrowFactor), height: floor(10.0 * arrowFactor))
self.backgroundArrowMaskView.image = generateImage(arrowSize, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.scaleBy(x: size.width / 29.0, y: size.height / 10.0)
context.setFillColor(UIColor.white.cgColor)
context.scaleBy(x: 0.333, y: 0.333)
let _ = try? drawSvgPath(context, path: "M85.882251,0 C79.5170552,0 73.4125613,2.52817247 68.9116882,7.02834833 L51.4264069,24.5109211 C46.7401154,29.1964866 39.1421356,29.1964866 34.4558441,24.5109211 L16.9705627,7.02834833 C12.4696897,2.52817247 6.36519576,0 0,0 L85.882251,0 ")
context.fillPath()
})?.withRenderingMode(.alwaysTemplate)
}

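// Point the arrow at the avatar target supplied by the host; hide it when no target is available.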
if let arrowImage = self.backgroundArrowMaskView.image, let (targetView, targetRect) = component.arrowTarget() {
let targetArrowRect = targetView.convert(targetRect, to: self)
self.backgroundArrowMaskView.isHidden = false

var arrowFrame = CGRect(origin: CGPoint(x: targetArrowRect.minX + floor((targetArrowRect.width - arrowImage.size.width) * 0.5), y: contentFrame.maxY), size: arrowImage.size)
if !self.isDisplaying {
arrowFrame = arrowFrame.offsetBy(dx: 0.0, dy: -10.0)
}
transition.setFrame(view: self.backgroundArrowMaskView, frame: arrowFrame)
} else {
self.backgroundArrowMaskView.isHidden = true
}

transition.setFrame(view: self.contentView, frame: contentFrame)

return availableSize
}
}

func makeView() -> View {
return View(frame: CGRect())
}

func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<ViewControllerComponentContainer.Environment>, transition: ComponentTransition) -> CGSize {
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}

public class AvatarUploadToastScreen: ViewControllerComponentContainer {
public var targetAvatarView: UIView? {
if let view = self.node.hostView.componentView as? AvatarUploadToastScreenComponent.View {
return view.targetAvatarView
}
return nil
}

private var processedDidAppear: Bool = false
private var processedDidDisappear: Bool = false

public init(
context: AccountContext,
image: UIImage,
uploadStatus: Signal<PeerInfoAvatarUploadStatus, NoError>,
arrowTarget: @escaping () -> (UIView, CGRect)?,
viewUploadedAvatar: @escaping () -> Void
) {
super.init(
context: context,
component: AvatarUploadToastScreenComponent(
context: context,
image: image,
uploadStatus: uploadStatus,
arrowTarget: arrowTarget,
viewUploadedAvatar: viewUploadedAvatar
),
navigationBarAppearance: .none,
statusBarStyle: .ignore,
presentationMode: .default,
updatedPresentationData: nil
)
self.navigationPresentation = .flatModal
}

required public init(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

deinit {
}

override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
super.containerLayoutUpdated(layout, transition: transition)
}

override public func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)

if !self.processedDidAppear {
self.processedDidAppear = true
if let componentView = self.node.hostView.componentView as? AvatarUploadToastScreenComponent.View {
componentView.animateIn()
}
}
}

private func superDismiss() {
super.dismiss()
}

override public func dismiss(completion: (() -> Void)? = nil) {
if !self.processedDidDisappear {
self.processedDidDisappear = true

if let componentView = self.node.hostView.componentView as? AvatarUploadToastScreenComponent.View {
componentView.animateOut(completion: { [weak self] in
if let self {
self.superDismiss()
}
completion?()
})
} else {
super.dismiss(completion: completion)
}
}
}
}
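// Usage sketch (an assumption for illustration, not part of this diff): pair the screen
// with the status promise that updateProfilePhoto(_:mode:uploadStatus:) fills, e.g.
//   let toast = AvatarUploadToastScreen(context: context, image: image,
//       uploadStatus: uploadStatus.get(), arrowTarget: { ... }, viewUploadedAvatar: { ... })
//   controller.push(toast) // or an equivalent presentation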
@ -303,6 +303,18 @@ public final class ChatInlineSearchResultsListComponent: Component {
}
}

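// Let touches pass through the list when it has no background color (i.e. it is visually transparent).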
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
guard let result = super.hitTest(point, with: event) else {
return nil
}
if result === self.listNode.view {
if self.backgroundColor == nil {
return nil
}
}
return result
}

func update(component: ChatInlineSearchResultsListComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
self.isUpdating = true
defer {
@ -342,7 +342,7 @@ public final class MediaRecordingPanelComponent: Component {
}

self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)
self.vibrancyCancelIconView.tintColor = .white
self.vibrancyCancelIconView.tintColor = .black

let cancelTextSize = self.cancelText.update(
transition: .immediate,
@ -352,7 +352,7 @@ public final class MediaRecordingPanelComponent: Component {
)
let _ = self.vibrancyCancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: component.strings.Conversation_SlideToCancel, font: Font.regular(15.0), color: .white)),
component: AnyComponent(Text(text: component.strings.Conversation_SlideToCancel, font: Font.regular(15.0), color: .black)),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)

@ -457,7 +457,7 @@ public final class MessageInputPanelComponent: Component {

public final class View: UIView {
private let fieldBackgroundView: BlurredBackgroundView
private let vibrancyEffectView: UIVisualEffectView
private let fieldBackgroundTint: UIView
private let gradientView: UIImageView
private let bottomGradientView: UIView

@ -522,12 +522,16 @@ public final class MessageInputPanelComponent: Component {
}

override init(frame: CGRect) {
self.fieldBackgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true)

self.vibrancyEffectView = UIVisualEffectView(effect: UIVibrancyEffect(blurEffect: UIBlurEffect(style: .dark)))
self.fieldBackgroundView = BlurredBackgroundView(color: nil, enableBlur: true)
self.fieldBackgroundTint = UIView()
self.fieldBackgroundTint.backgroundColor = UIColor(white: 1.0, alpha: 0.1)

self.mediaRecordingVibrancyContainer = UIView()
self.vibrancyEffectView.contentView.addSubview(self.mediaRecordingVibrancyContainer)
if let filter = CALayer.luminanceToAlpha() {
self.mediaRecordingVibrancyContainer.backgroundColor = .white
self.mediaRecordingVibrancyContainer.layer.filters = [filter]
}
self.fieldBackgroundTint.mask = self.mediaRecordingVibrancyContainer
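// With luminanceToAlpha, the white container becomes an opaque mask and black children punch holes in the tint layer (assumed intent of the white -> black tint flips above).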

self.gradientView = UIImageView()
self.bottomGradientView = UIView()
@ -538,8 +542,8 @@ public final class MessageInputPanelComponent: Component {

self.addSubview(self.bottomGradientView)
self.addSubview(self.gradientView)
self.fieldBackgroundView.addSubview(self.vibrancyEffectView)
self.addSubview(self.fieldBackgroundView)
self.addSubview(self.fieldBackgroundTint)
self.addSubview(self.textClippingView)

self.viewForOverlayContent = ViewForOverlayContent(
@ -876,7 +880,7 @@ public final class MessageInputPanelComponent: Component {
transition: placeholderTransition,
component: AnyComponent(AnimatedTextComponent(
font: Font.regular(17.0),
color: .white,
color: .black,
items: placeholderItems
)),
environment: {},
@ -912,7 +916,7 @@ public final class MessageInputPanelComponent: Component {
if let headerView = headerView as? ForwardInfoPanelComponent.View {
if headerView.superview == nil {
self.addSubview(headerView)
self.vibrancyEffectView.contentView.addSubview(headerView.backgroundView)
self.mediaRecordingVibrancyContainer.addSubview(headerView.backgroundView)

headerView.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4)
}
@ -965,11 +969,15 @@ public final class MessageInputPanelComponent: Component {
let rawFieldBackgroundFrame = fieldBackgroundFrame
fieldBackgroundFrame.size.height += headerHeight

transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
self.vibrancyEffectView.isHidden = false // component.style == .media
//transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))

transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
transition.setFrame(view: self.fieldBackgroundTint, frame: fieldBackgroundFrame)
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))

//self.fieldBackgroundTint.backgroundColor = .blue
transition.setCornerRadius(layer: self.fieldBackgroundTint.layer, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5)

var textClippingFrame = rawFieldBackgroundFrame.offsetBy(dx: 0.0, dy: headerHeight)
if component.style == .media, !isEditing {
@ -993,7 +1001,7 @@ public final class MessageInputPanelComponent: Component {
if let placeholderView = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
if vibrancyPlaceholderView.superview == nil {
vibrancyPlaceholderView.layer.anchorPoint = CGPoint()
self.vibrancyEffectView.contentView.addSubview(vibrancyPlaceholderView)
self.mediaRecordingVibrancyContainer.addSubview(vibrancyPlaceholderView)

vibrancyPlaceholderView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4)
}
@ -1768,7 +1776,7 @@ public final class MessageInputPanelComponent: Component {
lightFieldColor = UIColor(white: 0.2, alpha: 0.45)
} else if self.textFieldExternalState.hasText && component.alwaysDarkWhenHasText {
fieldBackgroundIsDark = true
} else if isEditing || component.style == .editor {
} else if isEditing || component.style == .story || component.style == .editor {
fieldBackgroundIsDark = true
}
self.fieldBackgroundView.updateColor(color: fieldBackgroundIsDark ? UIColor(white: 0.0, alpha: 0.5) : lightFieldColor, transition: transition.containedViewLayoutTransition)
@ -1258,7 +1258,7 @@ func peerInfoScreenData(context: AccountContext, peerId: PeerId, strings: Presen
let starsRevenueContextAndState = context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: peerId))
|> mapToSignal { peer -> Signal<(StarsRevenueStatsContext?, StarsRevenueStats?), NoError> in
var canViewStarsRevenue = false
if let peer, case let .user(user) = peer, let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) || context.sharedContext.applicationBindings.appBuildType == .internal {
if let peer, case let .user(user) = peer, let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) || context.sharedContext.applicationBindings.appBuildType == .internal || context.sharedContext.immediateExperimentalUISettings.devRequests {
canViewStarsRevenue = true
}
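// The devRequests experimental flag grants the same revenue visibility as internal builds.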
#if DEBUG
@ -1283,7 +1283,7 @@ func peerInfoScreenData(context: AccountContext, peerId: PeerId, strings: Presen
)
|> mapToSignal { peer, canViewRevenue -> Signal<(RevenueStatsContext?, RevenueStats?), NoError> in
var canViewRevenue = canViewRevenue
if let peer, case let .user(user) = peer, let _ = user.botInfo, context.sharedContext.applicationBindings.appBuildType == .internal {
if let peer, case let .user(user) = peer, let _ = user.botInfo, context.sharedContext.applicationBindings.appBuildType == .internal || context.sharedContext.immediateExperimentalUISettings.devRequests {
canViewRevenue = true
}
#if DEBUG
@ -51,9 +51,14 @@ final class PeerInfoHeaderNavigationButtonContainerNode: SparseNode {
button.updateContentsColor(backgroundColor: self.backgroundContentColor, contentsColor: self.contentsColor, canBeExpanded: canBeExpanded, transition: transition)
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: canBeExpanded ? -8.0 : 0.0, y: 0.0))
}

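// Each right button that draws an opaque background tightens the offset for the next one by 6pt (assumed spacing compensation).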
var accumulatedRightButtonOffset: CGFloat = canBeExpanded ? 16.0 : 0.0
for (_, button) in self.rightButtonNodes {
button.updateContentsColor(backgroundColor: self.backgroundContentColor, contentsColor: self.contentsColor, canBeExpanded: canBeExpanded, transition: transition)
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: canBeExpanded ? 16.0 : 0.0, y: 0.0))
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: accumulatedRightButtonOffset, y: 0.0))
if self.backgroundContentColor.alpha != 0.0 {
accumulatedRightButtonOffset -= 6.0
}
}
}

@ -171,8 +176,8 @@ final class PeerInfoHeaderNavigationButtonContainerNode: SparseNode {
if self.currentRightButtons != rightButtons || presentationData.strings !== self.presentationData?.strings {
self.currentRightButtons = rightButtons

var nextRegularButtonOrigin = size.width - sideInset
var nextExpandedButtonOrigin = size.width - sideInset
var nextRegularButtonOrigin = size.width - sideInset - 8.0
var nextExpandedButtonOrigin = size.width - sideInset - 8.0
for spec in rightButtons.reversed() {
let buttonNode: PeerInfoHeaderNavigationButton
var wasAdded = false
@ -248,8 +253,8 @@ final class PeerInfoHeaderNavigationButtonContainerNode: SparseNode {
}
}
} else {
var nextRegularButtonOrigin = size.width - sideInset
var nextExpandedButtonOrigin = size.width - sideInset
var nextRegularButtonOrigin = size.width - sideInset - 8.0
var nextExpandedButtonOrigin = size.width - sideInset - 8.0

for spec in rightButtons.reversed() {
var key = spec.key
@ -3947,38 +3947,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
return
}

let entriesPromise = Promise<[AvatarGalleryEntry]>(entries)
let galleryController = AvatarGalleryController(context: strongSelf.context, peer: EnginePeer(peer), sourceCorners: .round, remoteEntries: entriesPromise, skipInitial: true, centralEntryIndex: centralEntry.flatMap { entries.firstIndex(of: $0) }, replaceRootController: { controller, ready in
})
galleryController.openAvatarSetup = { [weak self] completion in
self?.controller?.openAvatarForEditing(fromGallery: true, completion: { _ in
completion()
})
}
galleryController.avatarPhotoEditCompletion = { [weak self] image in
self?.controller?.updateProfilePhoto(image, mode: .generic)
}
galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in
self?.controller?.updateProfileVideo(image, asset: asset, adjustments: adjustments, mode: .generic)
}
galleryController.removedEntry = { [weak self] entry in
if let item = PeerInfoAvatarListItem(entry: entry) {
let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(item)
}
}
strongSelf.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).startStrict(next: { entry in
self?.headerNode.updateAvatarIsHidden(entry: entry)
}))
strongSelf.view.endEditing(true)
strongSelf.controller?.present(galleryController, in: .window(.root), with: AvatarGalleryControllerPresentationArguments(transitionArguments: { entry in
if let transitionNode = self?.headerNode.avatarTransitionArguments(entry: entry) {
return GalleryTransitionArguments(transitionNode: transitionNode, addToTransitionSurface: { view in
self?.headerNode.addToAvatarTransitionSurface(view: view)
})
} else {
return nil
}
}))
strongSelf.openAvatarGallery(peer: EnginePeer(peer), entries: entries, centralEntry: centralEntry, animateTransition: true)

Queue.mainQueue().after(0.4) {
strongSelf.resetHeaderExpansion()
@ -9766,7 +9735,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
mixin.didFinishWithImage = { [weak self] image in
if let image = image {
completion(image)
self?.controller?.updateProfilePhoto(image, mode: mode)
self?.controller?.updateProfilePhoto(image, mode: mode, uploadStatus: nil)
}
}
mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in
@ -12260,6 +12229,47 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
func cancelItemSelection() {
self.headerNode.navigationButtonContainer.performAction?(.selectionDone, nil, nil)
}

func openAvatarGallery(peer: EnginePeer, entries: [AvatarGalleryEntry], centralEntry: AvatarGalleryEntry?, animateTransition: Bool) {
let entriesPromise = Promise<[AvatarGalleryEntry]>(entries)
let galleryController = AvatarGalleryController(context: self.context, peer: peer, sourceCorners: .round, remoteEntries: entriesPromise, skipInitial: true, centralEntryIndex: centralEntry.flatMap { entries.firstIndex(of: $0) }, replaceRootController: { controller, ready in
})
galleryController.openAvatarSetup = { [weak self] completion in
self?.controller?.openAvatarForEditing(fromGallery: true, completion: { _ in
completion()
})
}
galleryController.avatarPhotoEditCompletion = { [weak self] image in
self?.controller?.updateProfilePhoto(image, mode: .generic, uploadStatus: nil)
}
galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in
self?.controller?.updateProfileVideo(image, asset: asset, adjustments: adjustments, mode: .generic)
}
galleryController.removedEntry = { [weak self] entry in
if let item = PeerInfoAvatarListItem(entry: entry) {
let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(item)
}
}
self.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).startStrict(next: { [weak self] entry in
self?.headerNode.updateAvatarIsHidden(entry: entry)
}))
self.view.endEditing(true)
let arguments = AvatarGalleryControllerPresentationArguments(transitionArguments: { [weak self] _ in
if animateTransition, let entry = centralEntry, let transitionNode = self?.headerNode.avatarTransitionArguments(entry: entry) {
return GalleryTransitionArguments(transitionNode: transitionNode, addToTransitionSurface: { view in
self?.headerNode.addToAvatarTransitionSurface(view: view)
})
} else {
return nil
}
})
if self.controller?.navigationController != nil {
self.controller?.present(galleryController, in: .window(.root), with: arguments)
} else {
galleryController.presentationArguments = arguments
self.context.sharedContext.mainWindow?.present(galleryController, on: .root)
}
}
}

public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortcutResponder {
@ -12760,9 +12770,9 @@ public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortc
}
}

public func openAvatarSetup() {
public func openAvatarSetup(completedWithUploadingImage: @escaping (UIImage, Signal<PeerInfoAvatarUploadStatus, NoError>) -> UIView?) {
let proceed = { [weak self] in
self?.openAvatarForEditing()
self?.newopenAvatarForEditing(completedWithUploadingImage: completedWithUploadingImage)
}
if !self.isNodeLoaded {
self.loadDisplayNode()
@ -12774,6 +12784,18 @@ public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortc
}
}

public func openAvatars() {
let _ = (self.context.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: self.peerId)
)
|> deliverOnMainQueue).startStandalone(next: { [weak self] peer in
guard let self, let peer else {
return
}
self.controllerNode.openAvatarGallery(peer: peer, entries: self.controllerNode.headerNode.avatarListNode.listContainerNode.galleryEntries, centralEntry: nil, animateTransition: false)
})
}

func openAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }) {
self.controllerNode.openAvatarForEditing(mode: mode, fromGallery: fromGallery, completion: completion)
}
@ -19,164 +19,197 @@ import LegacyComponents
import LegacyMediaPickerUI

extension PeerInfoScreenImpl {
// func newopenAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }) {
// guard let data = self.controllerNode.data, let peer = data.peer, mode != .generic || canEditPeerInfo(context: self.context, peer: peer, chatLocation: self.chatLocation, threadData: data.threadData) else {
// return
// }
// self.view.endEditing(true)
//
// let peerId = self.peerId
// var isForum = false
// if let peer = peer as? TelegramChannel, peer.flags.contains(.isForum) {
// isForum = true
// }
//
// var currentIsVideo = false
// var emojiMarkup: TelegramMediaImage.EmojiMarkup?
// let item = self.controllerNode.headerNode.avatarListNode.listContainerNode.currentItemNode?.item
// if let item = item, case let .image(_, _, videoRepresentations, _, _, emojiMarkupValue) = item {
// currentIsVideo = !videoRepresentations.isEmpty
// emojiMarkup = emojiMarkupValue
// }
//
// let _ = isForum
// let _ = currentIsVideo
//
// let _ = (self.context.engine.data.get(
// TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)
// )
// |> deliverOnMainQueue).startStandalone(next: { [weak self] peer in
// guard let self, let peer else {
// return
// }
//
// let keyboardInputData = Promise<AvatarKeyboardInputData>()
// keyboardInputData.set(AvatarEditorScreen.inputData(context: self.context, isGroup: peer.id.namespace != Namespaces.Peer.CloudUser))
//
// var hasPhotos = false
// if !peer.profileImageRepresentations.isEmpty {
// hasPhotos = true
// }
//
// var hasDeleteButton = false
// if case .generic = mode {
// hasDeleteButton = hasPhotos && !fromGallery
// } else if case .custom = mode {
// hasDeleteButton = peer.profileImageRepresentations.first?.isPersonal == true
// } else if case .fallback = mode {
// if let cachedData = data.cachedData as? CachedUserData, case let .known(photo) = cachedData.fallbackPhoto {
// hasDeleteButton = photo != nil
// }
// }
//
// let _ = hasDeleteButton
//
// let parentController = (self.context.sharedContext.mainWindow?.viewController as? NavigationController)?.topViewController as? ViewController
//
// var dismissImpl: (() -> Void)?
// let mainController = self.context.sharedContext.makeAvatarMediaPickerScreen(context: self.context, getSourceRect: { return nil }, canDelete: hasDeleteButton, performDelete: { [weak self] in
// self?.openAvatarRemoval(mode: mode, peer: peer, item: item)
// }, completion: { result, transitionView, transitionRect, transitionImage, fromCamera, transitionOut, cancelled in
// let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
// if let asset = result as? PHAsset {
// subject = .single(.asset(asset))
// } else if let image = result as? UIImage {
// subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
// } else if let result = result as? Signal<CameraScreenImpl.Result, NoError> {
// subject = result
// |> map { value -> MediaEditorScreenImpl.Subject? in
// switch value {
// case .pendingImage:
// return nil
// case let .image(image):
// return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
// case let .video(video):
// return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: [], additionalVideoPosition: .topLeft)
// default:
// return nil
// }
// }
// } else {
// let peerType: AvatarEditorScreen.PeerType
// if mode == .suggest {
// peerType = .suggest
// } else if case .legacyGroup = peer {
// peerType = .group
// } else if case let .channel(channel) = peer {
// if case .group = channel.info {
// peerType = channel.flags.contains(.isForum) ? .forum : .group
// } else {
// peerType = .channel
// }
// } else {
// peerType = .user
// }
// let controller = AvatarEditorScreen(context: self.context, inputData: keyboardInputData.get(), peerType: peerType, markup: emojiMarkup)
// //controller.imageCompletion = imageCompletion
// //controller.videoCompletion = videoCompletion
// parentController?.push(controller)
// //isFromEditor = true
// return
// }
//
// let editorController = MediaEditorScreenImpl(
// context: self.context,
// mode: .avatarEditor,
// subject: subject,
// transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery(
// MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
// sourceView: $0,
// sourceRect: transitionRect,
// sourceImage: transitionImage
// )
// ) }),
// transitionOut: { finished, isNew in
// if !finished, let transitionView {
// return MediaEditorScreenImpl.TransitionOut(
// destinationView: transitionView,
// destinationRect: transitionView.bounds,
// destinationCornerRadius: 0.0
// )
// }
// return nil
// }, completion: { [weak self] result, commit in
// dismissImpl?()
//
// switch result.media {
// case let .image(image, _):
// self?.updateProfilePhoto(image, mode: mode)
// commit({})
// case let .video(video, coverImage, values, _, _):
// if let coverImage {
// self?.updateProfileVideo(coverImage, asset: video, adjustments: values, mode: mode)
// }
// commit({})
// default:
// break
// }
// } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
// )
// editorController.cancelled = { _ in
// cancelled()
// }
// self.push(editorController)
// }, dismissed: {
//
// })
// dismissImpl = { [weak mainController] in
// if let mainController, let navigationController = mainController.navigationController {
// var viewControllers = navigationController.viewControllers
// viewControllers = viewControllers.filter { c in
// return !(c is CameraScreen) && c !== mainController
// }
// navigationController.setViewControllers(viewControllers, animated: false)
// }
// }
// mainController.navigationPresentation = .flatModal
// mainController.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
// self.push(mainController)
// })
// }
func newopenAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }, completedWithUploadingImage: @escaping (UIImage, Signal<PeerInfoAvatarUploadStatus, NoError>) -> UIView? = { _, _ in nil }) {
guard let data = self.controllerNode.data, let peer = data.peer, mode != .generic || canEditPeerInfo(context: self.context, peer: peer, chatLocation: self.chatLocation, threadData: data.threadData) else {
return
}
self.view.endEditing(true)

let peerId = self.peerId
var isForum = false
if let peer = peer as? TelegramChannel, peer.flags.contains(.isForum) {
isForum = true
}

var currentIsVideo = false
var emojiMarkup: TelegramMediaImage.EmojiMarkup?
let item = self.controllerNode.headerNode.avatarListNode.listContainerNode.currentItemNode?.item
if let item = item, case let .image(_, _, videoRepresentations, _, _, emojiMarkupValue) = item {
currentIsVideo = !videoRepresentations.isEmpty
emojiMarkup = emojiMarkupValue
}

let _ = isForum
let _ = currentIsVideo

let _ = (self.context.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)
)
|> deliverOnMainQueue).startStandalone(next: { [weak self] peer in
guard let self, let peer else {
return
}

let keyboardInputData = Promise<AvatarKeyboardInputData>()
keyboardInputData.set(AvatarEditorScreen.inputData(context: self.context, isGroup: peer.id.namespace != Namespaces.Peer.CloudUser))

var hasPhotos = false
if !peer.profileImageRepresentations.isEmpty {
hasPhotos = true
}

var hasDeleteButton = false
if case .generic = mode {
hasDeleteButton = hasPhotos && !fromGallery
} else if case .custom = mode {
hasDeleteButton = peer.profileImageRepresentations.first?.isPersonal == true
} else if case .fallback = mode {
if let cachedData = data.cachedData as? CachedUserData, case let .known(photo) = cachedData.fallbackPhoto {
hasDeleteButton = photo != nil
}
}

let _ = hasDeleteButton

let parentController = (self.context.sharedContext.mainWindow?.viewController as? NavigationController)?.topViewController as? ViewController

var dismissImpl: (() -> Void)?
let mainController = self.context.sharedContext.makeAvatarMediaPickerScreen(context: self.context, getSourceRect: { return nil }, canDelete: hasDeleteButton, performDelete: { [weak self] in
self?.openAvatarRemoval(mode: mode, peer: peer, item: item)
}, completion: { result, transitionView, transitionRect, transitionImage, fromCamera, transitionOut, cancelled in
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
if let asset = result as? PHAsset {
subject = .single(.asset(asset))
} else if let image = result as? UIImage {
subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight))
} else if let result = result as? Signal<CameraScreenImpl.Result, NoError> {
subject = result
|> map { value -> MediaEditorScreenImpl.Subject? in
switch value {
case .pendingImage:
return nil
case let .image(image):
return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft)
case let .video(video):
return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: [], additionalVideoPosition: .topLeft)
default:
return nil
}
}
} else {
let peerType: AvatarEditorScreen.PeerType
if mode == .suggest {
peerType = .suggest
} else if case .legacyGroup = peer {
peerType = .group
} else if case let .channel(channel) = peer {
if case .group = channel.info {
peerType = channel.flags.contains(.isForum) ? .forum : .group
} else {
peerType = .channel
}
} else {
peerType = .user
}
let controller = AvatarEditorScreen(context: self.context, inputData: keyboardInputData.get(), peerType: peerType, markup: emojiMarkup)
//controller.imageCompletion = imageCompletion
//controller.videoCompletion = videoCompletion
parentController?.push(controller)
//isFromEditor = true
return
}

var resultImage: UIImage?
let uploadStatusPromise = Promise<PeerInfoAvatarUploadStatus>(.progress(0.0))
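// Seeded with 0% so the toast can show determinate progress immediately.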
let editorController = MediaEditorScreenImpl(
context: self.context,
mode: .avatarEditor,
subject: subject,
transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery(
MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn(
sourceView: $0,
sourceRect: transitionRect,
sourceImage: transitionImage
)
) }),
transitionOut: { finished, isNew in
if !finished {
if let transitionView {
return MediaEditorScreenImpl.TransitionOut(
destinationView: transitionView,
destinationRect: transitionView.bounds,
destinationCornerRadius: 0.0
)
}
} else if let resultImage, let transitionOutView = completedWithUploadingImage(resultImage, uploadStatusPromise.get()) {
transitionOutView.isHidden = true
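// Keep the real avatar view hidden while the editor snapshot animates into its place; it is restored in the completion below.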
return MediaEditorScreenImpl.TransitionOut(
destinationView: transitionOutView,
destinationRect: transitionOutView.bounds,
destinationCornerRadius: transitionOutView.bounds.height * 0.5,
completion: { [weak transitionOutView] in
transitionOutView?.isHidden = false
}
)
}
return nil
}, completion: { [weak self] result, commit in
switch result.media {
case let .image(image, _):
resultImage = image
self?.updateProfilePhoto(image, mode: mode, uploadStatus: uploadStatusPromise)
commit({})
case let .video(video, coverImage, values, _, _):
if let coverImage {
let _ = values
//TODO:release
resultImage = coverImage
self?.updateProfileVideo(coverImage, asset: video, adjustments: nil, mode: mode)
}
commit({})
default:
break
}

dismissImpl?()
} as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void
)
editorController.cancelled = { _ in
cancelled()
}
if self.navigationController != nil {
self.push(editorController)
} else {
self.parentController?.pushViewController(editorController)
}
}, dismissed: {

})
dismissImpl = { [weak self, weak mainController] in
if let mainController, let navigationController = mainController.navigationController {
var viewControllers = navigationController.viewControllers
viewControllers = viewControllers.filter { c in
return !(c is CameraScreen) && c !== mainController
}
navigationController.setViewControllers(viewControllers, animated: false)
}
if let self, let navigationController = self.parentController, let mainController {
var viewControllers = navigationController.viewControllers
viewControllers = viewControllers.filter { c in
return !(c is CameraScreen) && c !== mainController
}
navigationController.setViewControllers(viewControllers, animated: false)
}
}
mainController.navigationPresentation = .flatModal
mainController.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
if self.navigationController != nil {
self.push(mainController)
} else {
self.parentController?.pushViewController(mainController)
}
})
}

func openAvatarRemoval(mode: PeerInfoAvatarEditingMode, peer: EnginePeer? = nil, item: PeerInfoAvatarListItem? = nil, completion: @escaping () -> Void = {}) {
let proceed = { [weak self] in
@ -251,8 +284,9 @@ extension PeerInfoScreenImpl {
(self.navigationController?.topViewController as? ViewController)?.present(actionSheet, in: .window(.root))
}

public func updateProfilePhoto(_ image: UIImage, mode: PeerInfoAvatarEditingMode) {
public func updateProfilePhoto(_ image: UIImage, mode: PeerInfoAvatarEditingMode, uploadStatus: Promise<PeerInfoAvatarUploadStatus>?) {
guard let data = image.jpegData(compressionQuality: 0.6) else {
uploadStatus?.set(.single(.done))
return
}

@ -328,8 +362,10 @@ extension PeerInfoScreenImpl {
}
switch result {
case .complete:
uploadStatus?.set(.single(.done))
strongSelf.controllerNode.state = strongSelf.controllerNode.state.withUpdatingAvatar(nil).withAvatarUploadProgress(nil)
case let .progress(value):
uploadStatus?.set(.single(.progress(value)))
strongSelf.controllerNode.state = strongSelf.controllerNode.state.withAvatarUploadProgress(.value(CGFloat(value)))
}
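// Upload state is mirrored both into the header progress ring and into the caller-supplied status promise.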
if let (layout, navigationHeight) = strongSelf.controllerNode.validLayout {
@ -78,7 +78,7 @@ public final class ForwardInfoPanelComponent: Component {
self.blurBackgroundView.clipsToBounds = true

self.backgroundView = UIImageView()
self.backgroundView.image = generateStretchableFilledCircleImage(radius: 4.0, color: UIColor(white: 1.0, alpha: 0.4))
self.backgroundView.image = generateStretchableFilledCircleImage(radius: 4.0, color: UIColor(white: 0.0, alpha: 0.4))

self.blockView = MessageInlineBlockBackgroundView()

@ -1122,6 +1122,14 @@ private final class StoryContainerScreenComponent: Component {
self.didAnimateOut = true
}

func inFocusUpdated(isInFocus: Bool) {
for (_, itemSetView) in self.visibleItemSetViews {
if let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View {
itemSetComponentView.inFocusUpdated(isInFocus: isInFocus)
}
}
}

private func updateVolumeButtonMonitoring() {
guard self.volumeButtonsListener == nil, let component = self.component else {
return
@ -2126,6 +2134,14 @@ public class StoryContainerScreen: ViewControllerComponentContainer {
}
}
}

override public func inFocusUpdated(isInFocus: Bool) {
super.inFocusUpdated(isInFocus: isInFocus)

if let componentView = self.node.hostView.componentView as? StoryContainerScreenComponent.View {
componentView.inFocusUpdated(isInFocus: isInFocus)
}
}
}

func allowedStoryReactions(context: AccountContext) -> Signal<[ReactionItem], NoError> {
@ -1908,6 +1908,10 @@ public final class StoryItemSetContainerComponent: Component {
}
}

func inFocusUpdated(isInFocus: Bool) {
self.updateIsProgressPaused()
}

func activateInput() -> Bool {
guard let component = self.component else {
return false
@ -5205,6 +5209,7 @@ public final class StoryItemSetContainerComponent: Component {
}
navigationController.setViewControllers(viewControllers, animated: true)
}
self.updateIsProgressPaused()
}

func navigateToPeer(peer: EnginePeer, chat: Bool, subject: ChatControllerSubject? = nil) {
@ -410,7 +410,7 @@ public final class TextFieldComponent: Component {
}

self.updateInputState { state in
if let characterLimit = component.characterLimit, state.inputText.length + text.length > characterLimit {
if let characterLimit = component.characterLimit, state.inputText.string.count + text.string.count > characterLimit {
return state
}
return state.insertText(text)
@ -732,14 +732,21 @@ public final class TextFieldComponent: Component {
}

if let characterLimit = component.characterLimit {
let replacementString = text as NSString
let string = self.inputState.inputText.string as NSString
let deltaLength = replacementString.length - range.length
let resultingLength = string.length + deltaLength
let changingRangeString = string.substring(with: range)

let deltaLength = text.count - changingRangeString.count
let resultingLength = (string as String).count + deltaLength
if resultingLength > characterLimit {
let availableLength = characterLimit - string.length
let availableLength = characterLimit - (string as String).count
if availableLength > 0 {
var insertString = replacementString.substring(to: availableLength)
var insertString = ""
for i in 0 ..< availableLength {
if text.count <= i {
break
}
insertString.append(text[text.index(text.startIndex, offsetBy: i)])
}
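// Counting and slicing by Swift Character (grapheme cluster) instead of NSString UTF-16 length means composed characters such as emoji count once against the limit.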

switch component.emptyLineHandling {
case .allowed:
@ -2395,6 +2395,11 @@ extension ChatControllerImpl {

strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: interactive, { current in
return current.updatedSearch(current.search == nil ? ChatSearchData(domain: domain).withUpdatedQuery(query) : current.search?.withUpdatedDomain(domain).withUpdatedQuery(query))
}, completion: { [weak strongSelf] _ in
guard let strongSelf else {
return
}
strongSelf.chatDisplayNode.searchNavigationNode?.activate()
})
strongSelf.updateItemNodesSearchTextHighlightStates()
})
@ -1228,7 +1228,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
controller.imageCompletion = { [weak self] image, commit in
if let strongSelf = self {
if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl {
settingsController.updateProfilePhoto(image, mode: .accept)
settingsController.updateProfilePhoto(image, mode: .accept, uploadStatus: nil)
commit()
}
}
@ -1265,7 +1265,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}, imageCompletion: { [weak self] image in
if let strongSelf = self {
if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl {
settingsController.updateProfilePhoto(image, mode: .accept)
settingsController.updateProfilePhoto(image, mode: .accept, uploadStatus: nil)
}
}
}, videoCompletion: { [weak self] image, url, adjustments in
@ -7812,7 +7812,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.chatDisplayNode.historyNode.experimentalSnapScrollToItem = false
self.chatDisplayNode.historyNode.canReadHistory.set(self.computedCanReadHistoryPromise.get())

self.chatDisplayNode.loadInputPanels(theme: self.presentationInterfaceState.theme, strings: self.presentationInterfaceState.strings, fontSize: self.presentationInterfaceState.fontSize)
if !self.alwaysShowSearchResultsAsList {
self.chatDisplayNode.loadInputPanels(theme: self.presentationInterfaceState.theme, strings: self.presentationInterfaceState.strings, fontSize: self.presentationInterfaceState.fontSize)
}

if self.recentlyUsedInlineBotsDisposable == nil {
self.recentlyUsedInlineBotsDisposable = (self.context.engine.peers.recentlyUsedInlineBots() |> deliverOnMainQueue).startStrict(next: { [weak self] peers in
@ -54,6 +54,7 @@ final class VideoNavigationControllerDropContentItem: NavigationControllerDropCo
}

private final class ChatControllerNodeView: UITracingLayerView, WindowInputAccessoryHeightProvider {
weak var node: ChatControllerNode?
var inputAccessoryHeight: (() -> CGFloat)?
var hitTestImpl: ((CGPoint, UIEvent?) -> UIView?)?

@ -65,7 +66,17 @@ private final class ChatControllerNodeView: UITracingLayerView, WindowInputAcces
if let result = self.hitTestImpl?(point, event) {
return result
}
return super.hitTest(point, with: event)
guard let result = super.hitTest(point, with: event) else {
return nil
}
if let node = self.node {
if result === node.historyNodeContainer.view {
if node.historyNode.alpha == 0.0 {
return nil
}
}
}
return result
}
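// A fully transparent history container no longer swallows touches; they fall through to the views beneath.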
}

@ -172,7 +183,7 @@ class ChatControllerNode: ASDisplayNode, ASScrollViewDelegate {
private(set) var validLayout: (ContainerViewLayout, CGFloat)?
private var visibleAreaInset = UIEdgeInsets()

private var searchNavigationNode: ChatSearchNavigationContentNode?
private(set) var searchNavigationNode: ChatSearchNavigationContentNode?

private var navigationModalFrame: NavigationModalFrame?

@ -727,6 +738,8 @@ class ChatControllerNode: ASDisplayNode, ASScrollViewDelegate {
return ChatControllerNodeView()
})

(self.view as? ChatControllerNodeView)?.node = self

(self.view as? ChatControllerNodeView)?.inputAccessoryHeight = { [weak self] in
if let strongSelf = self {
return strongSelf.getWindowInputAccessoryHeight()
@ -785,7 +785,87 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
if peerId.namespace == Namespaces.Peer.CloudUser {
adMessages = .single((nil, []))
} else {
adMessages = adMessagesContext.state
if context.sharedContext.immediateExperimentalUISettings.fakeAds {
adMessages = context.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)
)
|> map { peer -> (interPostInterval: Int32?, messages: [Message]) in
let fakeAdMessages: [Message] = (0 ..< 10).map { i -> Message in
var attributes: [MessageAttribute] = []

let mappedMessageType: AdMessageAttribute.MessageType = .sponsored
attributes.append(AdMessageAttribute(opaqueId: "fake_ad_\(i)".data(using: .utf8)!, messageType: mappedMessageType, url: "t.me/telegram", buttonText: "VIEW", sponsorInfo: nil, additionalInfo: nil, canReport: false, hasContentMedia: false))

var messagePeers = SimpleDictionary<PeerId, Peer>()

if let peer {
messagePeers[peer.id] = peer._asPeer()
}

let author: Peer = TelegramChannel(
id: PeerId(namespace: Namespaces.Peer.CloudChannel, id: PeerId.Id._internalFromInt64Value(1)),
accessHash: nil,
title: "Fake Ad",
username: nil,
photo: [],
creationDate: 0,
version: 0,
participationStatus: .left,
info: .broadcast(TelegramChannelBroadcastInfo(flags: [])),
flags: [],
restrictionInfo: nil,
adminRights: nil,
bannedRights: nil,
defaultBannedRights: nil,
usernames: [],
storiesHidden: nil,
nameColor: .blue,
backgroundEmojiId: nil,
profileColor: nil,
profileBackgroundEmojiId: nil,
emojiStatus: nil,
approximateBoostLevel: nil,
subscriptionUntilDate: nil,
verificationIconFileId: nil
)
messagePeers[author.id] = author

let messageText = "Fake Ad N\(i)"
let messageHash = (messageText.hashValue &+ 31 &* peerId.hashValue) &* 31 &+ author.id.hashValue
let messageStableVersion = UInt32(bitPattern: Int32(truncatingIfNeeded: messageHash))
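// Deriving stableVersion from a content hash keeps identical fake ads from being treated as updated entries on each regeneration (assumed intent).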
|
||||
|
||||
return Message(
|
||||
stableId: 0,
|
||||
stableVersion: messageStableVersion,
|
||||
id: MessageId(peerId: peerId, namespace: Namespaces.Message.Local, id: 0),
|
||||
globallyUniqueId: nil,
|
||||
groupingKey: nil,
|
||||
groupInfo: nil,
|
||||
threadId: nil,
|
||||
timestamp: Int32.max - 1,
|
||||
flags: [.Incoming],
|
||||
tags: [],
|
||||
globalTags: [],
|
||||
localTags: [],
|
||||
customTags: [],
|
||||
forwardInfo: nil,
|
||||
author: author,
|
||||
text: messageText,
|
||||
attributes: attributes,
|
||||
media: [],
|
||||
peers: messagePeers,
|
||||
associatedMessages: SimpleDictionary<MessageId, Message>(),
|
||||
associatedMessageIds: [],
|
||||
associatedMedia: [:],
|
||||
associatedThreadInfo: nil,
|
||||
associatedStories: [:]
|
||||
)
|
||||
}
|
||||
return (10, fakeAdMessages)
|
||||
}
|
||||
} else {
|
||||
adMessages = adMessagesContext.state
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.adMessagesContext = nil
|
||||
@ -2444,6 +2524,10 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
|
||||
var insertionTimestamp: Int32?
|
||||
if self.currentPrefetchDirectionIsToLater {
|
||||
outer: for i in selectedRange.0 ... selectedRange.1 {
|
||||
if historyView.originalView.laterId == nil && i >= historyView.filteredEntries.count - 4 {
|
||||
break
|
||||
}
|
||||
|
||||
switch historyView.filteredEntries[i] {
|
||||
case let .MessageEntry(message, _, _, _, _, _):
|
||||
if message.id.namespace == Namespaces.Message.Cloud {
|
||||
|
@ -497,7 +497,7 @@ final class ChatSearchTitleAccessoryPanelNode: ChatTitleAccessoryPanelNode, Chat
|
||||
|
||||
var isFirstUpdate = true
|
||||
self.itemsDisposable = (combineLatest(
|
||||
context.engine.stickers.availableReactions(),
|
||||
context.availableReactions,
|
||||
context.engine.stickers.savedMessageTagData(),
|
||||
tagsAndFiles
|
||||
)
|
||||
|
@ -750,8 +750,12 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
        self.accountSettingsController?.openBirthdaySetup()
    }

    public func openPhotoSetup() {
        self.accountSettingsController?.openAvatarSetup()
    public func openPhotoSetup(completedWithUploadingImage: @escaping (UIImage, Signal<PeerInfoAvatarUploadStatus, NoError>) -> UIView?) {
        self.accountSettingsController?.openAvatarSetup(completedWithUploadingImage: completedWithUploadingImage)
    }

    public func openAvatars() {
        self.accountSettingsController?.openAvatars()
    }
}
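Editor's note: a hedged caller-side sketch of the new openPhotoSetup signature; `rootController` and the print are illustrative only. The closure receives the picked image plus an upload-status signal and may return an optional transition view:

rootController.openPhotoSetup(completedWithUploadingImage: { image, status in
    // Observe upload progress; the concrete cases of
    // PeerInfoAvatarUploadStatus are not shown in this diff.
    let _ = (status
    |> deliverOnMainQueue).start(next: { value in
        print("avatar upload status: \(value)")
    })
    // Return nil when no custom transition view is needed.
    return nil
})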
@ -64,6 +64,8 @@ public struct ExperimentalUISettings: Codable, Equatable {
    public var autoBenchmarkReflectors: Bool?
    public var conferenceCalls: Bool
    public var playerV2: Bool
    public var devRequests: Bool
    public var fakeAds: Bool

    public static var defaultSettings: ExperimentalUISettings {
        return ExperimentalUISettings(
@ -105,7 +107,9 @@
            enableLocalTranslation: false,
            autoBenchmarkReflectors: nil,
            conferenceCalls: false,
            playerV2: false
            playerV2: false,
            devRequests: false,
            fakeAds: false
        )
    }

@ -148,7 +152,9 @@
        enableLocalTranslation: Bool,
        autoBenchmarkReflectors: Bool?,
        conferenceCalls: Bool,
        playerV2: Bool
        playerV2: Bool,
        devRequests: Bool,
        fakeAds: Bool
    ) {
        self.keepChatNavigationStack = keepChatNavigationStack
        self.skipReadHistory = skipReadHistory
@ -189,6 +195,8 @@
        self.autoBenchmarkReflectors = autoBenchmarkReflectors
        self.conferenceCalls = conferenceCalls
        self.playerV2 = playerV2
        self.devRequests = devRequests
        self.fakeAds = fakeAds
    }

    public init(from decoder: Decoder) throws {
@ -233,6 +241,8 @@
        self.autoBenchmarkReflectors = try container.decodeIfPresent(Bool.self, forKey: "autoBenchmarkReflectors")
        self.conferenceCalls = try container.decodeIfPresent(Bool.self, forKey: "conferenceCalls") ?? false
        self.playerV2 = try container.decodeIfPresent(Bool.self, forKey: "playerV2") ?? false
        self.devRequests = try container.decodeIfPresent(Bool.self, forKey: "devRequests") ?? false
        self.fakeAds = try container.decodeIfPresent(Bool.self, forKey: "fakeAds") ?? false
    }

    public func encode(to encoder: Encoder) throws {
@ -277,6 +287,8 @@
        try container.encodeIfPresent(self.autoBenchmarkReflectors, forKey: "autoBenchmarkReflectors")
        try container.encodeIfPresent(self.conferenceCalls, forKey: "conferenceCalls")
        try container.encodeIfPresent(self.playerV2, forKey: "playerV2")
        try container.encodeIfPresent(self.devRequests, forKey: "devRequests")
        try container.encodeIfPresent(self.fakeAds, forKey: "fakeAds")
    }
}
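Editor's note: each new flag is decoded with decodeIfPresent plus a default, so settings blobs written by older builds (which lack the key) still decode without throwing. A self-contained sketch of that forward-compatible pattern, using a standard CodingKeys enum rather than the string-keyed container seen above (struct and key names are illustrative):

struct FeatureFlags: Codable {
    var fakeAds: Bool

    enum CodingKeys: String, CodingKey {
        case fakeAds
    }

    init(fakeAds: Bool = false) {
        self.fakeAds = fakeAds
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        // A missing key falls back to false instead of failing the whole decode.
        self.fakeAds = try container.decodeIfPresent(Bool.self, forKey: .fakeAds) ?? false
    }
}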
@ -230,7 +230,7 @@ public final class HLSVideoContent: UniversalVideoContent {

    public let id: AnyHashable
    public let nativeId: NativeVideoContentId
    let userLocation: MediaResourceUserLocation
    public let userLocation: MediaResourceUserLocation
    public let fileReference: FileMediaReference
    public let dimensions: CGSize
    public let duration: Double
@ -461,7 +461,6 @@ public final class OngoingGroupCallContext {
        #if os(iOS)
        let audioDevice: OngoingCallContext.AudioDevice?
        #endif
        let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max))

        let joinPayload = Promise<(String, UInt32)>()
        let networkState = ValuePromise<NetworkState>(NetworkState(isConnected: false, isTransitioningFromBroadcastToRtc: false), ignoreRepeated: true)
@ -507,14 +506,9 @@ public final class OngoingGroupCallContext {
            self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json")
            let tempStatsLogPath = self.tempStatsLogFile.path

            #if os(iOS)
            if sharedAudioDevice == nil {
                self.audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
            } else {
                self.audioDevice = sharedAudioDevice
            }
            self.audioDevice = sharedAudioDevice
            let audioDevice = self.audioDevice
            #endif

            var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
            var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
            var activityUpdatedImpl: (([UInt32]) -> Void)?
@ -882,7 +876,7 @@ public final class OngoingGroupCallContext {
            }
        }

        func stop(account: Account, reportCallId: CallId?) {
        func stop(account: Account?, reportCallId: CallId?, debugLog: Promise<String?>) {
            self.context.stop()

            let logPath = self.logPath
@ -892,16 +886,18 @@ public final class OngoingGroupCallContext {
            }
            let tempStatsLogPath = self.tempStatsLogFile.path

            debugLog.set(.single(nil))

            let queue = self.queue
            self.context.stop({
                queue.async {
                    if !statsLogPath.isEmpty {
                    if !statsLogPath.isEmpty, let account {
                        let logsPath = callLogsPath(account: account)
                        let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
                        let _ = try? FileManager.default.moveItem(atPath: tempStatsLogPath, toPath: statsLogPath)
                    }

                    if let callId = reportCallId, !statsLogPath.isEmpty, let data = try? Data(contentsOf: URL(fileURLWithPath: statsLogPath)), let dataString = String(data: data, encoding: .utf8) {
                    if let callId = reportCallId, !statsLogPath.isEmpty, let data = try? Data(contentsOf: URL(fileURLWithPath: statsLogPath)), let dataString = String(data: data, encoding: .utf8), let account {
                        let engine = TelegramEngine(account: account)
                        let _ = engine.calls.saveCallDebugLog(callId: callId, log: dataString).start(next: { result in
                            switch result {
@ -1219,9 +1215,9 @@ public final class OngoingGroupCallContext {
        }
    }

    public func stop(account: Account, reportCallId: CallId?) {
    public func stop(account: Account?, reportCallId: CallId?, debugLog: Promise<String?>) {
        self.impl.with { impl in
            impl.stop(account: account, reportCallId: reportCallId)
            impl.stop(account: account, reportCallId: reportCallId, debugLog: debugLog)
        }
    }
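Editor's note: stop now accepts an optional account and always fulfills the debugLog promise (with nil when no log is produced), so callers can await it without special-casing stops that happen after logout. A hedged caller-side sketch, assuming SwiftSignalKit; `callContext` is illustrative:

let debugLog = Promise<String?>()
callContext.stop(account: nil, reportCallId: nil, debugLog: debugLog)
let _ = (debugLog.get()
|> take(1)).start(next: { log in
    // nil means the context produced no debug log for this session.
    print("group call debug log: \(log ?? "<none>")")
})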
@ -3,11 +3,6 @@ import SwiftSignalKit
import CoreMedia
import ImageIO

private struct PayloadDescription: Codable {
    var id: UInt32
    var timestamp: Int32
}

private struct JoinPayload: Codable {
    var id: UInt32
    var string: String
@ -18,11 +13,6 @@ private struct JoinResponsePayload: Codable {
    var string: String
}

private struct KeepaliveInfo: Codable {
    var id: UInt32
    var timestamp: Int32
}

private struct CutoffPayload: Codable {
    var id: UInt32
    var timestamp: Int32
@ -370,6 +360,16 @@ private final class MappedFile {
}

public final class IpcGroupCallBufferAppContext {
    struct KeepaliveInfo: Codable {
        var id: UInt32
        var timestamp: Int32
    }

    struct PayloadDescription: Codable {
        var id: UInt32
        var timestamp: Int32
    }

    private let basePath: String
    private var audioServer: NamedPipeReader?

@ -460,7 +460,7 @@ public final class IpcGroupCallBufferAppContext {

    private func updateCallIsActive() {
        let timestamp = Int32(Date().timeIntervalSince1970)
        let payloadDescription = PayloadDescription(
        let payloadDescription = IpcGroupCallBufferAppContext.PayloadDescription(
            id: self.id,
            timestamp: timestamp
        )
@ -477,7 +477,7 @@ public final class IpcGroupCallBufferAppContext {
        guard let keepaliveInfoData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else {
            return
        }
        guard let keepaliveInfo = try? JSONDecoder().decode(KeepaliveInfo.self, from: keepaliveInfoData) else {
        guard let keepaliveInfo = try? JSONDecoder().decode(IpcGroupCallBufferAppContext.KeepaliveInfo.self, from: keepaliveInfoData) else {
            return
        }
        if keepaliveInfo.id != self.id {
@ -587,7 +587,7 @@ public final class IpcGroupCallBufferBroadcastContext {
            return
        }

        guard let payloadDescription = try? JSONDecoder().decode(PayloadDescription.self, from: payloadDescriptionData) else {
        guard let payloadDescription = try? JSONDecoder().decode(IpcGroupCallBufferAppContext.PayloadDescription.self, from: payloadDescriptionData) else {
            self.statusPromise.set(.single(.finished(.error)))
            return
        }
@ -646,7 +646,7 @@ public final class IpcGroupCallBufferBroadcastContext {
        guard let currentId = self.currentId else {
            preconditionFailure()
        }
        let keepaliveInfo = KeepaliveInfo(
        let keepaliveInfo = IpcGroupCallBufferAppContext.KeepaliveInfo(
            id: currentId,
            timestamp: Int32(Date().timeIntervalSince1970)
        )
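Editor's note: both sides of this IPC exchange JSON blobs through files under a shared app-group directory and treat a stale timestamp as a dead peer. A minimal sketch of the freshness check, assuming Foundation; the helper name and the timeout default are illustrative (the surrounding code uses its own keepaliveTimeout constant):

// Hypothetical standalone version of the keepalive freshness check.
func isPeerAlive(keepaliveData: Data, expectedId: UInt32, timeout: Int32 = 4) -> Bool {
    struct KeepaliveInfo: Codable {
        var id: UInt32
        var timestamp: Int32
    }
    guard let info = try? JSONDecoder().decode(KeepaliveInfo.self, from: keepaliveData) else {
        return false
    }
    let now = Int32(Date().timeIntervalSince1970)
    // A mismatched id or a timestamp older than the window means the peer is gone.
    return info.id == expectedId && info.timestamp >= now - timeout
}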
@ -795,3 +795,319 @@ public func deserializePixelBuffer(data: Data) -> CVPixelBuffer? {
        }
    }
}

public final class IpcGroupCallEmbeddedAppContext {
    public struct JoinPayload: Codable, Equatable {
        public var id: UInt32
        public var data: String
        public var ssrc: UInt32

        public init(id: UInt32, data: String, ssrc: UInt32) {
            self.id = id
            self.data = data
            self.ssrc = ssrc
        }
    }

    public struct JoinResponse: Codable, Equatable {
        public var data: String

        public init(data: String) {
            self.data = data
        }
    }

    struct KeepaliveInfo: Codable {
        var id: UInt32
        var timestamp: Int32
        var joinPayload: JoinPayload?

        init(id: UInt32, timestamp: Int32, joinPayload: JoinPayload?) {
            self.id = id
            self.timestamp = timestamp
            self.joinPayload = joinPayload
        }
    }

    struct PayloadDescription: Codable {
        var id: UInt32
        var timestamp: Int32
        var activeRequestId: UInt32?
        var joinResponse: JoinResponse?

        init(id: UInt32, timestamp: Int32, activeRequestId: UInt32?, joinResponse: JoinResponse?) {
            self.id = id
            self.timestamp = timestamp
            self.activeRequestId = activeRequestId
            self.joinResponse = joinResponse
        }
    }

    private let basePath: String

    private let id: UInt32

    private let isActivePromise = ValuePromise<Bool>(false, ignoreRepeated: true)
    public var isActive: Signal<Bool, NoError> {
        return self.isActivePromise.get()
    }
    private var isActiveCheckTimer: SwiftSignalKit.Timer?

    private var joinPayloadValue: JoinPayload? {
        didSet {
            if let joinPayload = self.joinPayloadValue, joinPayload != oldValue {
                self.joinPayloadPromise.set(.single(joinPayload))
            }
        }
    }
    private let joinPayloadPromise = Promise<JoinPayload>()
    public var joinPayload: Signal<JoinPayload, NoError> {
        return self.joinPayloadPromise.get()
    }

    private var nextActiveRequestId: UInt32 = 0
    private var activeRequestId: UInt32? {
        didSet {
            if self.activeRequestId != oldValue {
                self.updateCallIsActive()
            }
        }
    }

    public var joinResponse: JoinResponse? {
        didSet {
            if self.joinResponse != oldValue {
                self.updateCallIsActive()
            }
        }
    }

    private var callActiveInfoTimer: SwiftSignalKit.Timer?

    public init(basePath: String) {
        self.basePath = basePath
        let _ = try? FileManager.default.createDirectory(atPath: basePath, withIntermediateDirectories: true, attributes: nil)

        self.id = UInt32.random(in: 0 ..< UInt32.max)

        self.updateCallIsActive()

        let callActiveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
            self?.updateCallIsActive()
        }, queue: .mainQueue())
        self.callActiveInfoTimer = callActiveInfoTimer
        callActiveInfoTimer.start()

        let isActiveCheckTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
            self?.updateKeepaliveInfo()
        }, queue: .mainQueue())
        self.isActiveCheckTimer = isActiveCheckTimer
        isActiveCheckTimer.start()
    }

    deinit {
        self.callActiveInfoTimer?.invalidate()
        self.isActiveCheckTimer?.invalidate()
    }

    private func updateCallIsActive() {
        let timestamp = Int32(Date().timeIntervalSince1970)
        let payloadDescription = IpcGroupCallEmbeddedAppContext.PayloadDescription(
            id: self.id,
            timestamp: timestamp,
            activeRequestId: self.activeRequestId,
            joinResponse: self.joinResponse
        )
        guard let payloadDescriptionData = try? JSONEncoder().encode(payloadDescription) else {
            return
        }
        guard let _ = try? payloadDescriptionData.write(to: URL(fileURLWithPath: payloadDescriptionPath(basePath: self.basePath)), options: .atomic) else {
            return
        }
    }

    private func updateKeepaliveInfo() {
        let filePath = keepaliveInfoPath(basePath: self.basePath)
        guard let keepaliveInfoData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else {
            return
        }
        guard let keepaliveInfo = try? JSONDecoder().decode(KeepaliveInfo.self, from: keepaliveInfoData) else {
            return
        }
        if keepaliveInfo.id != self.id {
            self.isActivePromise.set(false)
            return
        }
        let timestamp = Int32(Date().timeIntervalSince1970)
        if keepaliveInfo.timestamp < timestamp - Int32(keepaliveTimeout) {
            self.isActivePromise.set(false)
            return
        }

        self.isActivePromise.set(true)

        self.joinPayloadValue = keepaliveInfo.joinPayload
    }

    public func startScreencast() -> UInt32? {
        if self.activeRequestId == nil {
            let id = self.nextActiveRequestId
            self.nextActiveRequestId += 1
            self.activeRequestId = id
            return id
        } else {
            return nil
        }
    }

    public func stopScreencast() {
        self.activeRequestId = nil

        let timestamp = Int32(Date().timeIntervalSince1970)
        let cutoffPayload = CutoffPayload(
            id: self.id,
            timestamp: timestamp
        )
        guard let cutoffPayloadData = try? JSONEncoder().encode(cutoffPayload) else {
            return
        }
        guard let _ = try? cutoffPayloadData.write(to: URL(fileURLWithPath: cutoffPayloadPath(basePath: self.basePath)), options: .atomic) else {
            return
        }
    }
}
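Editor's note: a hedged usage sketch of the new embedded app context; the base path expression is illustrative. startScreencast returns nil while a request is already in flight, so only one screencast request can be active at a time:

let appContext = IpcGroupCallEmbeddedAppContext(basePath: rootPath + "/embedded-coordination")
if let requestId = appContext.startScreencast() {
    print("screencast request \(requestId) started")
}
let _ = appContext.joinPayload.start(next: { payload in
    // The broadcast side publishes its join payload through the keepalive file.
    print("join payload from broadcast extension: \(payload.id)")
})
appContext.stopScreencast()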
public final class IpcGroupCallEmbeddedBroadcastContext {
    public enum Status {
        public enum FinishReason {
            case screencastEnded
            case callEnded
            case error
        }
        case active(id: UInt32?, joinResponse: IpcGroupCallEmbeddedAppContext.JoinResponse?)
        case finished(FinishReason)
    }

    private let basePath: String
    private var timer: SwiftSignalKit.Timer?

    private let statusPromise = Promise<Status>()
    public var status: Signal<Status, NoError> {
        return self.statusPromise.get()
    }

    private var currentId: UInt32?

    private var callActiveInfoTimer: SwiftSignalKit.Timer?
    private var keepaliveInfoTimer: SwiftSignalKit.Timer?
    private var screencastCutoffTimer: SwiftSignalKit.Timer?

    public var joinPayload: IpcGroupCallEmbeddedAppContext.JoinPayload? {
        didSet {
            if self.joinPayload != oldValue {
                self.writeKeepaliveInfo()
            }
        }
    }

    public init(basePath: String) {
        self.basePath = basePath
        let _ = try? FileManager.default.createDirectory(atPath: basePath, withIntermediateDirectories: true, attributes: nil)

        let callActiveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
            self?.updateCallIsActive()
        }, queue: .mainQueue())
        self.callActiveInfoTimer = callActiveInfoTimer
        callActiveInfoTimer.start()

        let screencastCutoffTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
            self?.updateScreencastCutoff()
        }, queue: .mainQueue())
        self.screencastCutoffTimer = screencastCutoffTimer
        screencastCutoffTimer.start()
    }

    deinit {
        self.endActiveIndication()

        self.callActiveInfoTimer?.invalidate()
        self.keepaliveInfoTimer?.invalidate()
        self.screencastCutoffTimer?.invalidate()
    }

    private func updateScreencastCutoff() {
        let filePath = cutoffPayloadPath(basePath: self.basePath)
        guard let cutoffPayloadData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else {
            return
        }

        guard let cutoffPayload = try? JSONDecoder().decode(CutoffPayload.self, from: cutoffPayloadData) else {
            return
        }

        let timestamp = Int32(Date().timeIntervalSince1970)
        if let currentId = self.currentId, currentId == cutoffPayload.id && cutoffPayload.timestamp > timestamp - 10 {
            self.statusPromise.set(.single(.finished(.screencastEnded)))
            return
        }
    }

    private func updateCallIsActive() {
        let filePath = payloadDescriptionPath(basePath: self.basePath)
        guard let payloadDescriptionData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else {
            self.statusPromise.set(.single(.finished(.error)))
            return
        }

        guard let payloadDescription = try? JSONDecoder().decode(IpcGroupCallEmbeddedAppContext.PayloadDescription.self, from: payloadDescriptionData) else {
            self.statusPromise.set(.single(.finished(.error)))
            return
        }
        let timestamp = Int32(Date().timeIntervalSince1970)
        if payloadDescription.timestamp < timestamp - 4 {
            self.statusPromise.set(.single(.finished(.callEnded)))
            return
        }

        if let currentId = self.currentId {
            if currentId != payloadDescription.id {
                self.statusPromise.set(.single(.finished(.callEnded)))
            } else {
                self.statusPromise.set(.single(.active(id: payloadDescription.activeRequestId, joinResponse: payloadDescription.joinResponse)))
            }
        } else {
            self.currentId = payloadDescription.id

            self.writeKeepaliveInfo()

            let keepaliveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
                self?.writeKeepaliveInfo()
            }, queue: .mainQueue())
            self.keepaliveInfoTimer = keepaliveInfoTimer
            keepaliveInfoTimer.start()

            self.statusPromise.set(.single(.active(id: payloadDescription.activeRequestId, joinResponse: payloadDescription.joinResponse)))
        }
    }

    private func writeKeepaliveInfo() {
        guard let currentId = self.currentId else {
            preconditionFailure()
        }
        let keepaliveInfo = IpcGroupCallEmbeddedAppContext.KeepaliveInfo(
            id: currentId,
            timestamp: Int32(Date().timeIntervalSince1970),
            joinPayload: self.joinPayload
        )
        guard let keepaliveInfoData = try? JSONEncoder().encode(keepaliveInfo) else {
            preconditionFailure()
        }
        guard let _ = try? keepaliveInfoData.write(to: URL(fileURLWithPath: keepaliveInfoPath(basePath: self.basePath)), options: .atomic) else {
            preconditionFailure()
        }
    }

    private func endActiveIndication() {
        let _ = try? FileManager.default.removeItem(atPath: keepaliveInfoPath(basePath: self.basePath))
    }
}
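Editor's note: the broadcast-extension side mirrors the app context; it polls the payload-description file, adopts the first id it sees, then reports activity until the id changes or the timestamp goes stale. A hedged observation sketch (variable names and the base path are illustrative):

let broadcastContext = IpcGroupCallEmbeddedBroadcastContext(basePath: rootPath + "/embedded-coordination")
let _ = (broadcastContext.status
|> deliverOnMainQueue).start(next: { status in
    switch status {
    case let .active(id, joinResponse):
        print("active, request id: \(String(describing: id)), has join response: \(joinResponse != nil)")
    case let .finished(reason):
        print("finished: \(reason)")
    }
})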
@ -7,13 +7,13 @@ import TelegramUIPreferences
import TgVoip
import TgVoipWebrtc

private let debugUseLegacyVersionForReflectors: Bool = {
private func debugUseLegacyVersionForReflectors() -> Bool {
    #if DEBUG && false
    return true
    #else
    return false
    #endif
}()
}

private struct PeerTag: Hashable, CustomStringConvertible {
    var bytes: [UInt8] = Array<UInt8>(repeating: 0, count: 16)
@ -510,21 +510,21 @@ public final class OngoingCallVideoCapturer {
        self.impl.setIsVideoEnabled(value)
    }

    public func injectPixelBuffer(_ pixelBuffer: CVPixelBuffer, rotation: CGImagePropertyOrientation) {
    public func injectSampleBuffer(_ sampleBuffer: CMSampleBuffer, rotation: CGImagePropertyOrientation, completion: @escaping () -> Void) {
        var videoRotation: OngoingCallVideoOrientation = .rotation0
        switch rotation {
        case .up:
            videoRotation = .rotation0
        case .left:
            videoRotation = .rotation90
        case .right:
            videoRotation = .rotation270
        case .down:
            videoRotation = .rotation180
        default:
            videoRotation = .rotation0
        case .up:
            videoRotation = .rotation0
        case .left:
            videoRotation = .rotation90
        case .right:
            videoRotation = .rotation270
        case .down:
            videoRotation = .rotation180
        default:
            videoRotation = .rotation0
        }
        self.impl.submitPixelBuffer(pixelBuffer, rotation: videoRotation.orientation)
        self.impl.submitSampleBuffer(sampleBuffer, rotation: videoRotation.orientation, completion: completion)
    }

    public func video() -> Signal<OngoingGroupCallContext.VideoFrameData, NoError> {
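Editor's note: the orientation-to-rotation table above is unchanged by this diff; only the input type (CMSampleBuffer instead of CVPixelBuffer) and the completion callback are new. The same mapping as a standalone helper, for reference (the function name is illustrative):

func callRotation(for orientation: CGImagePropertyOrientation) -> OngoingCallVideoOrientation {
    switch orientation {
    case .up:
        return .rotation0
    case .left:
        return .rotation90
    case .right:
        return .rotation270
    case .down:
        return .rotation180
    default:
        // Mirrored orientations fall back to no rotation, as in the diff.
        return .rotation0
    }
}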
@ -819,7 +819,7 @@ public final class OngoingCallContext {
        }
        #endif

        if debugUseLegacyVersionForReflectors {
        if debugUseLegacyVersionForReflectors() {
            return [(OngoingCallThreadLocalContext.version(), true)]
        } else {
            var result: [(version: String, supportsVideo: Bool)] = [(OngoingCallThreadLocalContext.version(), false)]
@ -860,9 +860,9 @@ public final class OngoingCallContext {
        var useModernImplementation = true
        var version = version
        var allowP2P = allowP2P
        if debugUseLegacyVersionForReflectors {
        if debugUseLegacyVersionForReflectors() {
            useModernImplementation = true
            version = "5.0.0"
            version = "12.0.0"
            allowP2P = false
        } else {
            useModernImplementation = version != OngoingCallThreadLocalContext.version()
@ -879,7 +879,23 @@ public final class OngoingCallContext {
            }
        }

        let unfilteredConnections = [connections.primary] + connections.alternatives
        var unfilteredConnections: [CallSessionConnection]
        unfilteredConnections = [connections.primary] + connections.alternatives

        if version == "12.0.0" {
            for connection in unfilteredConnections {
                if case let .reflector(reflector) = connection {
                    unfilteredConnections.append(.reflector(CallSessionConnection.Reflector(
                        id: 123456,
                        ip: "91.108.9.38",
                        ipv6: "",
                        isTcp: true,
                        port: 595,
                        peerTag: reflector.peerTag
                    )))
                }
            }
        }

        var reflectorIdList: [Int64] = []
        for connection in unfilteredConnections {
@ -911,11 +927,17 @@ public final class OngoingCallContext {
            switch connection {
            case let .reflector(reflector):
                if reflector.isTcp {
                    if signalingReflector == nil {
                        signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: reflector.port, username: "reflector", password: hexString(reflector.peerTag))
                    if version == "12.0.0" {
                        /*if signalingReflector == nil {
                            signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: reflector.port, username: "reflector", password: hexString(reflector.peerTag))
                        }*/
                    } else {
                        if signalingReflector == nil {
                            signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: reflector.port, username: "reflector", password: hexString(reflector.peerTag))
                        }

                        continue connectionsLoop
                    }

                    continue connectionsLoop
                }
            case .webRtcReflector:
                break
@ -962,22 +984,37 @@ public final class OngoingCallContext {
            directConnection = nil
        }

        #if DEBUG && false
        #if DEBUG && true
        var customParameters = customParameters
        if let initialCustomParameters = try? JSONSerialization.jsonObject(with: (customParameters ?? "{}").data(using: .utf8)!) as? [String: Any] {
            var customParametersValue: [String: Any]
            customParametersValue = initialCustomParameters
            customParametersValue["network_standalone_reflectors"] = true as NSNumber
            customParametersValue["network_use_mtproto"] = true as NSNumber
            customParametersValue["network_skip_initial_ping"] = true as NSNumber
            customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)!
            if version == "12.0.0" {
                customParametersValue["network_use_tcponly"] = true as NSNumber
                customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)!
            }

            if let reflector = filteredConnections.first(where: { $0.username == "reflector" && $0.reflectorId == 1 }) {
                filteredConnections = [reflector]
            if let value = customParametersValue["network_use_tcponly"] as? Bool, value {
                filteredConnections = filteredConnections.filter { connection in
                    if connection.hasTcp {
                        return true
                    }
                    return false
                }
                allowP2P = false
            }
        }
        #endif

        /*#if DEBUG
        if let initialCustomParameters = try? JSONSerialization.jsonObject(with: (customParameters ?? "{}").data(using: .utf8)!) as? [String: Any] {
            var customParametersValue: [String: Any]
            customParametersValue = initialCustomParameters
            customParametersValue["network_kcp_experiment"] = true as NSNumber
            customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)!
        }
        #endif*/

        let context = OngoingCallThreadLocalContextWebrtc(
            version: version,
            customParameters: customParameters,
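Editor's note: the debug branch above round-trips customParameters through JSONSerialization to toggle transport flags. A hedged helper performing the same merge without force-unwraps (the function name is illustrative, not part of this diff):

func amendCustomParameters(_ customParameters: String?, with extra: [String: Any]) -> String? {
    guard let data = (customParameters ?? "{}").data(using: .utf8),
          var object = (try? JSONSerialization.jsonObject(with: data)) as? [String: Any] else {
        return customParameters
    }
    for (key, value) in extra {
        object[key] = value
    }
    guard let merged = try? JSONSerialization.data(withJSONObject: object) else {
        return customParameters
    }
    return String(data: merged, encoding: .utf8)
}

// For example, the "12.0.0" debug path above is equivalent to:
// customParameters = amendCustomParameters(customParameters, with: ["network_use_tcponly": true as NSNumber])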
@ -120,12 +120,15 @@ sources = glob([
    "tgcalls/tgcalls/v2/InstanceV2Impl.cpp",
    "tgcalls/tgcalls/v2/InstanceV2ReferenceImpl.cpp",
    "tgcalls/tgcalls/v2/NativeNetworkingImpl.cpp",
    "tgcalls/tgcalls/v2/RawTcpSocket.cpp",
    "tgcalls/tgcalls/v2/ReflectorPort.cpp",
    "tgcalls/tgcalls/v2/ReflectorRelayPortFactory.cpp",
    "tgcalls/tgcalls/v2/Signaling.cpp",
    "tgcalls/tgcalls/v2/SignalingConnection.cpp",
    "tgcalls/tgcalls/v2/SignalingEncryption.cpp",
    "tgcalls/tgcalls/v2/SignalingSctpConnection.cpp",
    "tgcalls/tgcalls/v2/SignalingKcpConnection.cpp",
    "tgcalls/tgcalls/v2/ikcp.cpp",
]

objc_library(
@ -209,10 +209,10 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {

- (void)setOnFatalError:(dispatch_block_t _Nullable)onError;
- (void)setOnPause:(void (^ _Nullable)(bool))onPause;
- (void)setOnIsActiveUpdated:(void (^_Nonnull)(bool))onIsActiveUpdated;
- (void)setOnIsActiveUpdated:(void (^ _Nonnull)(bool))onIsActiveUpdated;

#if TARGET_OS_IOS
- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation;
- (void)submitSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation completion:(void (^_Nonnull)())completion;
#endif

- (GroupCallDisposable * _Nonnull)addVideoOutput:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink;
@ -667,8 +667,11 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
}

#if TARGET_OS_IOS
- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation {
    if (!pixelBuffer) {
- (void)submitSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation completion:(void (^_Nonnull)())completion {
    if (!sampleBuffer) {
        if (completion) {
            completion();
        }
        return;
    }

@ -688,19 +691,30 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
        break;
    }

    if (_isProcessingCustomSampleBuffer.value) {
    /*if (_isProcessingCustomSampleBuffer.value) {
        if (completion) {
            completion();
        }
        return;
    }
    }*/
    _isProcessingCustomSampleBuffer.value = true;

    tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, pixelBuffer = CFRetain(pixelBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer]() {
    void (^capturedCompletion)() = [completion copy];

    tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, sampleBuffer = CFRetain(sampleBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer, capturedCompletion]() {
        auto capture = GetVideoCaptureAssumingSameThread(interface.get());
        auto source = capture->source();
        if (source) {
            [CustomExternalCapturer passPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer];
            CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer((CMSampleBufferRef)sampleBuffer);

            [CustomExternalCapturer passPixelBuffer:pixelBuffer sampleBufferReference:(CMSampleBufferRef)sampleBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer];
        }
        CFRelease(pixelBuffer);
        CFRelease(sampleBuffer);
        isProcessingCustomSampleBuffer.value = false;

        if (capturedCompletion) {
            capturedCompletion();
        }
    });
}
@ -1 +1 @@
Subproject commit 2d45febcfe741be127eaee236e71be09acbc7a38
Subproject commit e3e408f75d03c9f3961c9a0dea0fdc7101708286
third-party/flatc/BUILD (vendored, new file, +37)
@ -0,0 +1,37 @@

genrule(
    name = "flatc",
    srcs = [
        "@flatbuffers_zip//file",
        "@cmake_tar_gz//file",
    ],
    cmd_bash =
    """
    set -x
    core_count=`PATH="$$PATH:/usr/sbin" sysctl -n hw.logicalcpu`
    BUILD_DIR="$(RULEDIR)/build"
    rm -rf "$$BUILD_DIR"
    mkdir -p "$$BUILD_DIR"

    CMAKE_DIR="$$(pwd)/$$BUILD_DIR/cmake"
    rm -rf "$$CMAKE_DIR"
    mkdir -p "$$CMAKE_DIR"
    tar -xf "$(location @cmake_tar_gz//file)" -C "$$CMAKE_DIR"

    tar -xzf "$(location @flatbuffers_zip//file)" --directory "$$BUILD_DIR"
    pushd "$$BUILD_DIR/flatbuffers-24.12.23"
    mkdir build
    cd build
    PATH="$$PATH:$$CMAKE_DIR/cmake-3.23.1-macos-universal/CMake.app/Contents/bin" cmake .. -DCMAKE_BUILD_TYPE=Release
    make -j $$core_count
    popd

    tar -cf "$(location flatc.tar)" -C "$$BUILD_DIR/flatbuffers-24.12.23/build" .
    """,
    outs = [
        "flatc.tar",
    ],
    visibility = [
        "//visibility:public",
    ],
)