diff --git a/Telegram/BroadcastUpload/BroadcastUploadExtension.swift b/Telegram/BroadcastUpload/BroadcastUploadExtension.swift index b6b002f205..93159cbb30 100644 --- a/Telegram/BroadcastUpload/BroadcastUploadExtension.swift +++ b/Telegram/BroadcastUpload/BroadcastUploadExtension.swift @@ -6,26 +6,60 @@ import SwiftSignalKit import BuildConfig import BroadcastUploadHelpers import AudioToolbox +import Postbox +import CoreMedia +import AVFoundation private func rootPathForBasePath(_ appGroupPath: String) -> String { return appGroupPath + "/telegram-data" } -@available(iOS 10.0, *) -@objc(BroadcastUploadSampleHandler) class BroadcastUploadSampleHandler: RPBroadcastSampleHandler { +private protocol BroadcastUploadImpl: AnyObject { + func initialize(rootPath: String) + func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) + func processAudioSampleBuffer(data: Data) +} + +private final class InProcessBroadcastUploadImpl: BroadcastUploadImpl { + private weak var extensionContext: RPBroadcastSampleHandler? private var screencastBufferClientContext: IpcGroupCallBufferBroadcastContext? private var statusDisposable: Disposable? - private var audioConverter: CustomAudioConverter? - + + init(extensionContext: RPBroadcastSampleHandler) { + self.extensionContext = extensionContext + } + deinit { self.statusDisposable?.dispose() } + + func initialize(rootPath: String) { + let screencastBufferClientContext = IpcGroupCallBufferBroadcastContext(basePath: rootPath + "/broadcast-coordination") + self.screencastBufferClientContext = screencastBufferClientContext - public override func beginRequest(with context: NSExtensionContext) { - super.beginRequest(with: context) + var wasRunning = false + self.statusDisposable = (screencastBufferClientContext.status + |> deliverOnMainQueue).start(next: { [weak self] status in + guard let self else { + return + } + switch status { + case .active: + wasRunning = true + case let .finished(reason): + if wasRunning { + self.finish(with: .screencastEnded) + } else { + self.finish(with: reason) + } + } + }) } - + private func finish(with reason: IpcGroupCallBufferBroadcastContext.Status.FinishReason) { + guard let extensionContext = self.extensionContext else { + return + } var errorString: String? switch reason { case .callEnded: @@ -39,16 +73,247 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String { let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [ NSLocalizedDescriptionKey: errorString ]) - finishBroadcastWithError(error) + extensionContext.finishBroadcastWithError(error) } else { - finishBroadcastGracefully(self) + finishBroadcastGracefully(extensionContext) } } + + func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) { + guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + return + } + var orientation = CGImagePropertyOrientation.up + if #available(iOS 11.0, *) { + if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber { + orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? 
.up + } + } + if let data = serializePixelBuffer(buffer: pixelBuffer) { + self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation) + } + } + + func processAudioSampleBuffer(data: Data) { + self.screencastBufferClientContext?.writeAudioData(data: data) + } +} +private final class EmbeddedBroadcastUploadImpl: BroadcastUploadImpl { + private weak var extensionContext: RPBroadcastSampleHandler? + + private var clientContext: IpcGroupCallEmbeddedBroadcastContext? + private var statusDisposable: Disposable? + + private var callContextId: UInt32? + private var callContextDidSetJoinResponse: Bool = false + private var callContext: OngoingGroupCallContext? + private let screencastCapturer: OngoingCallVideoCapturer + + private var joinPayloadDisposable: Disposable? + + private var sampleBuffers: [CMSampleBuffer] = [] + private var lastAcceptedTimestamp: Double? + + init(extensionContext: RPBroadcastSampleHandler) { + self.extensionContext = extensionContext + + self.screencastCapturer = OngoingCallVideoCapturer(isCustom: true) + } + + deinit { + self.joinPayloadDisposable?.dispose() + } + + func initialize(rootPath: String) { + let clientContext = IpcGroupCallEmbeddedBroadcastContext(basePath: rootPath + "/embedded-broadcast-coordination") + self.clientContext = clientContext + + var wasRunning = false + self.statusDisposable = (clientContext.status + |> deliverOnMainQueue).start(next: { [weak self] status in + guard let self else { + return + } + switch status { + case let .active(id, joinResponse): + wasRunning = true + + if self.callContextId != id { + if let callContext = self.callContext { + self.callContext = nil + self.callContextId = nil + self.callContextDidSetJoinResponse = false + self.joinPayloadDisposable?.dispose() + self.joinPayloadDisposable = nil + callContext.stop(account: nil, reportCallId: nil, debugLog: Promise()) + } + } + + if let id { + if self.callContext == nil { + self.callContextId = id + let callContext = OngoingGroupCallContext( + audioSessionActive: .single(true), + video: self.screencastCapturer, + requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, + rejoinNeeded: { }, + outgoingAudioBitrateKbit: nil, + videoContentType: .screencast, + enableNoiseSuppression: false, + disableAudioInput: true, + enableSystemMute: false, + preferX264: false, + logPath: "", + onMutedSpeechActivityDetected: { _ in }, + encryptionKey: nil, + isConference: false, + sharedAudioDevice: nil + ) + self.callContext = callContext + self.joinPayloadDisposable = (callContext.joinPayload + |> deliverOnMainQueue).start(next: { [weak self] joinPayload in + guard let self else { + return + } + if self.callContextId != id { + return + } + self.clientContext?.joinPayload = IpcGroupCallEmbeddedAppContext.JoinPayload( + id: id, + data: joinPayload.0, + ssrc: joinPayload.1 + ) + }) + } + + if let callContext = self.callContext { + if let joinResponse, !self.callContextDidSetJoinResponse { + self.callContextDidSetJoinResponse = true + callContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false) + callContext.setJoinResponse(payload: joinResponse.data) + } + } + } + case let .finished(reason): + if wasRunning { + self.finish(with: .screencastEnded) + } else { + self.finish(with: reason) + } + } + }) + } + + private func finish(with reason: IpcGroupCallEmbeddedBroadcastContext.Status.FinishReason) { + guard let extensionContext = self.extensionContext else { + return + } + var errorString: String? 
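+        // Note (assumption from the surrounding logic): a nil errorString means the
+        // screencast ended normally and the broadcast is finished gracefully below.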
+ switch reason { + case .callEnded: + errorString = "You're not in a voice chat" + case .error: + errorString = "Finished" + case .screencastEnded: + break + } + if let errorString = errorString { + let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [ + NSLocalizedDescriptionKey: errorString + ]) + extensionContext.finishBroadcastWithError(error) + } else { + finishBroadcastGracefully(extensionContext) + } + } + + func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) { + let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds + if let lastAcceptedTimestamp = self.lastAcceptedTimestamp { + if lastAcceptedTimestamp + 1.0 / 30.0 > timestamp { + return + } + } + self.lastAcceptedTimestamp = timestamp + + guard let sourceImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { + return + } + let sourcePixelBuffer: CVPixelBuffer = sourceImageBuffer as CVPixelBuffer + + let width = CVPixelBufferGetWidth(sourcePixelBuffer) + let height = CVPixelBufferGetHeight(sourcePixelBuffer) + let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(sourcePixelBuffer) + + var outputPixelBuffer: CVPixelBuffer? + let pixelFormat = CVPixelBufferGetPixelFormatType(sourcePixelBuffer) + CVPixelBufferCreate(nil, width, height, pixelFormat, nil, &outputPixelBuffer) + guard let outputPixelBuffer else { + return + } + CVPixelBufferLockBaseAddress(sourcePixelBuffer, []) + CVPixelBufferLockBaseAddress(outputPixelBuffer, []) + + let outputBytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer) + + let sourceBaseAddress = CVPixelBufferGetBaseAddress(sourcePixelBuffer) + let outputBaseAddress = CVPixelBufferGetBaseAddress(outputPixelBuffer) + + if outputBytesPerRow == sourceBytesPerRow { + memcpy(outputBaseAddress!, sourceBaseAddress!, height * outputBytesPerRow) + } else { + for y in 0 ..< height { + memcpy(outputBaseAddress!.advanced(by: y * outputBytesPerRow), sourceBaseAddress!.advanced(by: y * sourceBytesPerRow), min(sourceBytesPerRow, outputBytesPerRow)) + } + } + + defer { + CVPixelBufferUnlockBaseAddress(sourcePixelBuffer, []) + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, []) + } + + var orientation = CGImagePropertyOrientation.up + if #available(iOS 11.0, *) { + if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber { + orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up + } + } + + if let outputSampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: outputPixelBuffer) { + let semaphore = DispatchSemaphore(value: 0) + self.screencastCapturer.injectSampleBuffer(outputSampleBuffer, rotation: orientation, completion: { + //semaphore.signal() + }) + let _ = semaphore.wait(timeout: DispatchTime.now() + 1.0 / 30.0) + } + } + + func processAudioSampleBuffer(data: Data) { + self.callContext?.addExternalAudioData(data: data) + } +} + +@available(iOS 10.0, *) +@objc(BroadcastUploadSampleHandler) class BroadcastUploadSampleHandler: RPBroadcastSampleHandler { + private var impl: BroadcastUploadImpl? + private var audioConverter: CustomAudioConverter? 
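+    // The sample handler itself only converts audio and forwards buffers; all
+    // coordination with the main app is delegated to the impl selected in broadcastStarted.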
+ + public override func beginRequest(with context: NSExtensionContext) { + super.beginRequest(with: context) + } + + private func finishWithError() { + let errorString = "Finished" + let error = NSError(domain: "BroadcastUploadExtension", code: 1, userInfo: [ + NSLocalizedDescriptionKey: errorString + ]) + self.finishBroadcastWithError(error) + } override public func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) { guard let appBundleIdentifier = Bundle.main.bundleIdentifier, let lastDotRange = appBundleIdentifier.range(of: ".", options: [.backwards]) else { - self.finish(with: .error) + self.finishWithError() return } @@ -58,35 +323,32 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String { let maybeAppGroupUrl = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGroupName) guard let appGroupUrl = maybeAppGroupUrl else { - self.finish(with: .error) + self.finishWithError() return } let rootPath = rootPathForBasePath(appGroupUrl.path) + + TempBox.initializeShared(basePath: rootPath, processType: "share", launchSpecificId: Int64.random(in: Int64.min ... Int64.max)) let logsPath = rootPath + "/logs/broadcast-logs" let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil) - let screencastBufferClientContext = IpcGroupCallBufferBroadcastContext(basePath: rootPath + "/broadcast-coordination") - self.screencastBufferClientContext = screencastBufferClientContext - - var wasRunning = false - self.statusDisposable = (screencastBufferClientContext.status - |> deliverOnMainQueue).start(next: { [weak self] status in - guard let strongSelf = self else { - return - } - switch status { - case .active: - wasRunning = true - case let .finished(reason): - if wasRunning { - strongSelf.finish(with: .screencastEnded) - } else { - strongSelf.finish(with: reason) - } - } - }) + let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type" + + var useIPCContext = false + if let typeData = try? Data(contentsOf: URL(fileURLWithPath: embeddedBroadcastImplementationTypePath)), let type = String(data: typeData, encoding: .utf8) { + useIPCContext = type == "ipc" + } + + let impl: BroadcastUploadImpl + if useIPCContext { + impl = EmbeddedBroadcastUploadImpl(extensionContext: self) + } else { + impl = InProcessBroadcastUploadImpl(extensionContext: self) + } + self.impl = impl + impl.initialize(rootPath: rootPath) } override public func broadcastPaused() { @@ -112,18 +374,7 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String { } private func processVideoSampleBuffer(sampleBuffer: CMSampleBuffer) { - guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - return - } - var orientation = CGImagePropertyOrientation.up - if #available(iOS 11.0, *) { - if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber { - orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? 
.up - } - } - if let data = serializePixelBuffer(buffer: pixelBuffer) { - self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation) - } + self.impl?.processVideoSampleBuffer(sampleBuffer: sampleBuffer) } private func processAudioSampleBuffer(sampleBuffer: CMSampleBuffer) { @@ -133,9 +384,6 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String { guard let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) else { return } - /*guard let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { - return - }*/ let format = CustomAudioConverter.Format( numChannels: Int(asbd.pointee.mChannelsPerFrame), @@ -146,7 +394,7 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String { } if let audioConverter = self.audioConverter { if let data = audioConverter.convert(sampleBuffer: sampleBuffer), !data.isEmpty { - self.screencastBufferClientContext?.writeAudioData(data: data) + self.impl?.processAudioSampleBuffer(data: data) } } } @@ -287,3 +535,36 @@ private func converterComplexInputDataProc(inAudioConverter: AudioConverterRef, return 0 } + +private func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? { + var maybeFormat: CMVideoFormatDescription? + let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat) + if status != noErr { + return nil + } + guard let format = maybeFormat else { + return nil + } + + var timingInfo = CMSampleTimingInfo( + duration: CMTimeMake(value: 1, timescale: 30), + presentationTimeStamp: CMTimeMake(value: 0, timescale: 30), + decodeTimeStamp: CMTimeMake(value: 0, timescale: 30) + ) + + var maybeSampleBuffer: CMSampleBuffer? + let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer) + + if (bufferStatus != noErr) { + return nil + } + guard let sampleBuffer = maybeSampleBuffer else { + return nil + } + + let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray + let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary + dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber + + return sampleBuffer +} diff --git a/submodules/AccountContext/Sources/AccountContext.swift b/submodules/AccountContext/Sources/AccountContext.swift index 1d31f0f535..0da1413dc6 100644 --- a/submodules/AccountContext/Sources/AccountContext.swift +++ b/submodules/AccountContext/Sources/AccountContext.swift @@ -805,7 +805,8 @@ public protocol TelegramRootControllerInterface: NavigationController { func getPrivacySettings() -> Promise? func openSettings() func openBirthdaySetup() - func openPhotoSetup() + func openPhotoSetup(completedWithUploadingImage: @escaping (UIImage, Signal) -> UIView?) 
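+    // Opens the profile photo gallery so the user can view the uploaded avatar
+    // (invoked from the upload toast's "view" action; see ChatListController below).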
+ func openAvatars() } public protocol QuickReplySetupScreenInitialData: AnyObject { diff --git a/submodules/AccountContext/Sources/ChatController.swift b/submodules/AccountContext/Sources/ChatController.swift index b44d75c0be..e43111a684 100644 --- a/submodules/AccountContext/Sources/ChatController.swift +++ b/submodules/AccountContext/Sources/ChatController.swift @@ -953,6 +953,11 @@ public final class PeerInfoNavigationSourceTag { } } +public enum PeerInfoAvatarUploadStatus { + case progress(Float) + case done +} + public protocol PeerInfoScreen: ViewController { var peerId: PeerId { get } var privacySettings: Promise { get } @@ -961,7 +966,8 @@ public protocol PeerInfoScreen: ViewController { func toggleStorySelection(ids: [Int32], isSelected: Bool) func togglePaneIsReordering(isReordering: Bool) func cancelItemSelection() - func openAvatarSetup() + func openAvatarSetup(completedWithUploadingImage: @escaping (UIImage, Signal) -> UIView?) + func openAvatars() } public extension Peer { diff --git a/submodules/ChatListUI/BUILD b/submodules/ChatListUI/BUILD index 4d2512beef..2260529f1a 100644 --- a/submodules/ChatListUI/BUILD +++ b/submodules/ChatListUI/BUILD @@ -112,6 +112,7 @@ swift_library( "//submodules/ChatPresentationInterfaceState", "//submodules/ShimmerEffect:ShimmerEffect", "//submodules/TelegramUI/Components/LottieComponent", + "//submodules/TelegramUI/Components/AvatarUploadToastScreen", ], visibility = [ "//visibility:public", diff --git a/submodules/ChatListUI/Sources/ChatListController.swift b/submodules/ChatListUI/Sources/ChatListController.swift index 0dc0f80c7a..2eb09c1bc1 100644 --- a/submodules/ChatListUI/Sources/ChatListController.swift +++ b/submodules/ChatListUI/Sources/ChatListController.swift @@ -52,6 +52,7 @@ import ArchiveInfoScreen import BirthdayPickerScreen import OldChannelsController import TextFormat +import AvatarUploadToastScreen private final class ContextControllerContentSourceImpl: ContextControllerContentSource { let controller: ViewController @@ -1208,7 +1209,54 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController return } if let rootController = self.navigationController as? TelegramRootControllerInterface { - rootController.openPhotoSetup() + rootController.openPhotoSetup(completedWithUploadingImage: { [weak self] image, uploadStatus in + guard let self else { + return nil + } + + let toastScreen = AvatarUploadToastScreen( + context: self.context, + image: image, + uploadStatus: uploadStatus, + arrowTarget: { [weak self] in + guard let self else { + return nil + } + guard let tabController = self.parent as? TabBarController else { + return nil + } + guard let settingsController = tabController.controllers.first(where: { $0 is PeerInfoScreen }) as? PeerInfoScreen else { + return nil + } + guard let tabFrame = tabController.frameForControllerTab(controller: settingsController) else { + return nil + } + return (tabController.view, tabFrame) + }, + viewUploadedAvatar: { [weak self] in + guard let self else { + return + } + if let rootController = self.navigationController as? TelegramRootControllerInterface { + rootController.openAvatars() + } + } + ) + + if let navigationController = self.navigationController as? 
NavigationController { + var viewControllers = navigationController.viewControllers + if let index = viewControllers.firstIndex(where: { $0 is TabBarController }) { + viewControllers.insert(toastScreen, at: index + 1) + } else { + viewControllers.append(toastScreen) + } + navigationController.setViewControllers(viewControllers, animated: true) + } else { + self.push(toastScreen) + } + + return toastScreen.targetAvatarView + }) } } diff --git a/submodules/ChatListUI/Sources/Node/ChatListItem.swift b/submodules/ChatListUI/Sources/Node/ChatListItem.swift index 1255aaea6c..9184eef095 100644 --- a/submodules/ChatListUI/Sources/Node/ChatListItem.swift +++ b/submodules/ChatListUI/Sources/Node/ChatListItem.swift @@ -3211,7 +3211,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode { var actionButtonTitleNodeLayoutAndApply: (TextNodeLayout, () -> TextNode)? if case .none = badgeContent, case .none = mentionBadgeContent, case let .chat(itemPeer) = contentPeer, case let .user(user) = itemPeer.chatMainPeer, let botInfo = user.botInfo, botInfo.flags.contains(.hasWebApp) { - actionButtonTitleNodeLayoutAndApply = makeActionButtonTitleNodeLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.ChatList_InlineButtonOpenApp, font: Font.semibold(15.0), textColor: theme.unreadBadgeActiveTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: rawContentWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + actionButtonTitleNodeLayoutAndApply = makeActionButtonTitleNodeLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.ChatList_InlineButtonOpenApp, font: Font.semibold(floor(item.presentationData.fontSize.itemListBaseFontSize * 15.0 / 17.0)), textColor: theme.unreadBadgeActiveTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: rawContentWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) } var badgeSize: CGFloat = 0.0 @@ -4010,8 +4010,13 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode { } if let (actionButtonTitleNodeLayout, apply) = actionButtonTitleNodeLayoutAndApply { - let actionButtonSize = CGSize(width: actionButtonTitleNodeLayout.size.width + 12.0 * 2.0, height: actionButtonTitleNodeLayout.size.height + 5.0 + 4.0) - let actionButtonFrame = CGRect(x: nextBadgeX - actionButtonSize.width, y: contentRect.maxY - actionButtonSize.height, width: actionButtonSize.width, height: actionButtonSize.height) + let actionButtonSideInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 12.0 / 17.0) + let actionButtonTopInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 5.0 / 17.0) + let actionButtonBottomInset = floor(item.presentationData.fontSize.itemListBaseFontSize * 4.0 / 17.0) + + let actionButtonSize = CGSize(width: actionButtonTitleNodeLayout.size.width + actionButtonSideInset * 2.0, height: actionButtonTitleNodeLayout.size.height + actionButtonTopInset + actionButtonBottomInset) + var actionButtonFrame = CGRect(x: nextBadgeX - actionButtonSize.width, y: contentRect.minY + floor((contentRect.height - actionButtonSize.height) * 0.5), width: actionButtonSize.width, height: actionButtonSize.height) + actionButtonFrame.origin.y = max(actionButtonFrame.origin.y, dateFrame.maxY + 
floor(item.presentationData.fontSize.itemListBaseFontSize * 4.0 / 17.0)) let actionButtonNode: HighlightableButtonNode if let current = strongSelf.actionButtonNode { @@ -4030,10 +4035,10 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode { actionButtonBackgroundView = UIImageView() strongSelf.actionButtonBackgroundView = actionButtonBackgroundView actionButtonNode.view.addSubview(actionButtonBackgroundView) - - if actionButtonBackgroundView.image?.size.height != actionButtonSize.height { - actionButtonBackgroundView.image = generateStretchableFilledCircleImage(diameter: actionButtonSize.height, color: .white)?.withRenderingMode(.alwaysTemplate) - } + } + + if actionButtonBackgroundView.image?.size.height != actionButtonSize.height { + actionButtonBackgroundView.image = generateStretchableFilledCircleImage(diameter: actionButtonSize.height, color: .white)?.withRenderingMode(.alwaysTemplate) } actionButtonBackgroundView.tintColor = theme.unreadBadgeActiveBackgroundColor @@ -4047,7 +4052,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode { actionButtonNode.frame = actionButtonFrame actionButtonBackgroundView.frame = CGRect(origin: CGPoint(), size: actionButtonFrame.size) - actionButtonTitleNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((actionButtonFrame.width - actionButtonTitleNodeLayout.size.width) * 0.5), y: 5.0), size: actionButtonTitleNodeLayout.size) + actionButtonTitleNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((actionButtonFrame.width - actionButtonTitleNodeLayout.size.width) * 0.5), y: actionButtonTopInset), size: actionButtonTitleNodeLayout.size) nextBadgeX -= actionButtonSize.width + 6.0 } else { diff --git a/submodules/ChatListUI/Sources/Node/ChatListNode.swift b/submodules/ChatListUI/Sources/Node/ChatListNode.swift index dfc8c12f68..39b8b52130 100644 --- a/submodules/ChatListUI/Sources/Node/ChatListNode.swift +++ b/submodules/ChatListUI/Sources/Node/ChatListNode.swift @@ -1994,6 +1994,11 @@ public final class ChatListNode: ListView { starsSubscriptionsContextPromise.get() ) |> mapToSignal { suggestions, dismissedSuggestions, configuration, newSessionReviews, data, birthdays, starsSubscriptionsContext -> Signal in + #if DEBUG + var suggestions = suggestions + suggestions.insert(.setupPhoto, at: 0) + #endif + let (accountPeer, birthday) = data if let newSessionReview = newSessionReviews.first { diff --git a/submodules/Components/ViewControllerComponent/Sources/ViewControllerComponent.swift b/submodules/Components/ViewControllerComponent/Sources/ViewControllerComponent.swift index 517e315863..049c3feff7 100644 --- a/submodules/Components/ViewControllerComponent/Sources/ViewControllerComponent.swift +++ b/submodules/Components/ViewControllerComponent/Sources/ViewControllerComponent.swift @@ -218,6 +218,16 @@ open class ViewControllerComponentContainer: ViewController { } self.containerLayoutUpdated(layout: currentLayout.layout, navigationHeight: currentLayout.navigationHeight, transition: transition) } + + override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ + if let result = super.hitTest(point, with: event) { + if result === self.view { + return nil + } + return result + } + return nil + } } public var node: Node { diff --git a/submodules/DebugSettingsUI/Sources/DebugController.swift b/submodules/DebugSettingsUI/Sources/DebugController.swift index 92666bcef5..40ffd2764c 100644 --- a/submodules/DebugSettingsUI/Sources/DebugController.swift +++ b/submodules/DebugSettingsUI/Sources/DebugController.swift @@ -107,7 +107,8 @@ private enum DebugControllerEntry: ItemListNodeEntry { case experimentalCallMute(Bool) case conferenceCalls(Bool) case playerV2(Bool) - case benchmarkReflectors + case devRequests(Bool) + case fakeAds(Bool) case enableLocalTranslation(Bool) case preferredVideoCodec(Int, String, String?, Bool) case disableVideoAspectScaling(Bool) @@ -133,7 +134,7 @@ private enum DebugControllerEntry: ItemListNodeEntry { return DebugControllerSection.web.rawValue case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure: return DebugControllerSection.experiments.rawValue - case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation: + case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .devRequests, .fakeAds, .enableLocalTranslation: return DebugControllerSection.experiments.rawValue case .logTranslationRecognition, .resetTranslationStates: return DebugControllerSection.translation.rawValue @@ -254,12 +255,14 @@ private enum DebugControllerEntry: ItemListNodeEntry { return 53 case .playerV2: return 54 - case .benchmarkReflectors: + case .devRequests: return 55 - case .enableLocalTranslation: + case .fakeAds: return 56 + case .enableLocalTranslation: + return 57 case let .preferredVideoCodec(index, _, _, _): - return 57 + index + return 58 + index case .disableVideoAspectScaling: return 100 case .enableNetworkFramework: @@ -1368,60 +1371,25 @@ private enum DebugControllerEntry: ItemListNodeEntry { }) }).start() }) - case .benchmarkReflectors: - return ItemListActionItem(presentationData: presentationData, title: "Benchmark Reflectors", kind: .generic, alignment: .natural, sectionId: self.section, style: .blocks, action: { - guard let context = arguments.context else { - return - } - - var signal: Signal = Signal { subscriber in - var reflectorBenchmark: ReflectorBenchmark? 
= ReflectorBenchmark(address: "91.108.13.35", port: 599)
-                    reflectorBenchmark?.start(completion: { results in
-                        subscriber.putNext(results)
-                        subscriber.putCompletion()
+        case let .devRequests(value):
+            return ItemListSwitchItem(presentationData: presentationData, title: "Dev Requests", value: value, sectionId: self.section, style: .blocks, updated: { value in
+                let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+                    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+                        var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
+                        settings.devRequests = value
+                        return PreferencesEntry(settings)
                    })
-
-                    return ActionDisposable {
-                        reflectorBenchmark = nil
-                    }
-                }
-                |> runOn(.mainQueue())
-
-                var cancelImpl: (() -> Void)?
-                let presentationData = context.sharedContext.currentPresentationData.with { $0 }
-                let progressSignal = Signal { subscriber in
-                    let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
-                        cancelImpl?()
-                    }))
-                    arguments.presentController(controller, nil)
-                    return ActionDisposable { [weak controller] in
-                        Queue.mainQueue().async() {
-                            controller?.dismiss()
-                        }
-                    }
-                }
-                |> runOn(Queue.mainQueue())
-                |> delay(0.15, queue: Queue.mainQueue())
-                let progressDisposable = progressSignal.start()
-
-                let reindexDisposable = MetaDisposable()
-
-                signal = signal
-                |> afterDisposed {
-                    Queue.mainQueue().async {
-                        progressDisposable.dispose()
-                    }
-                }
-                cancelImpl = {
-                    reindexDisposable.set(nil)
-                }
-                reindexDisposable.set((signal
-                |> deliverOnMainQueue).start(next: { results in
-                    if let context = arguments.context {
-                        let controller = textAlertController(context: context, title: nil, text: "Bandwidth: \(results.bandwidthBytesPerSecond * 8 / 1024) kbit/s (expected \(results.expectedBandwidthBytesPerSecond * 8 / 1024) kbit/s)\nAvg latency: \(Int(results.averageDelay * 1000.0)) ms", actions: [TextAlertAction(type: .genericAction, title: "OK", action: {})])
-                        arguments.presentController(controller, nil)
-                    }
-                }))
+            }).start()
+        })
+        case let .fakeAds(value):
+            return ItemListSwitchItem(presentationData: presentationData, title: "Fake Ads", value: value, sectionId: self.section, style: .blocks, updated: { value in
+                let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+                    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+                        var settings = settings?.get(ExperimentalUISettings.self) ?? 
ExperimentalUISettings.defaultSettings + settings.fakeAds = value + return PreferencesEntry(settings) + }) + }).start() }) case let .enableLocalTranslation(value): return ItemListSwitchItem(presentationData: presentationData, title: "Local Translation", value: value, sectionId: self.section, style: .blocks, updated: { value in @@ -1593,21 +1561,10 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present entries.append(.conferenceCalls(experimentalSettings.conferenceCalls)) entries.append(.playerV2(experimentalSettings.playerV2)) - entries.append(.benchmarkReflectors) + entries.append(.devRequests(experimentalSettings.devRequests)) + entries.append(.fakeAds(experimentalSettings.fakeAds)) entries.append(.enableLocalTranslation(experimentalSettings.enableLocalTranslation)) } - - /*let codecs: [(String, String?)] = [ - ("No Preference", nil), - ("H265", "H265"), - ("H264", "H264"), - ("VP8", "VP8"), - ("VP9", "VP9") - ] - - for i in 0 ..< codecs.count { - entries.append(.preferredVideoCodec(i, codecs[i].0, codecs[i].1, experimentalSettings.preferredVideoCodec == codecs[i].1)) - }*/ if isMainApp { entries.append(.disableVideoAspectScaling(experimentalSettings.disableVideoAspectScaling)) diff --git a/submodules/Display/Source/Navigation/NavigationModalContainer.swift b/submodules/Display/Source/Navigation/NavigationModalContainer.swift index 185ac4432b..5e09c77e07 100644 --- a/submodules/Display/Source/Navigation/NavigationModalContainer.swift +++ b/submodules/Display/Source/Navigation/NavigationModalContainer.swift @@ -535,6 +535,9 @@ final class NavigationModalContainer: ASDisplayNode, ASScrollViewDelegate, ASGes return self.dim.view } if self.isFlat { + if result === self.container.view { + return nil + } return result } var currentParent: UIView? = result diff --git a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift index 47da141849..5d47d50928 100644 --- a/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift +++ b/submodules/GalleryUI/Sources/Items/UniversalVideoGalleryItem.swift @@ -1738,8 +1738,25 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode { } else if let _ = item.content as? PlatformVideoContent { disablePlayerControls = true forceEnablePiP = true - } else if let _ = item.content as? HLSVideoContent { + } else if let content = item.content as? 
HLSVideoContent { isAdaptive = true + + if let qualitySet = HLSQualitySet(baseFile: content.fileReference, codecConfiguration: HLSCodecConfiguration(isHardwareAv1Supported: false, isSoftwareAv1Supported: true)), let (quality, playlistFile) = qualitySet.playlistFiles.sorted(by: { $0.key < $1.key }).first, let dataFile = qualitySet.qualityFiles[quality] { + var alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)] = [] + for (otherQuality, otherPlaylistFile) in qualitySet.playlistFiles { + if otherQuality != quality, let otherDataFile = qualitySet.qualityFiles[otherQuality] { + alternativeQualities.append((otherPlaylistFile, dataFile: otherDataFile)) + } + } + self.videoFramePreview = MediaPlayerFramePreviewHLS( + postbox: item.context.account.postbox, + userLocation: content.userLocation, + userContentType: .video, + playlistFile: playlistFile, + mainDataFile: dataFile, + alternativeQualities: alternativeQualities + ) + } } let _ = isAdaptive diff --git a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift index db9361c534..0f0154f935 100644 --- a/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift +++ b/submodules/MediaPickerUI/Sources/MediaPickerScreen.swift @@ -651,6 +651,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att self.gridNode.scrollView.addSubview(cameraView) self.gridNode.addSubnode(self.cameraActivateAreaNode) } else if useModernCamera, !Camera.isIpad { + #if !targetEnvironment(simulator) var cameraPosition: Camera.Position = .back if case .assets(nil, .createAvatar) = controller.subject { cameraPosition = .front @@ -703,6 +704,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att } else { setupCamera() } + #endif } else { self.containerNode.clipsToBounds = true } diff --git a/submodules/MediaPlayer/Sources/ChunkMediaPlayerDirectFetchSourceImpl.swift b/submodules/MediaPlayer/Sources/ChunkMediaPlayerDirectFetchSourceImpl.swift index 411cd10786..cc34be53ae 100644 --- a/submodules/MediaPlayer/Sources/ChunkMediaPlayerDirectFetchSourceImpl.swift +++ b/submodules/MediaPlayer/Sources/ChunkMediaPlayerDirectFetchSourceImpl.swift @@ -45,6 +45,7 @@ private func FFMpegLookaheadReader_readPacketCallback(userData: UnsafeMutableRaw memcpy(buffer, bytes, fetchedData.count) } let fetchedCount = Int32(fetchedData.count) + print("Fetched from \(context.readingOffset) (\(fetchedCount) bytes)") context.setReadingOffset(offset: context.readingOffset + Int64(fetchedCount)) if fetchedCount == 0 { return FFMPEG_CONSTANT_AVERROR_EOF @@ -79,12 +80,12 @@ private final class FFMpegLookaheadReader { var audioStream: FFMpegFileReader.StreamInfo? var videoStream: FFMpegFileReader.StreamInfo? - var seekInfo: FFMpegLookaheadThread.State.Seek? - var maxReadPts: FFMpegLookaheadThread.State.Seek? - var audioStreamState: FFMpegLookaheadThread.StreamState? - var videoStreamState: FFMpegLookaheadThread.StreamState? + var seekInfo: FFMpegLookahead.State.Seek? + var maxReadPts: FFMpegLookahead.State.Seek? + var audioStreamState: FFMpegLookahead.StreamState? + var videoStreamState: FFMpegLookahead.StreamState? - var reportedState: FFMpegLookaheadThread.State? + var reportedState: FFMpegLookahead.State? 
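+    // Absolute read offset into the resource, advanced by FFMpegLookaheadReader_readPacketCallback.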
var readingOffset: Int64 = 0 var isCancelled: Bool = false @@ -108,6 +109,8 @@ private final class FFMpegLookaheadReader { let avFormatContext = FFMpegAVFormatContext() avFormatContext.setIO(avIoContext) + self.setReadingOffset(offset: 0) + if !avFormatContext.openInput(withDirectFilePath: nil) { return nil } @@ -170,7 +173,7 @@ private final class FFMpegLookaheadReader { if let preferredStream = self.videoStream ?? self.audioStream { let pts = CMTimeMakeWithSeconds(params.seekToTimestamp, preferredTimescale: preferredStream.timeScale) - self.seekInfo = FFMpegLookaheadThread.State.Seek(streamIndex: preferredStream.index, pts: pts.value) + self.seekInfo = FFMpegLookahead.State.Seek(streamIndex: preferredStream.index, pts: pts.value) avFormatContext.seekFrame(forStreamIndex: Int32(preferredStream.index), pts: pts.value, positionOnKeyframe: true) } @@ -223,7 +226,7 @@ private final class FFMpegLookaheadReader { return } - let maxPtsSeconds = max(self.params.seekToTimestamp, currentTimestamp) + 10.0 + let maxPtsSeconds = max(self.params.seekToTimestamp, currentTimestamp) + self.params.lookaheadDuration var currentAudioPtsSecondsAdvanced: Double = 0.0 var currentVideoPtsSecondsAdvanced: Double = 0.0 @@ -258,14 +261,14 @@ private final class FFMpegLookaheadReader { break } - self.maxReadPts = FFMpegLookaheadThread.State.Seek(streamIndex: Int(packet.streamIndex), pts: packet.pts) + self.maxReadPts = FFMpegLookahead.State.Seek(streamIndex: Int(packet.streamIndex), pts: packet.pts) if let audioStream = self.audioStream, Int(packet.streamIndex) == audioStream.index { let pts = CMTimeMake(value: packet.pts, timescale: audioStream.timeScale) if let audioStreamState = self.audioStreamState { currentAudioPtsSecondsAdvanced += pts.seconds - audioStreamState.readableToTime.seconds } - self.audioStreamState = FFMpegLookaheadThread.StreamState( + self.audioStreamState = FFMpegLookahead.StreamState( info: audioStream, readableToTime: pts ) @@ -274,7 +277,7 @@ private final class FFMpegLookaheadReader { if let videoStreamState = self.videoStreamState { currentVideoPtsSecondsAdvanced += pts.seconds - videoStreamState.readableToTime.seconds } - self.videoStreamState = FFMpegLookaheadThread.StreamState( + self.videoStreamState = FFMpegLookahead.StreamState( info: videoStream, readableToTime: pts ) @@ -300,7 +303,7 @@ private final class FFMpegLookaheadReader { stateIsFullyInitialised = false } - let state = FFMpegLookaheadThread.State( + let state = FFMpegLookahead.State( seek: seekInfo, maxReadablePts: self.maxReadPts, audio: (stateIsFullyInitialised && self.maxReadPts != nil) ? self.audioStreamState : nil, @@ -315,45 +318,10 @@ private final class FFMpegLookaheadReader { } private final class FFMpegLookaheadThread: NSObject { - struct StreamState: Equatable { - let info: FFMpegFileReader.StreamInfo - let readableToTime: CMTime - - init(info: FFMpegFileReader.StreamInfo, readableToTime: CMTime) { - self.info = info - self.readableToTime = readableToTime - } - } - - struct State: Equatable { - struct Seek: Equatable { - var streamIndex: Int - var pts: Int64 - - init(streamIndex: Int, pts: Int64) { - self.streamIndex = streamIndex - self.pts = pts - } - } - - let seek: Seek - let maxReadablePts: Seek? - let audio: StreamState? - let video: StreamState? 
- let isEnded: Bool - - init(seek: Seek, maxReadablePts: Seek?, audio: StreamState?, video: StreamState?, isEnded: Bool) { - self.seek = seek - self.maxReadablePts = maxReadablePts - self.audio = audio - self.video = video - self.isEnded = isEnded - } - } - final class Params: NSObject { let seekToTimestamp: Double - let updateState: (State) -> Void + let lookaheadDuration: Double + let updateState: (FFMpegLookahead.State) -> Void let fetchInRange: (Range) -> Disposable let getDataInRange: (Range, @escaping (Data?) -> Void) -> Disposable let isDataCachedInRange: (Range) -> Bool @@ -363,7 +331,8 @@ private final class FFMpegLookaheadThread: NSObject { init( seekToTimestamp: Double, - updateState: @escaping (State) -> Void, + lookaheadDuration: Double, + updateState: @escaping (FFMpegLookahead.State) -> Void, fetchInRange: @escaping (Range) -> Disposable, getDataInRange: @escaping (Range, @escaping (Data?) -> Void) -> Disposable, isDataCachedInRange: @escaping (Range) -> Bool, @@ -372,6 +341,7 @@ private final class FFMpegLookaheadThread: NSObject { currentTimestamp: Atomic ) { self.seekToTimestamp = seekToTimestamp + self.lookaheadDuration = lookaheadDuration self.updateState = updateState self.fetchInRange = fetchInRange self.getDataInRange = getDataInRange @@ -414,14 +384,51 @@ private final class FFMpegLookaheadThread: NSObject { } } -private final class FFMpegLookahead { +final class FFMpegLookahead { + struct StreamState: Equatable { + let info: FFMpegFileReader.StreamInfo + let readableToTime: CMTime + + init(info: FFMpegFileReader.StreamInfo, readableToTime: CMTime) { + self.info = info + self.readableToTime = readableToTime + } + } + + struct State: Equatable { + struct Seek: Equatable { + var streamIndex: Int + var pts: Int64 + + init(streamIndex: Int, pts: Int64) { + self.streamIndex = streamIndex + self.pts = pts + } + } + + let seek: Seek + let maxReadablePts: Seek? + let audio: StreamState? + let video: StreamState? + let isEnded: Bool + + init(seek: Seek, maxReadablePts: Seek?, audio: StreamState?, video: StreamState?, isEnded: Bool) { + self.seek = seek + self.maxReadablePts = maxReadablePts + self.audio = audio + self.video = video + self.isEnded = isEnded + } + } + private let cancel = Promise() private let currentTimestamp = Atomic(value: nil) private let thread: Thread init( seekToTimestamp: Double, - updateState: @escaping (FFMpegLookaheadThread.State) -> Void, + lookaheadDuration: Double, + updateState: @escaping (FFMpegLookahead.State) -> Void, fetchInRange: @escaping (Range) -> Disposable, getDataInRange: @escaping (Range, @escaping (Data?) 
-> Void) -> Disposable, isDataCachedInRange: @escaping (Range) -> Bool, @@ -432,6 +439,7 @@ private final class FFMpegLookahead { selector: #selector(FFMpegLookaheadThread.entryPoint(_:)), object: FFMpegLookaheadThread.Params( seekToTimestamp: seekToTimestamp, + lookaheadDuration: lookaheadDuration, updateState: updateState, fetchInRange: fetchInRange, getDataInRange: getDataInRange, @@ -496,7 +504,7 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl { let lookaheadId = self.currentLookaheadId let resource = self.resource - let updateState: (FFMpegLookaheadThread.State) -> Void = { [weak self] state in + let updateState: (FFMpegLookahead.State) -> Void = { [weak self] state in Queue.mainQueue().async { guard let self else { return @@ -580,6 +588,7 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl { self.lookahead = FFMpegLookahead( seekToTimestamp: position, + lookaheadDuration: 10.0, updateState: updateState, fetchInRange: { range in return fetchedMediaResource( diff --git a/submodules/MediaPlayer/Sources/FFMpegFileReader.swift b/submodules/MediaPlayer/Sources/FFMpegFileReader.swift index 6f1cf016ed..28982408e9 100644 --- a/submodules/MediaPlayer/Sources/FFMpegFileReader.swift +++ b/submodules/MediaPlayer/Sources/FFMpegFileReader.swift @@ -26,16 +26,41 @@ private func FFMpegFileReader_readPacketCallback(userData: UnsafeMutableRawPoint return Int32(result) case let .resource(resource): let readCount = min(256 * 1024, Int64(bufferSize)) - let requestRange: Range = resource.readingPosition ..< (resource.readingPosition + readCount) - //TODO:improve thread safe read if incomplete - if let (file, readSize) = resource.mediaBox.internal_resourceData(id: resource.resource.id, size: resource.size, in: requestRange) { - let result = file.read(buffer, readSize) - if result == 0 { - return FFMPEG_CONSTANT_AVERROR_EOF + var bufferOffset = 0 + let doRead: (Range) -> Void = { range in + //TODO:improve thread safe read if incomplete + if let (file, readSize) = resource.mediaBox.internal_resourceData(id: resource.resource.id, size: resource.resourceSize, in: range) { + let effectiveReadSize = max(0, min(Int(readCount) - bufferOffset, readSize)) + let count = file.read(buffer.advanced(by: bufferOffset), effectiveReadSize) + bufferOffset += count + resource.readingPosition += Int64(count) } - resource.readingPosition += Int64(result) - return Int32(result) + } + + var mappedRangePosition: Int64 = 0 + for mappedRange in resource.mappedRanges { + let bytesToRead = readCount - Int64(bufferOffset) + if bytesToRead <= 0 { + break + } + + let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound + let mappedRangeReadingPosition = resource.readingPosition - mappedRangePosition + + if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize { + let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition + let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead) + if mappedRangeBytesToRead > 0 { + let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead) + doRead(mappedReadRange) + } + } + + mappedRangePosition += mappedRangeSize + } + if bufferOffset != 0 { + return Int32(bufferOffset) } else { return FFMPEG_CONSTANT_AVERROR_EOF } @@ -65,7 +90,7 @@ private func FFMpegFileReader_seekCallback(userData: UnsafeMutableRawPointer?, o final class FFMpegFileReader { enum 
SourceDescription { case file(String) - case resource(mediaBox: MediaBox, resource: MediaResource, size: Int64) + case resource(mediaBox: MediaBox, resource: MediaResource, resourceSize: Int64, mappedRanges: [Range]) } final class StreamInfo: Equatable { @@ -117,12 +142,21 @@ final class FFMpegFileReader { final class Resource { let mediaBox: MediaBox let resource: MediaResource + let resourceSize: Int64 + let mappedRanges: [Range] let size: Int64 var readingPosition: Int64 = 0 - init(mediaBox: MediaBox, resource: MediaResource, size: Int64) { + init(mediaBox: MediaBox, resource: MediaResource, resourceSize: Int64, mappedRanges: [Range]) { self.mediaBox = mediaBox self.resource = resource + self.resourceSize = resourceSize + self.mappedRanges = mappedRanges + + var size: Int64 = 0 + for range in mappedRanges { + size += range.upperBound - range.lowerBound + } self.size = size } } @@ -179,6 +213,11 @@ final class FFMpegFileReader { case index(Int) } + enum Seek { + case stream(streamIndex: Int, pts: Int64) + case direct(position: Double) + } + enum ReadFrameResult { case frame(MediaTrackFrame) case waitingForMoreData @@ -200,7 +239,7 @@ final class FFMpegFileReader { private var lastReadPts: (streamIndex: Int, pts: Int64)? private var isWaitingForMoreData: Bool = false - public init?(source: SourceDescription, passthroughDecoder: Bool = false, useHardwareAcceleration: Bool, selectedStream: SelectedStream, seek: (streamIndex: Int, pts: Int64)?, maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)?) { + public init?(source: SourceDescription, passthroughDecoder: Bool = false, useHardwareAcceleration: Bool, selectedStream: SelectedStream, seek: Seek?, maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)?) { let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals switch source { @@ -209,8 +248,8 @@ final class FFMpegFileReader { return nil } self.source = .file(file) - case let .resource(mediaBox, resource, size): - self.source = .resource(Source.Resource(mediaBox: mediaBox, resource: resource, size: size)) + case let .resource(mediaBox, resource, resourceSize, mappedRanges): + self.source = .resource(Source.Resource(mediaBox: mediaBox, resource: resource, resourceSize: resourceSize, mappedRanges: mappedRanges)) } self.maxReadablePts = maxReadablePts @@ -350,7 +389,12 @@ final class FFMpegFileReader { self.stream = stream if let seek { - avFormatContext.seekFrame(forStreamIndex: Int32(seek.streamIndex), pts: seek.pts, positionOnKeyframe: true) + switch seek { + case let .stream(streamIndex, pts): + avFormatContext.seekFrame(forStreamIndex: Int32(streamIndex), pts: pts, positionOnKeyframe: true) + case let .direct(position): + avFormatContext.seekFrame(forStreamIndex: Int32(stream.info.index), pts: CMTimeMakeWithSeconds(Float64(position), preferredTimescale: stream.info.timeScale).value, positionOnKeyframe: true) + } } else { avFormatContext.seekFrame(forStreamIndex: Int32(stream.info.index), pts: 0, positionOnKeyframe: true) } diff --git a/submodules/MediaPlayer/Sources/MediaDataReader.swift b/submodules/MediaPlayer/Sources/MediaDataReader.swift index ec7444fcef..ccdd8782c2 100644 --- a/submodules/MediaPlayer/Sources/MediaDataReader.swift +++ b/submodules/MediaPlayer/Sources/MediaDataReader.swift @@ -135,14 +135,27 @@ public final class FFMpegMediaDataReaderV2: MediaDataReader { self.isVideo = isVideo let source: FFMpegFileReader.SourceDescription - var seek: (streamIndex: Int, pts: Int64)? + var seek: FFMpegFileReader.Seek? 
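+        // seek is now an FFMpegFileReader.Seek, covering both stream-index/pts seeks
+        // and direct timestamp-based positioning.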
var maxReadablePts: (streamIndex: Int, pts: Int64, isEnded: Bool)? switch content { case let .tempFile(tempFile): source = .file(tempFile.file.path) case let .directStream(directStream): - source = .resource(mediaBox: directStream.mediaBox, resource: directStream.resource, size: directStream.size) - seek = (directStream.seek.streamIndex, directStream.seek.pts) + let mappedRanges: [Range] + #if DEBUG && false + var mappedRangesValue: [Range] = [] + var testOffset: Int64 = 0 + while testOffset < directStream.size { + let testBlock: Int64 = min(3 * 1024 + 1, directStream.size - testOffset) + mappedRangesValue.append(testOffset ..< (testOffset + testBlock)) + testOffset += testBlock + } + mappedRanges = mappedRangesValue + #else + mappedRanges = [0 ..< directStream.size] + #endif + source = .resource(mediaBox: directStream.mediaBox, resource: directStream.resource, resourceSize: directStream.size, mappedRanges: mappedRanges) + seek = .stream(streamIndex: directStream.seek.streamIndex, pts: directStream.seek.pts) maxReadablePts = directStream.maxReadablePts } diff --git a/submodules/MediaPlayer/Sources/MediaPlayerFramePreview.swift b/submodules/MediaPlayer/Sources/MediaPlayerFramePreview.swift index ef00996925..6cbc0a105e 100644 --- a/submodules/MediaPlayer/Sources/MediaPlayerFramePreview.swift +++ b/submodules/MediaPlayer/Sources/MediaPlayerFramePreview.swift @@ -4,6 +4,7 @@ import SwiftSignalKit import Postbox import TelegramCore import FFMpegBinding +import VideoToolbox public enum FramePreviewResult { case image(UIImage) @@ -151,3 +152,534 @@ public final class MediaPlayerFramePreview: FramePreview { } } } + +public final class MediaPlayerFramePreviewHLS: FramePreview { + private final class Impl { + private struct Part { + var timestamp: Int + var duration: Int + var range: Range + + init(timestamp: Int, duration: Int, range: Range) { + self.timestamp = timestamp + self.duration = duration + self.range = range + } + } + + private final class Playlist { + let dataFile: FileMediaReference + let initializationPart: Part + let parts: [Part] + + init(dataFile: FileMediaReference, initializationPart: Part, parts: [Part]) { + self.dataFile = dataFile + self.initializationPart = initializationPart + self.parts = parts + } + } + + let queue: Queue + let postbox: Postbox + let userLocation: MediaResourceUserLocation + let userContentType: MediaResourceUserContentType + let playlistFile: FileMediaReference + let mainDataFile: FileMediaReference + let alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)] + + private var playlist: Playlist? + private var alternativePlaylists: [Playlist] = [] + private var fetchPlaylistDisposable: Disposable? + private var playlistDisposable: Disposable? + + private var pendingFrame: (Int, FFMpegLookahead)? 
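+        // Most recently requested preview timestamp; shared with the caller's thread
+        // (hence the Atomic), so rapid scrubbing only decodes the latest request.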
+ private let nextRequestedFrame: Atomic + + let framePipe = ValuePipe() + + init( + queue: Queue, + postbox: Postbox, + userLocation: MediaResourceUserLocation, + userContentType: MediaResourceUserContentType, + playlistFile: FileMediaReference, + mainDataFile: FileMediaReference, + alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)], + nextRequestedFrame: Atomic + ) { + self.queue = queue + self.postbox = postbox + self.userLocation = userLocation + self.userContentType = userContentType + self.playlistFile = playlistFile + self.mainDataFile = mainDataFile + self.alternativeQualities = alternativeQualities + self.nextRequestedFrame = nextRequestedFrame + + self.loadPlaylist() + } + + deinit { + self.fetchPlaylistDisposable?.dispose() + self.playlistDisposable?.dispose() + } + + func generateFrame() { + if self.pendingFrame != nil { + return + } + + self.updateFrameRequest() + } + + func cancelPendingFrames() { + self.pendingFrame = nil + } + + private func loadPlaylist() { + if self.fetchPlaylistDisposable != nil { + return + } + + let loadPlaylist: (FileMediaReference, FileMediaReference) -> Signal = { playlistFile, dataFile in + return self.postbox.mediaBox.resourceData(playlistFile.media.resource) + |> mapToSignal { data -> Signal in + if !data.complete { + return .never() + } + + guard let data = try? Data(contentsOf: URL(fileURLWithPath: data.path)) else { + return .single(nil) + } + guard let playlistString = String(data: data, encoding: .utf8) else { + return .single(nil) + } + + var durations: [Int] = [] + var byteRanges: [Range] = [] + + let extinfRegex = try! NSRegularExpression(pattern: "EXTINF:(\\d+)", options: []) + let byteRangeRegex = try! NSRegularExpression(pattern: "EXT-X-BYTERANGE:(\\d+)@(\\d+)", options: []) + + let extinfResults = extinfRegex.matches(in: playlistString, range: NSRange(playlistString.startIndex..., in: playlistString)) + for result in extinfResults { + if let durationRange = Range(result.range(at: 1), in: playlistString) { + if let duration = Int(String(playlistString[durationRange])) { + durations.append(duration) + } + } + } + + let byteRangeResults = byteRangeRegex.matches(in: playlistString, range: NSRange(playlistString.startIndex..., in: playlistString)) + for result in byteRangeResults { + if let lengthRange = Range(result.range(at: 1), in: playlistString), let upperBoundRange = Range(result.range(at: 2), in: playlistString) { + if let length = Int(String(playlistString[lengthRange])), let lowerBound = Int(String(playlistString[upperBoundRange])) { + byteRanges.append(lowerBound ..< (lowerBound + length)) + } + } + } + + if durations.count != byteRanges.count { + return .single(nil) + } + + var durationOffset = 0 + var initializationPart: Part? 
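+                    // Everything before the first segment's byte range is treated as the
+                    // stream's initialization section.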
+ var parts: [Part] = [] + for i in 0 ..< durations.count { + let part = Part(timestamp: durationOffset, duration: durations[i], range: byteRanges[i]) + if i == 0 { + initializationPart = Part(timestamp: 0, duration: 0, range: 0 ..< byteRanges[i].lowerBound) + } + parts.append(part) + durationOffset += durations[i] + } + + if let initializationPart { + return .single(Playlist(dataFile: dataFile, initializationPart: initializationPart, parts: parts)) + } else { + return .single(nil) + } + } + } + + let fetchPlaylist: (FileMediaReference) -> Signal = { playlistFile in + return fetchedMediaResource( + mediaBox: self.postbox.mediaBox, + userLocation: self.userLocation, + userContentType: self.userContentType, + reference: playlistFile.resourceReference(playlistFile.media.resource) + ) + |> ignoreValues + |> `catch` { _ -> Signal in + return .complete() + } + } + + var fetchSignals: [Signal] = [] + fetchSignals.append(fetchPlaylist(self.playlistFile)) + for quality in self.alternativeQualities { + fetchSignals.append(fetchPlaylist(quality.playlist)) + } + self.fetchPlaylistDisposable = combineLatest(fetchSignals).startStrict() + + self.playlistDisposable = (combineLatest(queue: self.queue, + loadPlaylist(self.playlistFile, self.mainDataFile), + combineLatest(self.alternativeQualities.map { + return loadPlaylist($0.playlist, $0.dataFile) + }) + ) + |> deliverOn(self.queue)).startStrict(next: { [weak self] mainPlaylist, alternativePlaylists in + guard let self else { + return + } + + self.playlist = mainPlaylist + self.alternativePlaylists = alternativePlaylists.compactMap{ $0 } + }) + } + + private func updateFrameRequest() { + guard let playlist = self.playlist else { + return + } + if self.pendingFrame != nil { + return + } + guard let nextRequestedFrame = self.nextRequestedFrame.swap(nil) else { + return + } + + var allPlaylists: [Playlist] = [playlist] + allPlaylists.append(contentsOf: self.alternativePlaylists) + outer: for playlist in allPlaylists { + if let dataFileSize = playlist.dataFile.media.size, let part = playlist.parts.first(where: { $0.timestamp <= Int(nextRequestedFrame) && ($0.timestamp + $0.duration) > Int(nextRequestedFrame) }) { + let mappedRanges: [Range] = [ + Int64(playlist.initializationPart.range.lowerBound) ..< Int64(playlist.initializationPart.range.upperBound), + Int64(part.range.lowerBound) ..< Int64(part.range.upperBound) + ] + for mappedRange in mappedRanges { + if !self.postbox.mediaBox.internal_resourceDataIsCached(id: playlist.dataFile.media.resource.id, size: dataFileSize, in: mappedRange) { + continue outer + } + } + + if let directReader = FFMpegFileReader( + source: .resource(mediaBox: self.postbox.mediaBox, resource: playlist.dataFile.media.resource, resourceSize: dataFileSize, mappedRanges: mappedRanges), + useHardwareAcceleration: false, + selectedStream: .mediaType(.video), + seek: .direct(position: nextRequestedFrame), + maxReadablePts: nil + ) { + var lastFrame: CMSampleBuffer? + findFrame: while true { + switch directReader.readFrame() { + case let .frame(frame): + if lastFrame == nil { + lastFrame = frame.sampleBuffer + } else if CMSampleBufferGetPresentationTimeStamp(frame.sampleBuffer).seconds > nextRequestedFrame { + break findFrame + } else { + lastFrame = frame.sampleBuffer + } + default: + break findFrame + } + } + if let lastFrame { + if let imageBuffer = CMSampleBufferGetImageBuffer(lastFrame) { + var cgImage: CGImage? 
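+                            // Convert the decoded pixel buffer straight to a CGImage for the preview.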
+ VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage) + if let cgImage { + self.framePipe.putNext(.image(UIImage(cgImage: cgImage))) + } + } + } + } + + self.updateFrameRequest() + return + } + } + + let initializationPart = playlist.initializationPart + guard let part = playlist.parts.first(where: { $0.timestamp <= Int(nextRequestedFrame) && ($0.timestamp + $0.duration) > Int(nextRequestedFrame) }) else { + return + } + guard let dataFileSize = self.mainDataFile.media.size else { + return + } + + let resource = self.mainDataFile.media.resource + let postbox = self.postbox + let userLocation = self.userLocation + let userContentType = self.userContentType + let dataFile = self.mainDataFile + + let partRange: Range = Int64(part.range.lowerBound) ..< Int64(part.range.upperBound) + + let mappedRanges: [Range] = [ + Int64(initializationPart.range.lowerBound) ..< Int64(initializationPart.range.upperBound), + partRange + ] + var mappedSize: Int64 = 0 + for range in mappedRanges { + mappedSize += range.upperBound - range.lowerBound + } + + let queue = self.queue + let updateState: (FFMpegLookahead.State) -> Void = { [weak self] state in + queue.async { + guard let self else { + return + } + if self.pendingFrame?.0 != part.timestamp { + return + } + guard let video = state.video else { + return + } + + if let directReader = FFMpegFileReader( + source: .resource(mediaBox: postbox.mediaBox, resource: resource, resourceSize: dataFileSize, mappedRanges: mappedRanges), + useHardwareAcceleration: false, + selectedStream: .index(video.info.index), + seek: .stream(streamIndex: state.seek.streamIndex, pts: state.seek.pts), + maxReadablePts: (video.info.index, video.readableToTime.value, state.isEnded) + ) { + switch directReader.readFrame() { + case let .frame(frame): + if let imageBuffer = CMSampleBufferGetImageBuffer(frame.sampleBuffer) { + var cgImage: CGImage? 
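+ // Same conversion as in the fully cached fast path above, except this frame was decoded
+ // via the lookahead-driven reader after the required byte ranges were fetched.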
+ VTCreateCGImageFromCVPixelBuffer(imageBuffer, options: nil, imageOut: &cgImage) + if let cgImage { + self.framePipe.putNext(.image(UIImage(cgImage: cgImage))) + } + } + default: + break + } + } + + self.pendingFrame = nil + self.updateFrameRequest() + } + } + + let lookahead = FFMpegLookahead( + seekToTimestamp: 0.0, + lookaheadDuration: 0.0, + updateState: updateState, + fetchInRange: { fetchRange in + let disposable = DisposableSet() + + let readCount = fetchRange.upperBound - fetchRange.lowerBound + var readingPosition = fetchRange.lowerBound + + var bufferOffset = 0 + let doRead: (Range) -> Void = { range in + disposable.add(fetchedMediaResource( + mediaBox: postbox.mediaBox, + userLocation: userLocation, + userContentType: userContentType, + reference: dataFile.resourceReference(dataFile.media.resource), + range: (range, .elevated), + statsCategory: .video, + preferBackgroundReferenceRevalidation: false + ).startStrict()) + let count = Int(range.upperBound - range.lowerBound) + bufferOffset += count + readingPosition += Int64(count) + } + + var mappedRangePosition: Int64 = 0 + for mappedRange in mappedRanges { + let bytesToRead = readCount - Int64(bufferOffset) + if bytesToRead <= 0 { + break + } + + let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound + let mappedRangeReadingPosition = readingPosition - mappedRangePosition + + if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize { + let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition + let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead) + if mappedRangeBytesToRead > 0 { + let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead) + doRead(mappedReadRange) + } + } + + mappedRangePosition += mappedRangeSize + } + + return disposable + }, + getDataInRange: { getRange, completion in + var signals: [Signal<(Data, Bool), NoError>] = [] + + let readCount = getRange.upperBound - getRange.lowerBound + var readingPosition = getRange.lowerBound + + var bufferOffset = 0 + let doRead: (Range) -> Void = { range in + signals.append(postbox.mediaBox.resourceData(resource, size: dataFileSize, in: range, mode: .complete)) + + let readSize = Int(range.upperBound - range.lowerBound) + let effectiveReadSize = max(0, min(Int(readCount) - bufferOffset, readSize)) + let count = effectiveReadSize + bufferOffset += count + readingPosition += Int64(count) + } + + var mappedRangePosition: Int64 = 0 + for mappedRange in mappedRanges { + let bytesToRead = readCount - Int64(bufferOffset) + if bytesToRead <= 0 { + break + } + + let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound + let mappedRangeReadingPosition = readingPosition - mappedRangePosition + + if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize { + let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition + let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead) + if mappedRangeBytesToRead > 0 { + let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead) + doRead(mappedReadRange) + } + } + + mappedRangePosition += mappedRangeSize + } + + let singal = combineLatest(signals) + |> map { results -> Data? 
in + var result = Data() + for (partData, partIsComplete) in results { + if !partIsComplete { + return nil + } + result.append(partData) + } + return result + } + + return singal.start(next: { result in + completion(result) + }) + }, + isDataCachedInRange: { cachedRange in + let readCount = cachedRange.upperBound - cachedRange.lowerBound + var readingPosition = cachedRange.lowerBound + + var allDataIsCached = true + + var bufferOffset = 0 + let doRead: (Range) -> Void = { range in + let isCached = postbox.mediaBox.internal_resourceDataIsCached( + id: resource.id, + size: dataFileSize, + in: range + ) + if !isCached { + allDataIsCached = false + } + + let effectiveReadSize = Int(range.upperBound - range.lowerBound) + let count = effectiveReadSize + bufferOffset += count + readingPosition += Int64(count) + } + + var mappedRangePosition: Int64 = 0 + for mappedRange in mappedRanges { + let bytesToRead = readCount - Int64(bufferOffset) + if bytesToRead <= 0 { + break + } + + let mappedRangeSize = mappedRange.upperBound - mappedRange.lowerBound + let mappedRangeReadingPosition = readingPosition - mappedRangePosition + + if mappedRangeReadingPosition >= 0 && mappedRangeReadingPosition < mappedRangeSize { + let mappedRangeAvailableBytesToRead = mappedRangeSize - mappedRangeReadingPosition + let mappedRangeBytesToRead = min(bytesToRead, mappedRangeAvailableBytesToRead) + if mappedRangeBytesToRead > 0 { + let mappedReadRange = (mappedRange.lowerBound + mappedRangeReadingPosition) ..< (mappedRange.lowerBound + mappedRangeReadingPosition + mappedRangeBytesToRead) + doRead(mappedReadRange) + } + } + + mappedRangePosition += mappedRangeSize + } + + return allDataIsCached + }, + size: mappedSize + ) + + self.pendingFrame = (part.timestamp, lookahead) + + lookahead.updateCurrentTimestamp(timestamp: 0.0) + } + } + + private let queue: Queue + private let impl: QueueLocalObject + + public var generatedFrames: Signal { + return Signal { subscriber in + let disposable = MetaDisposable() + self.impl.with { impl in + disposable.set(impl.framePipe.signal().start(next: { result in + subscriber.putNext(result) + })) + } + return disposable + } + } + + private let nextRequestedFrame = Atomic(value: nil) + + public init( + postbox: Postbox, + userLocation: MediaResourceUserLocation, + userContentType: MediaResourceUserContentType, + playlistFile: FileMediaReference, + mainDataFile: FileMediaReference, + alternativeQualities: [(playlist: FileMediaReference, dataFile: FileMediaReference)] + ) { + let queue = Queue() + self.queue = queue + let nextRequestedFrame = self.nextRequestedFrame + self.impl = QueueLocalObject(queue: queue, generate: { + return Impl( + queue: queue, + postbox: postbox, + userLocation: userLocation, + userContentType: userContentType, + playlistFile: playlistFile, + mainDataFile: mainDataFile, + alternativeQualities: alternativeQualities, + nextRequestedFrame: nextRequestedFrame + ) + }) + } + + public func generateFrame(at timestamp: Double) { + let _ = self.nextRequestedFrame.swap(timestamp) + self.impl.with { impl in + impl.generateFrame() + } + } + + public func cancelPendingFrames() { + self.impl.with { impl in + impl.cancelPendingFrames() + } + } +} + diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift index 50b17933d5..8eea2892dd 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift @@ -918,8 +918,12 @@ public final class 
PresentationCallImpl: PresentationCall { self.audioSessionShouldBeActive.set(false) if wasActive { let debugLogValue = Promise() - self.ongoingContext?.stop(debugLogValue: debugLogValue) - let _ = self.conferenceCall?.leave(terminateIfPossible: false).start() + if let conferenceCall = self.conferenceCall { + debugLogValue.set(conferenceCall.debugLog.get()) + let _ = conferenceCall.leave(terminateIfPossible: false).start() + } else { + self.ongoingContext?.stop(debugLogValue: debugLogValue) + } } } var terminating = false @@ -1198,8 +1202,12 @@ public final class PresentationCallImpl: PresentationCall { public func hangUp() -> Signal { let debugLogValue = Promise() self.callSessionManager.drop(internalId: self.internalId, reason: .hangUp, debugLog: debugLogValue.get()) - self.ongoingContext?.stop(debugLogValue: debugLogValue) - let _ = self.conferenceCall?.leave(terminateIfPossible: false).start() + if let conferenceCall = self.conferenceCall { + debugLogValue.set(conferenceCall.debugLog.get()) + let _ = conferenceCall.leave(terminateIfPossible: false).start() + } else { + self.ongoingContext?.stop(debugLogValue: debugLogValue) + } return self.hungUpPromise.get() } @@ -1207,8 +1215,12 @@ public final class PresentationCallImpl: PresentationCall { public func rejectBusy() { self.callSessionManager.drop(internalId: self.internalId, reason: .busy, debugLog: .single(nil)) let debugLog = Promise() - self.ongoingContext?.stop(debugLogValue: debugLog) - let _ = self.conferenceCall?.leave(terminateIfPossible: false).start() + if let conferenceCall = self.conferenceCall { + debugLog.set(conferenceCall.debugLog.get()) + let _ = conferenceCall.leave(terminateIfPossible: false).start() + } else { + self.ongoingContext?.stop(debugLogValue: debugLog) + } } public func toggleIsMuted() { @@ -1262,7 +1274,11 @@ public final class PresentationCallImpl: PresentationCall { guard let screencastCapturer = screencastCapturer else { return } - screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1) + guard let sampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: screencastFrame.0) else { + return + } + + screencastCapturer.injectSampleBuffer(sampleBuffer, rotation: screencastFrame.1, completion: {}) })) self.screencastAudioDataDisposable.set((screencastBufferServerContext.audioData |> deliverOnMainQueue).start(next: { [weak self] data in @@ -1467,3 +1483,36 @@ public final class PresentationCallImpl: PresentationCall { self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera) } } + +func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? { + var maybeFormat: CMVideoFormatDescription? + let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat) + if status != noErr { + return nil + } + guard let format = maybeFormat else { + return nil + } + + var timingInfo = CMSampleTimingInfo( + duration: CMTimeMake(value: 1, timescale: 30), + presentationTimeStamp: CMTimeMake(value: 0, timescale: 30), + decodeTimeStamp: CMTimeMake(value: 0, timescale: 30) + ) + + var maybeSampleBuffer: CMSampleBuffer? 
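+ // Wrap the pixel buffer in a ready-to-display sample buffer. The 30 fps timing above is
+ // nominal: display order is governed by the kCMSampleAttachmentKey_DisplayImmediately
+ // attachment applied below, not by these timestamps.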
+ let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer) + + if (bufferStatus != noErr) { + return nil + } + guard let sampleBuffer = maybeSampleBuffer else { + return nil + } + + let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray + let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary + dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber + + return sampleBuffer +} diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift index de0c83eb2b..4ef10924e5 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift @@ -321,12 +321,12 @@ private extension CurrentImpl { } } - func stop(account: Account, reportCallId: CallId?) { + func stop(account: Account, reportCallId: CallId?, debugLog: Promise) { switch self { case let .call(callContext): - callContext.stop(account: account, reportCallId: reportCallId) + callContext.stop(account: account, reportCallId: reportCallId, debugLog: debugLog) case .mediaStream, .externalMediaStream: - break + debugLog.set(.single(nil)) } } @@ -466,6 +466,138 @@ public func allocateCallLogPath(account: Account) -> String { return "\(path)/\(name).log" } +private protocol ScreencastIPCContext: AnyObject { + var isActive: Signal { get } + + func requestScreencast() -> Signal<(String, UInt32), NoError>? + func setJoinResponse(clientParams: String) + func disableScreencast(account: Account) +} + +private final class ScreencastInProcessIPCContext: ScreencastIPCContext { + private let isConference: Bool + + private let screencastBufferServerContext: IpcGroupCallBufferAppContext + private var screencastCallContext: ScreencastContext? + private let screencastCapturer: OngoingCallVideoCapturer + private var screencastFramesDisposable: Disposable? + private var screencastAudioDataDisposable: Disposable? + + var isActive: Signal { + return self.screencastBufferServerContext.isActive + } + + init(basePath: String, isConference: Bool) { + self.isConference = isConference + + let screencastBufferServerContext = IpcGroupCallBufferAppContext(basePath: basePath + "/broadcast-coordination") + self.screencastBufferServerContext = screencastBufferServerContext + let screencastCapturer = OngoingCallVideoCapturer(isCustom: true) + self.screencastCapturer = screencastCapturer + self.screencastFramesDisposable = (screencastBufferServerContext.frames + |> deliverOnMainQueue).start(next: { [weak screencastCapturer] screencastFrame in + guard let screencastCapturer = screencastCapturer else { + return + } + guard let sampleBuffer = sampleBufferFromPixelBuffer(pixelBuffer: screencastFrame.0) else { + return + } + screencastCapturer.injectSampleBuffer(sampleBuffer, rotation: screencastFrame.1, completion: {}) + }) + self.screencastAudioDataDisposable = (screencastBufferServerContext.audioData + |> deliverOnMainQueue).start(next: { [weak self] data in + Queue.mainQueue().async { + guard let self else { + return + } + self.screencastCallContext?.addExternalAudioData(data: data) + } + }) + } + + deinit { + self.screencastFramesDisposable?.dispose() + self.screencastAudioDataDisposable?.dispose() + } + + func requestScreencast() -> Signal<(String, UInt32), NoError>? 
{ + if self.screencastCallContext == nil { + let screencastCallContext = InProcessScreencastContext( + context: OngoingGroupCallContext( + audioSessionActive: .single(true), + video: self.screencastCapturer, + requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, + rejoinNeeded: { }, + outgoingAudioBitrateKbit: nil, + videoContentType: .screencast, + enableNoiseSuppression: false, + disableAudioInput: true, + enableSystemMute: false, + preferX264: false, + logPath: "", + onMutedSpeechActivityDetected: { _ in }, + encryptionKey: nil, + isConference: self.isConference, + sharedAudioDevice: nil + ) + ) + self.screencastCallContext = screencastCallContext + return screencastCallContext.joinPayload + } else { + return nil + } + } + + func setJoinResponse(clientParams: String) { + if let screencastCallContext = self.screencastCallContext { + screencastCallContext.setRTCJoinResponse(clientParams: clientParams) + } + } + + func disableScreencast(account: Account) { + if let screencastCallContext = self.screencastCallContext { + self.screencastCallContext = nil + screencastCallContext.stop(account: account, reportCallId: nil) + + self.screencastBufferServerContext.stopScreencast() + } + } +} + +private final class ScreencastEmbeddedIPCContext: ScreencastIPCContext { + private let serverContext: IpcGroupCallEmbeddedAppContext + + var isActive: Signal { + return self.serverContext.isActive + } + + init(basePath: String) { + self.serverContext = IpcGroupCallEmbeddedAppContext(basePath: basePath + "/embedded-broadcast-coordination") + } + + func requestScreencast() -> Signal<(String, UInt32), NoError>? { + if let id = self.serverContext.startScreencast() { + return self.serverContext.joinPayload + |> filter { joinPayload -> Bool in + return joinPayload.id == id + } + |> map { joinPayload -> (String, UInt32) in + return (joinPayload.data, joinPayload.ssrc) + } + } else { + return nil + } + } + + func setJoinResponse(clientParams: String) { + self.serverContext.joinResponse = IpcGroupCallEmbeddedAppContext.JoinResponse(data: clientParams) + } + + func disableScreencast(account: Account) { + self.serverContext.stopScreencast() + } +} + public final class PresentationGroupCallImpl: PresentationGroupCall { private enum InternalState { case requesting @@ -629,9 +761,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { let externalMediaStream = Promise() - private var screencastCallContext: OngoingGroupCallContext? - private var screencastBufferServerContext: IpcGroupCallBufferAppContext? - private var screencastCapturer: OngoingCallVideoCapturer? + private var screencastIPCContext: ScreencastIPCContext? private struct SsrcMapping { var peerId: EnginePeer.Id @@ -860,8 +990,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { return self.isSpeakingPromise.get() } - private var screencastFramesDisposable: Disposable? - private var screencastAudioDataDisposable: Disposable? private var screencastStateDisposable: Disposable? public let isStream: Bool @@ -876,6 +1004,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { public var onMutedSpeechActivityDetected: ((Bool) -> Void)? 
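+ // Resolved with the call context's final debug log when the call stops; it is handed to
+ // genericCallContext.stop() on removal and read by PresentationCallImpl when this group
+ // call backs a conference call.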
+ let debugLog = Promise() + init( accountContext: AccountContext, audioSession: ManagedAudioSession, @@ -1149,26 +1279,24 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { self.requestCall(movingFromBroadcastToRtc: false) } - let basePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination" - let screencastBufferServerContext = IpcGroupCallBufferAppContext(basePath: basePath) - self.screencastBufferServerContext = screencastBufferServerContext - let screencastCapturer = OngoingCallVideoCapturer(isCustom: true) - self.screencastCapturer = screencastCapturer - self.screencastFramesDisposable = (screencastBufferServerContext.frames - |> deliverOnMainQueue).start(next: { [weak screencastCapturer] screencastFrame in - guard let screencastCapturer = screencastCapturer else { - return - } - screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1) - }) - self.screencastAudioDataDisposable = (screencastBufferServerContext.audioData - |> deliverOnMainQueue).start(next: { [weak self] data in - guard let strongSelf = self else { - return - } - strongSelf.screencastCallContext?.addExternalAudioData(data: data) - }) - self.screencastStateDisposable = (screencastBufferServerContext.isActive + var useIPCContext = "".isEmpty + if let data = self.accountContext.currentAppConfiguration.with({ $0 }).data, data["ios_killswitch_use_inprocess_screencast"] != nil { + useIPCContext = false + } + + let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type" + + let screencastIPCContext: ScreencastIPCContext + if useIPCContext { + screencastIPCContext = ScreencastEmbeddedIPCContext(basePath: self.accountContext.sharedContext.basePath) + let _ = try? "ipc".write(toFile: embeddedBroadcastImplementationTypePath, atomically: true, encoding: .utf8) + } else { + screencastIPCContext = ScreencastInProcessIPCContext(basePath: self.accountContext.sharedContext.basePath, isConference: self.isConference) + let _ = try? 
"legacy".write(toFile: embeddedBroadcastImplementationTypePath, atomically: true, encoding: .utf8) + } + self.screencastIPCContext = screencastIPCContext + + self.screencastStateDisposable = (screencastIPCContext.isActive |> distinctUntilChanged |> deliverOnMainQueue).start(next: { [weak self] isActive in guard let strongSelf = self else { @@ -1228,8 +1356,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { self.peerUpdatesSubscription?.dispose() - self.screencastFramesDisposable?.dispose() - self.screencastAudioDataDisposable?.dispose() self.screencastStateDisposable?.dispose() self.internal_isRemoteConnectedDisposable?.dispose() @@ -2658,10 +2784,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { } self.markedAsCanBeRemoved = true - self.genericCallContext?.stop(account: self.account, reportCallId: self.conferenceFromCallId) - - //self.screencastIpcContext = nil - self.screencastCallContext?.stop(account: self.account, reportCallId: nil) + self.genericCallContext?.stop(account: self.account, reportCallId: self.conferenceFromCallId, debugLog: self.debugLog) + self.screencastIPCContext?.disableScreencast(account: self.account) self._canBeRemoved.set(.single(true)) @@ -3106,59 +3230,50 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { } private func requestScreencast() { - guard let callInfo = self.internalState.callInfo, self.screencastCallContext == nil else { + guard let callInfo = self.internalState.callInfo else { return } self.hasScreencast = true - - let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil, isConference: self.isConference, sharedAudioDevice: nil) - self.screencastCallContext = screencastCallContext - - self.screencastJoinDisposable.set((screencastCallContext.joinPayload - |> take(1) - |> deliverOnMainQueue).start(next: { [weak self] joinPayload in - guard let strongSelf = self else { - return - } - - strongSelf.requestDisposable.set((strongSelf.accountContext.engine.calls.joinGroupCallAsScreencast( - callId: callInfo.id, - accessHash: callInfo.accessHash, - joinPayload: joinPayload.0 - ) - |> deliverOnMainQueue).start(next: { joinCallResult in - guard let strongSelf = self, let screencastCallContext = strongSelf.screencastCallContext else { + if let screencastIPCContext = self.screencastIPCContext, let joinPayload = screencastIPCContext.requestScreencast() { + self.screencastJoinDisposable.set((joinPayload + |> take(1) + |> deliverOnMainQueue).start(next: { [weak self] joinPayload in + guard let strongSelf = self else { return } - let clientParams = joinCallResult.jsonParams - screencastCallContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false) - screencastCallContext.setJoinResponse(payload: clientParams) - }, error: { error in - guard let _ = self else { - return - } + strongSelf.requestDisposable.set((strongSelf.accountContext.engine.calls.joinGroupCallAsScreencast( + callId: callInfo.id, + accessHash: callInfo.accessHash, + joinPayload: joinPayload.0 + ) + |> deliverOnMainQueue).start(next: { joinCallResult in + guard let strongSelf = self, let screencastIPCContext = 
strongSelf.screencastIPCContext else { + return + } + screencastIPCContext.setJoinResponse(clientParams: joinCallResult.jsonParams) + + }, error: { error in + guard let _ = self else { + return + } + })) })) - })) + } } public func disableScreencast() { self.hasScreencast = false - if let screencastCallContext = self.screencastCallContext { - self.screencastCallContext = nil - screencastCallContext.stop(account: self.account, reportCallId: nil) + self.screencastIPCContext?.disableScreencast(account: self.account) + + let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo - let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo - - if let callInfo = maybeCallInfo { - self.screencastJoinDisposable.set(self.accountContext.engine.calls.leaveGroupCallAsScreencast( - callId: callInfo.id, - accessHash: callInfo.accessHash - ).start()) - } - - self.screencastBufferServerContext?.stopScreencast() + if let callInfo = maybeCallInfo { + self.screencastJoinDisposable.set(self.accountContext.engine.calls.leaveGroupCallAsScreencast( + callId: callInfo.id, + accessHash: callInfo.accessHash + ).start()) } } @@ -3608,3 +3723,34 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { |> runOn(.mainQueue()) } } + +private protocol ScreencastContext: AnyObject { + func addExternalAudioData(data: Data) + func stop(account: Account, reportCallId: CallId?) + func setRTCJoinResponse(clientParams: String) +} + +private final class InProcessScreencastContext: ScreencastContext { + private let context: OngoingGroupCallContext + + var joinPayload: Signal<(String, UInt32), NoError> { + return self.context.joinPayload + } + + init(context: OngoingGroupCallContext) { + self.context = context + } + + func addExternalAudioData(data: Data) { + self.context.addExternalAudioData(data: data) + } + + func stop(account: Account, reportCallId: CallId?) { + self.context.stop(account: account, reportCallId: reportCallId, debugLog: Promise()) + } + + func setRTCJoinResponse(clientParams: String) { + self.context.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false, isUnifiedBroadcast: false) + self.context.setJoinResponse(payload: clientParams) + } +} diff --git a/submodules/TelegramCallsUI/Sources/SampleBufferVideoRenderingView.swift b/submodules/TelegramCallsUI/Sources/SampleBufferVideoRenderingView.swift index 6ac153e02e..78e2253b13 100644 --- a/submodules/TelegramCallsUI/Sources/SampleBufferVideoRenderingView.swift +++ b/submodules/TelegramCallsUI/Sources/SampleBufferVideoRenderingView.swift @@ -8,39 +8,6 @@ import TelegramVoip import AVFoundation import LibYuvBinding -private func sampleBufferFromPixelBuffer(pixelBuffer: CVPixelBuffer) -> CMSampleBuffer? { - var maybeFormat: CMVideoFormatDescription? - let status = CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescriptionOut: &maybeFormat) - if status != noErr { - return nil - } - guard let format = maybeFormat else { - return nil - } - - var timingInfo = CMSampleTimingInfo( - duration: CMTimeMake(value: 1, timescale: 30), - presentationTimeStamp: CMTimeMake(value: 0, timescale: 30), - decodeTimeStamp: CMTimeMake(value: 0, timescale: 30) - ) - - var maybeSampleBuffer: CMSampleBuffer? 
- let bufferStatus = CMSampleBufferCreateReadyWithImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer, formatDescription: format, sampleTiming: &timingInfo, sampleBufferOut: &maybeSampleBuffer) - - if (bufferStatus != noErr) { - return nil - } - guard let sampleBuffer = maybeSampleBuffer else { - return nil - } - - let attachments: NSArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true)! as NSArray - let dict: NSMutableDictionary = attachments[0] as! NSMutableDictionary - dict[kCMSampleAttachmentKey_DisplayImmediately as NSString] = true as NSNumber - - return sampleBuffer -} - private func copyI420BufferToNV12Buffer(buffer: OngoingGroupCallContext.VideoFrameData.I420Buffer, pixelBuffer: CVPixelBuffer) -> Bool { guard CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange else { return false diff --git a/submodules/TelegramUI/Components/AvatarUploadToastScreen/BUILD b/submodules/TelegramUI/Components/AvatarUploadToastScreen/BUILD new file mode 100644 index 0000000000..a1ebb83491 --- /dev/null +++ b/submodules/TelegramUI/Components/AvatarUploadToastScreen/BUILD @@ -0,0 +1,30 @@ +load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") + +swift_library( + name = "AvatarUploadToastScreen", + module_name = "AvatarUploadToastScreen", + srcs = glob([ + "Sources/**/*.swift", + ]), + copts = [ + "-warnings-as-errors", + ], + deps = [ + "//submodules/Display", + "//submodules/TelegramPresentationData", + "//submodules/ComponentFlow", + "//submodules/Components/ComponentDisplayAdapters", + "//submodules/Postbox", + "//submodules/TelegramCore", + "//submodules/SSignalKit/SwiftSignalKit", + "//submodules/Components/ViewControllerComponent", + "//submodules/Components/MultilineTextComponent", + "//submodules/AccountContext", + "//submodules/RadialStatusNode", + "//submodules/TelegramUI/Components/AnimatedTextComponent", + "//submodules/TelegramUI/Components/PlainButtonComponent", + ], + visibility = [ + "//visibility:public", + ], +) diff --git a/submodules/TelegramUI/Components/AvatarUploadToastScreen/Sources/AvatarUploadToastScreen.swift b/submodules/TelegramUI/Components/AvatarUploadToastScreen/Sources/AvatarUploadToastScreen.swift new file mode 100644 index 0000000000..6adb8a69c4 --- /dev/null +++ b/submodules/TelegramUI/Components/AvatarUploadToastScreen/Sources/AvatarUploadToastScreen.swift @@ -0,0 +1,466 @@ +import Foundation +import UIKit +import Display +import TelegramPresentationData +import ComponentFlow +import ComponentDisplayAdapters +import AppBundle +import ViewControllerComponent +import AccountContext +import MultilineTextComponent +import RadialStatusNode +import SwiftSignalKit +import AnimatedTextComponent +import PlainButtonComponent + +private final class AvatarUploadToastScreenComponent: Component { + let context: AccountContext + let image: UIImage + let uploadStatus: Signal + let arrowTarget: () -> (UIView, CGRect)? 
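+ // arrowTarget supplies the view and rect the toast's arrow should point at, re-queried on
+ // each layout pass; viewUploadedAvatar (declared below) runs when the user taps "View".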
+ let viewUploadedAvatar: () -> Void + + init(context: AccountContext, image: UIImage, uploadStatus: Signal, arrowTarget: @escaping () -> (UIView, CGRect)?, viewUploadedAvatar: @escaping () -> Void) { + self.context = context + self.image = image + self.uploadStatus = uploadStatus + self.arrowTarget = arrowTarget + self.viewUploadedAvatar = viewUploadedAvatar + } + + static func ==(lhs: AvatarUploadToastScreenComponent, rhs: AvatarUploadToastScreenComponent) -> Bool { + return true + } + + final class View: UIView { + private let contentView: UIView + private let backgroundView: BlurredBackgroundView + + private let backgroundMaskView: UIView + private let backgroundMainMaskView: UIView + private let backgroundArrowMaskView: UIImageView + + private let avatarView: UIImageView + private let progressNode: RadialStatusNode + private let content = ComponentView() + private let actionButton = ComponentView() + + private var isUpdating: Bool = false + private var component: AvatarUploadToastScreenComponent? + private var environment: EnvironmentType? + private weak var state: EmptyComponentState? + + private var status: PeerInfoAvatarUploadStatus = .progress(0.0) + private var statusDisposable: Disposable? + + private var doneTimer: Foundation.Timer? + private var currentIsDone: Bool = false + + private var isDisplaying: Bool = false + + var targetAvatarView: UIView? { + return self.avatarView + } + + override init(frame: CGRect) { + self.contentView = UIView() + + self.backgroundView = BlurredBackgroundView(color: .clear, enableBlur: true) + + self.backgroundMaskView = UIView() + + self.backgroundMainMaskView = UIView() + self.backgroundMainMaskView.backgroundColor = .white + + self.backgroundArrowMaskView = UIImageView() + + self.avatarView = UIImageView() + self.progressNode = RadialStatusNode(backgroundNodeColor: .clear) + + super.init(frame: frame) + + self.backgroundView.mask = self.backgroundMaskView + self.backgroundMaskView.addSubview(self.backgroundMainMaskView) + self.backgroundMaskView.addSubview(self.backgroundArrowMaskView) + self.addSubview(self.backgroundView) + + self.addSubview(self.contentView) + self.contentView.addSubview(self.avatarView) + self.contentView.addSubview(self.progressNode.view) + } + + required init?(coder: NSCoder) { + preconditionFailure() + } + + deinit { + self.statusDisposable?.dispose() + self.doneTimer?.invalidate() + } + + override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ + if !self.contentView.frame.contains(point) { + return nil + } + return super.hitTest(point, with: event) + } + + func animateIn() { + func generateParabollicMotionKeyframes(from sourcePoint: CGFloat, elevation: CGFloat) -> [CGFloat] { + let midPoint = sourcePoint - elevation + + let y1 = sourcePoint + let y2 = midPoint + let y3 = sourcePoint + + let x1 = 0.0 + let x2 = 100.0 + let x3 = 200.0 + + var keyframes: [CGFloat] = [] + let a = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / ((x1 - x2) * (x1 - x3) * (x2 - x3)) + let b = (x1 * x1 * (y2 - y3) + x3 * x3 * (y1 - y2) + x2 * x2 * (y3 - y1)) / ((x1 - x2) * (x1 - x3) * (x2 - x3)) + let c = (x2 * x2 * (x3 * y1 - x1 * y3) + x2 * (x1 * x1 * y3 - x3 * x3 * y1) + x1 * x3 * (x3 - x1) * y2) / ((x1 - x2) * (x1 - x3) * (x2 - x3)) + + for i in 0 ..< 10 { + let k = listViewAnimationCurveSystem(CGFloat(i) / CGFloat(10 - 1)) + let x = x3 * k + let y = a * x * x + b * x + c + + keyframes.append(y) + } + + return keyframes + } + let offsetValues = generateParabollicMotionKeyframes(from: 0.0, elevation: -10.0) + self.layer.animateKeyframes(values: offsetValues.map { $0 as NSNumber }, duration: 0.5, keyPath: "position.y", additive: true) + + self.contentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + self.isDisplaying = true + if !self.isUpdating { + self.state?.updated(transition: .spring(duration: 0.5)) + } + } + + func animateOut(completion: @escaping () -> Void) { + self.backgroundView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in + }) + self.contentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false, completion: { _ in + completion() + }) + } + + func update(component: AvatarUploadToastScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + self.isUpdating = true + defer { + self.isUpdating = false + } + + let environment = environment[ViewControllerComponentContainer.Environment.self].value + + if self.component == nil { + self.statusDisposable = (component.uploadStatus + |> deliverOnMainQueue).startStrict(next: { [weak self] status in + guard let self else { + return + } + self.status = status + if !self.isUpdating { + self.state?.updated(transition: .spring(duration: 0.4)) + } + + if case .done = status, self.doneTimer == nil { + self.doneTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 4.0, repeats: false, block: { [weak self] _ in + guard let self else { + return + } + self.environment?.controller()?.dismiss() + }) + } + }) + } + + self.component = component + self.environment = environment + self.state = state + + var isDone = false + let effectiveProgress: CGFloat + switch self.status { + case let .progress(value): + effectiveProgress = CGFloat(value) + case .done: + isDone = true + effectiveProgress = 1.0 + } + let previousIsDone = self.currentIsDone + self.currentIsDone = isDone + + let contentInsets = UIEdgeInsets(top: 10.0, left: 12.0, bottom: 10.0, right: 10.0) + + let tabBarHeight: CGFloat + if !environment.safeInsets.left.isZero { + tabBarHeight = 34.0 + environment.safeInsets.bottom + } else { + tabBarHeight = 49.0 + environment.safeInsets.bottom + } + let containerInsets = UIEdgeInsets( + top: environment.safeInsets.top, + left: environment.safeInsets.left + 12.0, + bottom: tabBarHeight + 3.0, + right: environment.safeInsets.right + 12.0 + ) + + let availableContentSize = CGSize(width: availableSize.width - containerInsets.left - 
containerInsets.right, height: availableSize.height - containerInsets.top - containerInsets.bottom) + + let spacing: CGFloat = 12.0 + + let iconSize = CGSize(width: 30.0, height: 30.0) + let iconProgressInset: CGFloat = 3.0 + + var textItems: [AnimatedTextComponent.Item] = [] + textItems.append(AnimatedTextComponent.Item(id: AnyHashable(0), isUnbreakable: true, content: .text("Your photo is "))) + if isDone { + textItems.append(AnimatedTextComponent.Item(id: AnyHashable(1), isUnbreakable: true, content: .text("now set."))) + } else { + textItems.append(AnimatedTextComponent.Item(id: AnyHashable(1), isUnbreakable: true, content: .text("uploading."))) + } + + let actionButtonSize = self.actionButton.update( + transition: .immediate, + component: AnyComponent(PlainButtonComponent( + content: AnyComponent(MultilineTextComponent( + text: .plain(NSAttributedString(string: "View", font: Font.regular(17.0), textColor: environment.theme.list.itemAccentColor.withMultiplied(hue: 0.933, saturation: 0.61, brightness: 1.0))) + )), + effectAlignment: .center, + contentInsets: UIEdgeInsets(top: -8.0, left: -8.0, bottom: -8.0, right: -8.0), + action: { [weak self] in + guard let self, let component = self.component else { + return + } + self.doneTimer?.invalidate() + self.environment?.controller()?.dismiss() + component.viewUploadedAvatar() + }, + animateAlpha: true, + animateScale: false, + animateContents: false + )), + environment: {}, + containerSize: CGSize(width: availableContentSize.width - contentInsets.left - contentInsets.right - spacing - iconSize.width, height: availableContentSize.height) + ) + + //TODO:localize + let contentSize = self.content.update( + transition: transition, + component: AnyComponent(AnimatedTextComponent( + font: Font.regular(14.0), + color: .white, + items: textItems + )), + environment: {}, + containerSize: CGSize(width: availableContentSize.width - contentInsets.left - contentInsets.right - spacing - iconSize.width - actionButtonSize.width - 16.0 - 4.0, height: availableContentSize.height) + ) + + var contentHeight: CGFloat = 0.0 + contentHeight += contentInsets.top + contentInsets.bottom + max(iconSize.height, contentSize.height) + + if self.avatarView.image == nil { + self.avatarView.image = generateImage(iconSize, rotatedContext: { size, context in + UIGraphicsPushContext(context) + defer { + UIGraphicsPopContext() + } + + context.clear(CGRect(origin: CGPoint(), size: size)) + + context.addEllipse(in: CGRect(origin: CGPoint(), size: size)) + context.clip() + + component.image.draw(in: CGRect(origin: CGPoint(), size: size)) + }) + } + + let avatarFrame = CGRect(origin: CGPoint(x: contentInsets.left, y: floor((contentHeight - iconSize.height) * 0.5)), size: iconSize) + + var adjustedAvatarFrame = avatarFrame + if !isDone { + adjustedAvatarFrame = adjustedAvatarFrame.insetBy(dx: iconProgressInset, dy: iconProgressInset) + } + transition.setPosition(view: self.avatarView, position: adjustedAvatarFrame.center) + transition.setBounds(view: self.avatarView, bounds: CGRect(origin: CGPoint(), size: adjustedAvatarFrame.size)) + if isDone && !previousIsDone { + let topScale: CGFloat = 1.1 + self.avatarView.layer.animateScale(from: 1.0, to: topScale, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in + guard let self else { + return + } + self.avatarView.layer.animateScale(from: topScale, to: 1.0, duration: 0.16) + }) + self.progressNode.layer.animateScale(from: 1.0, to: topScale, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in + 
guard let self else { + return + } + self.progressNode.layer.animateScale(from: topScale, to: 1.0, duration: 0.16) + }) + HapticFeedback().success() + } + + self.progressNode.frame = avatarFrame + self.progressNode.transitionToState(.progress(color: .white, lineWidth: 1.0 + UIScreenPixel, value: effectiveProgress, cancelEnabled: false, animateRotation: true)) + transition.setAlpha(view: self.progressNode.view, alpha: isDone ? 0.0 : 1.0) + + if let contentView = self.content.view { + if contentView.superview == nil { + self.contentView.addSubview(contentView) + } + transition.setFrame(view: contentView, frame: CGRect(origin: CGPoint(x: contentInsets.left + iconSize.width + spacing, y: floor((contentHeight - contentSize.height) * 0.5)), size: contentSize)) + } + + if let actionButtonView = self.actionButton.view { + if actionButtonView.superview == nil { + self.contentView.addSubview(actionButtonView) + } + transition.setFrame(view: actionButtonView, frame: CGRect(origin: CGPoint(x: availableContentSize.width - contentInsets.right - 16.0 - actionButtonSize.width, y: floor((contentHeight - actionButtonSize.height) * 0.5)), size: actionButtonSize)) + transition.setAlpha(view: actionButtonView, alpha: isDone ? 1.0 : 0.0) + } + + let size = CGSize(width: availableContentSize.width, height: contentHeight) + + let contentFrame = CGRect(origin: CGPoint(x: containerInsets.left, y: availableSize.height - containerInsets.bottom - size.height), size: size) + + self.backgroundView.updateColor(color: self.isDisplaying ? UIColor(white: 0.0, alpha: 0.7) : UIColor.black, transition: transition.containedViewLayoutTransition) + let backgroundFrame: CGRect + if self.isDisplaying { + backgroundFrame = contentFrame + } else { + backgroundFrame = CGRect(origin: CGPoint(), size: availableSize) + } + if self.backgroundView.bounds.size != contentFrame.size { + self.backgroundView.update(size: availableSize, cornerRadius: 0.0, transition: transition.containedViewLayoutTransition) + } + transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: availableSize)) + transition.setFrame(view: self.backgroundMaskView, frame: CGRect(origin: CGPoint(), size: availableSize)) + + transition.setCornerRadius(layer: self.backgroundMainMaskView.layer, cornerRadius: self.isDisplaying ? 14.0 : 0.0) + transition.setFrame(view: self.backgroundMainMaskView, frame: backgroundFrame) + + if self.backgroundArrowMaskView.image == nil { + let arrowFactor: CGFloat = 0.75 + let arrowSize = CGSize(width: floor(29.0 * arrowFactor), height: floor(10.0 * arrowFactor)) + self.backgroundArrowMaskView.image = generateImage(arrowSize, rotatedContext: { size, context in + context.clear(CGRect(origin: CGPoint(), size: size)) + context.scaleBy(x: size.width / 29.0, y: size.height / 10.0) + context.setFillColor(UIColor.white.cgColor) + context.scaleBy(x: 0.333, y: 0.333) + let _ = try? 
drawSvgPath(context, path: "M85.882251,0 C79.5170552,0 73.4125613,2.52817247 68.9116882,7.02834833 L51.4264069,24.5109211 C46.7401154,29.1964866 39.1421356,29.1964866 34.4558441,24.5109211 L16.9705627,7.02834833 C12.4696897,2.52817247 6.36519576,0 0,0 L85.882251,0 ") + context.fillPath() + })?.withRenderingMode(.alwaysTemplate) + } + + if let arrowImage = self.backgroundArrowMaskView.image, let (targetView, targetRect) = component.arrowTarget() { + let targetArrowRect = targetView.convert(targetRect, to: self) + self.backgroundArrowMaskView.isHidden = false + + var arrowFrame = CGRect(origin: CGPoint(x: targetArrowRect.minX + floor((targetArrowRect.width - arrowImage.size.width) * 0.5), y: contentFrame.maxY), size: arrowImage.size) + if !self.isDisplaying { + arrowFrame = arrowFrame.offsetBy(dx: 0.0, dy: -10.0) + } + transition.setFrame(view: self.backgroundArrowMaskView, frame: arrowFrame) + } else { + self.backgroundArrowMaskView.isHidden = true + } + + transition.setFrame(view: self.contentView, frame: contentFrame) + + return availableSize + } + } + + func makeView() -> View { + return View(frame: CGRect()) + } + + func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment, transition: ComponentTransition) -> CGSize { + return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition) + } +} + +public class AvatarUploadToastScreen: ViewControllerComponentContainer { + public var targetAvatarView: UIView? { + if let view = self.node.hostView.componentView as? AvatarUploadToastScreenComponent.View { + return view.targetAvatarView + } + return nil + } + + private var processedDidAppear: Bool = false + private var processedDidDisappear: Bool = false + + public init( + context: AccountContext, + image: UIImage, + uploadStatus: Signal, + arrowTarget: @escaping () -> (UIView, CGRect)?, + viewUploadedAvatar: @escaping () -> Void + ) { + super.init( + context: context, + component: AvatarUploadToastScreenComponent( + context: context, + image: image, + uploadStatus: uploadStatus, + arrowTarget: arrowTarget, + viewUploadedAvatar: viewUploadedAvatar + ), + navigationBarAppearance: .none, + statusBarStyle: .ignore, + presentationMode: .default, + updatedPresentationData: nil + ) + self.navigationPresentation = .flatModal + } + + required public init(coder aDecoder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + deinit { + } + + override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) { + super.containerLayoutUpdated(layout, transition: transition) + } + + override public func viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + + if !self.processedDidAppear { + self.processedDidAppear = true + if let componentView = self.node.hostView.componentView as? AvatarUploadToastScreenComponent.View { + componentView.animateIn() + } + } + } + + private func superDismiss() { + super.dismiss() + } + + override public func dismiss(completion: (() -> Void)? = nil) { + if !self.processedDidDisappear { + self.processedDidDisappear = true + + if let componentView = self.node.hostView.componentView as? 
AvatarUploadToastScreenComponent.View { + componentView.animateOut(completion: { [weak self] in + if let self { + self.superDismiss() + } + completion?() + }) + } else { + super.dismiss(completion: completion) + } + } + } +} diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoData.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoData.swift index 62840ba730..b3c854765b 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoData.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoData.swift @@ -1251,7 +1251,7 @@ func peerInfoScreenData(context: AccountContext, peerId: PeerId, strings: Presen let starsRevenueContextAndState = context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: peerId)) |> mapToSignal { peer -> Signal<(StarsRevenueStatsContext?, StarsRevenueStats?), NoError> in var canViewStarsRevenue = false - if let peer, case let .user(user) = peer, let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) || context.sharedContext.applicationBindings.appBuildType == .internal { + if let peer, case let .user(user) = peer, let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) || context.sharedContext.applicationBindings.appBuildType == .internal || context.sharedContext.immediateExperimentalUISettings.devRequests { canViewStarsRevenue = true } #if DEBUG @@ -1276,7 +1276,7 @@ func peerInfoScreenData(context: AccountContext, peerId: PeerId, strings: Presen ) |> mapToSignal { peer, canViewRevenue -> Signal<(RevenueStatsContext?, RevenueStats?), NoError> in var canViewRevenue = canViewRevenue - if let peer, case let .user(user) = peer, let _ = user.botInfo, context.sharedContext.applicationBindings.appBuildType == .internal { + if let peer, case let .user(user) = peer, let _ = user.botInfo, context.sharedContext.applicationBindings.appBuildType == .internal || context.sharedContext.immediateExperimentalUISettings.devRequests { canViewRevenue = true } #if DEBUG diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift index 8d5aa05bef..ee8d55af0f 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreen.swift @@ -3947,38 +3947,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro return } - let entriesPromise = Promise<[AvatarGalleryEntry]>(entries) - let galleryController = AvatarGalleryController(context: strongSelf.context, peer: EnginePeer(peer), sourceCorners: .round, remoteEntries: entriesPromise, skipInitial: true, centralEntryIndex: centralEntry.flatMap { entries.firstIndex(of: $0) }, replaceRootController: { controller, ready in - }) - galleryController.openAvatarSetup = { [weak self] completion in - self?.controller?.openAvatarForEditing(fromGallery: true, completion: { _ in - completion() - }) - } - galleryController.avatarPhotoEditCompletion = { [weak self] image in - self?.controller?.updateProfilePhoto(image, mode: .generic) - } - galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in - self?.controller?.updateProfileVideo(image, asset: asset, adjustments: adjustments, mode: .generic) - } - galleryController.removedEntry = { [weak self] entry in - if let item = PeerInfoAvatarListItem(entry: entry) { - let _ = 
self?.headerNode.avatarListNode.listContainerNode.deleteItem(item) - } - } - strongSelf.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).startStrict(next: { entry in - self?.headerNode.updateAvatarIsHidden(entry: entry) - })) - strongSelf.view.endEditing(true) - strongSelf.controller?.present(galleryController, in: .window(.root), with: AvatarGalleryControllerPresentationArguments(transitionArguments: { entry in - if let transitionNode = self?.headerNode.avatarTransitionArguments(entry: entry) { - return GalleryTransitionArguments(transitionNode: transitionNode, addToTransitionSurface: { view in - self?.headerNode.addToAvatarTransitionSurface(view: view) - }) - } else { - return nil - } - })) + strongSelf.openAvatarGallery(peer: EnginePeer(peer), entries: entries, centralEntry: centralEntry, animateTransition: true) Queue.mainQueue().after(0.4) { strongSelf.resetHeaderExpansion() @@ -9715,7 +9684,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro mixin.didFinishWithImage = { [weak self] image in if let image = image { completion(image) - self?.controller?.updateProfilePhoto(image, mode: mode) + self?.controller?.updateProfilePhoto(image, mode: mode, uploadStatus: nil) } } mixin.didFinishWithVideo = { [weak self] image, asset, adjustments in @@ -12210,6 +12179,47 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro func cancelItemSelection() { self.headerNode.navigationButtonContainer.performAction?(.selectionDone, nil, nil) } + + func openAvatarGallery(peer: EnginePeer, entries: [AvatarGalleryEntry], centralEntry: AvatarGalleryEntry?, animateTransition: Bool) { + let entriesPromise = Promise<[AvatarGalleryEntry]>(entries) + let galleryController = AvatarGalleryController(context: self.context, peer: peer, sourceCorners: .round, remoteEntries: entriesPromise, skipInitial: true, centralEntryIndex: centralEntry.flatMap { entries.firstIndex(of: $0) }, replaceRootController: { controller, ready in + }) + galleryController.openAvatarSetup = { [weak self] completion in + self?.controller?.openAvatarForEditing(fromGallery: true, completion: { _ in + completion() + }) + } + galleryController.avatarPhotoEditCompletion = { [weak self] image in + self?.controller?.updateProfilePhoto(image, mode: .generic, uploadStatus: nil) + } + galleryController.avatarVideoEditCompletion = { [weak self] image, asset, adjustments in + self?.controller?.updateProfileVideo(image, asset: asset, adjustments: adjustments, mode: .generic) + } + galleryController.removedEntry = { [weak self] entry in + if let item = PeerInfoAvatarListItem(entry: entry) { + let _ = self?.headerNode.avatarListNode.listContainerNode.deleteItem(item) + } + } + self.hiddenAvatarRepresentationDisposable.set((galleryController.hiddenMedia |> deliverOnMainQueue).startStrict(next: { [weak self] entry in + self?.headerNode.updateAvatarIsHidden(entry: entry) + })) + self.view.endEditing(true) + let arguments = AvatarGalleryControllerPresentationArguments(transitionArguments: { [weak self] _ in + if animateTransition, let entry = centralEntry, let transitionNode = self?.headerNode.avatarTransitionArguments(entry: entry) { + return GalleryTransitionArguments(transitionNode: transitionNode, addToTransitionSurface: { view in + self?.headerNode.addToAvatarTransitionSurface(view: view) + }) + } else { + return nil + } + }) + if self.controller?.navigationController != nil { + self.controller?.present(galleryController, in: .window(.root), with: 
arguments) + } else { + galleryController.presentationArguments = arguments + self.context.sharedContext.mainWindow?.present(galleryController, on: .root) + } + } } public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortcutResponder { @@ -12710,9 +12720,9 @@ public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortc } } - public func openAvatarSetup() { + public func openAvatarSetup(completedWithUploadingImage: @escaping (UIImage, Signal) -> UIView?) { let proceed = { [weak self] in - self?.openAvatarForEditing() + self?.newopenAvatarForEditing(completedWithUploadingImage: completedWithUploadingImage) } if !self.isNodeLoaded { self.loadDisplayNode() @@ -12724,6 +12734,18 @@ public final class PeerInfoScreenImpl: ViewController, PeerInfoScreen, KeyShortc } } + public func openAvatars() { + let _ = (self.context.engine.data.get( + TelegramEngine.EngineData.Item.Peer.Peer(id: self.peerId) + ) + |> deliverOnMainQueue).startStandalone(next: { [weak self] peer in + guard let self, let peer else { + return + } + self.controllerNode.openAvatarGallery(peer: peer, entries: self.controllerNode.headerNode.avatarListNode.listContainerNode.galleryEntries, centralEntry: nil, animateTransition: false) + }) + } + func openAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }) { self.controllerNode.openAvatarForEditing(mode: mode, fromGallery: fromGallery, completion: completion) } diff --git a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreenAvatarSetup.swift b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreenAvatarSetup.swift index 3b2b7ad43e..db286e2590 100644 --- a/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreenAvatarSetup.swift +++ b/submodules/TelegramUI/Components/PeerInfo/PeerInfoScreen/Sources/PeerInfoScreenAvatarSetup.swift @@ -18,164 +18,197 @@ import PresentationDataUtils import LegacyComponents extension PeerInfoScreenImpl { -// func newopenAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }) { -// guard let data = self.controllerNode.data, let peer = data.peer, mode != .generic || canEditPeerInfo(context: self.context, peer: peer, chatLocation: self.chatLocation, threadData: data.threadData) else { -// return -// } -// self.view.endEditing(true) -// -// let peerId = self.peerId -// var isForum = false -// if let peer = peer as? TelegramChannel, peer.flags.contains(.isForum) { -// isForum = true -// } -// -// var currentIsVideo = false -// var emojiMarkup: TelegramMediaImage.EmojiMarkup? 
-// let item = self.controllerNode.headerNode.avatarListNode.listContainerNode.currentItemNode?.item -// if let item = item, case let .image(_, _, videoRepresentations, _, _, emojiMarkupValue) = item { -// currentIsVideo = !videoRepresentations.isEmpty -// emojiMarkup = emojiMarkupValue -// } -// -// let _ = isForum -// let _ = currentIsVideo -// -// let _ = (self.context.engine.data.get( -// TelegramEngine.EngineData.Item.Peer.Peer(id: peerId) -// ) -// |> deliverOnMainQueue).startStandalone(next: { [weak self] peer in -// guard let self, let peer else { -// return -// } -// -// let keyboardInputData = Promise() -// keyboardInputData.set(AvatarEditorScreen.inputData(context: self.context, isGroup: peer.id.namespace != Namespaces.Peer.CloudUser)) -// -// var hasPhotos = false -// if !peer.profileImageRepresentations.isEmpty { -// hasPhotos = true -// } -// -// var hasDeleteButton = false -// if case .generic = mode { -// hasDeleteButton = hasPhotos && !fromGallery -// } else if case .custom = mode { -// hasDeleteButton = peer.profileImageRepresentations.first?.isPersonal == true -// } else if case .fallback = mode { -// if let cachedData = data.cachedData as? CachedUserData, case let .known(photo) = cachedData.fallbackPhoto { -// hasDeleteButton = photo != nil -// } -// } -// -// let _ = hasDeleteButton -// -// let parentController = (self.context.sharedContext.mainWindow?.viewController as? NavigationController)?.topViewController as? ViewController -// -// var dismissImpl: (() -> Void)? -// let mainController = self.context.sharedContext.makeAvatarMediaPickerScreen(context: self.context, getSourceRect: { return nil }, canDelete: hasDeleteButton, performDelete: { [weak self] in -// self?.openAvatarRemoval(mode: mode, peer: peer, item: item) -// }, completion: { result, transitionView, transitionRect, transitionImage, fromCamera, transitionOut, cancelled in -// let subject: Signal -// if let asset = result as? PHAsset { -// subject = .single(.asset(asset)) -// } else if let image = result as? UIImage { -// subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight)) -// } else if let result = result as? Signal { -// subject = result -// |> map { value -> MediaEditorScreenImpl.Subject? in -// switch value { -// case .pendingImage: -// return nil -// case let .image(image): -// return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft) -// case let .video(video): -// return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: [], additionalVideoPosition: .topLeft) -// default: -// return nil -// } -// } -// } else { -// let peerType: AvatarEditorScreen.PeerType -// if mode == .suggest { -// peerType = .suggest -// } else if case .legacyGroup = peer { -// peerType = .group -// } else if case let .channel(channel) = peer { -// if case .group = channel.info { -// peerType = channel.flags.contains(.isForum) ? 
.forum : .group -// } else { -// peerType = .channel -// } -// } else { -// peerType = .user -// } -// let controller = AvatarEditorScreen(context: self.context, inputData: keyboardInputData.get(), peerType: peerType, markup: emojiMarkup) -// //controller.imageCompletion = imageCompletion -// //controller.videoCompletion = videoCompletion -// parentController?.push(controller) -// //isFromEditor = true -// return -// } -// -// let editorController = MediaEditorScreenImpl( -// context: self.context, -// mode: .avatarEditor, -// subject: subject, -// transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery( -// MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn( -// sourceView: $0, -// sourceRect: transitionRect, -// sourceImage: transitionImage -// ) -// ) }), -// transitionOut: { finished, isNew in -// if !finished, let transitionView { -// return MediaEditorScreenImpl.TransitionOut( -// destinationView: transitionView, -// destinationRect: transitionView.bounds, -// destinationCornerRadius: 0.0 -// ) -// } -// return nil -// }, completion: { [weak self] result, commit in -// dismissImpl?() -// -// switch result.media { -// case let .image(image, _): -// self?.updateProfilePhoto(image, mode: mode) -// commit({}) -// case let .video(video, coverImage, values, _, _): -// if let coverImage { -// self?.updateProfileVideo(coverImage, asset: video, adjustments: values, mode: mode) -// } -// commit({}) -// default: -// break -// } -// } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void -// ) -// editorController.cancelled = { _ in -// cancelled() -// } -// self.push(editorController) -// }, dismissed: { -// -// }) -// dismissImpl = { [weak mainController] in -// if let mainController, let navigationController = mainController.navigationController { -// var viewControllers = navigationController.viewControllers -// viewControllers = viewControllers.filter { c in -// return !(c is CameraScreen) && c !== mainController -// } -// navigationController.setViewControllers(viewControllers, animated: false) -// } -// } -// mainController.navigationPresentation = .flatModal -// mainController.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait) -// self.push(mainController) -// }) -// } + func newopenAvatarForEditing(mode: PeerInfoAvatarEditingMode = .generic, fromGallery: Bool = false, completion: @escaping (UIImage?) -> Void = { _ in }, completedWithUploadingImage: @escaping (UIImage, Signal) -> UIView? = { _, _ in nil }) { + guard let data = self.controllerNode.data, let peer = data.peer, mode != .generic || canEditPeerInfo(context: self.context, peer: peer, chatLocation: self.chatLocation, threadData: data.threadData) else { + return + } + self.view.endEditing(true) + + let peerId = self.peerId + var isForum = false + if let peer = peer as? TelegramChannel, peer.flags.contains(.isForum) { + isForum = true + } + + var currentIsVideo = false + var emojiMarkup: TelegramMediaImage.EmojiMarkup? 
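// Note: the lookup below snapshots the avatar currently shown in the header.
// `currentIsVideo` is only consumed via `let _ =` for now, while `emojiMarkup`
// is forwarded to AvatarEditorScreen further down so that an existing
// emoji/sticker avatar reopens with its markup preselected.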
+ let item = self.controllerNode.headerNode.avatarListNode.listContainerNode.currentItemNode?.item + if let item = item, case let .image(_, _, videoRepresentations, _, _, emojiMarkupValue) = item { + currentIsVideo = !videoRepresentations.isEmpty + emojiMarkup = emojiMarkupValue + } + + let _ = isForum + let _ = currentIsVideo + + let _ = (self.context.engine.data.get( + TelegramEngine.EngineData.Item.Peer.Peer(id: peerId) + ) + |> deliverOnMainQueue).startStandalone(next: { [weak self] peer in + guard let self, let peer else { + return + } + + let keyboardInputData = Promise() + keyboardInputData.set(AvatarEditorScreen.inputData(context: self.context, isGroup: peer.id.namespace != Namespaces.Peer.CloudUser)) + + var hasPhotos = false + if !peer.profileImageRepresentations.isEmpty { + hasPhotos = true + } + + var hasDeleteButton = false + if case .generic = mode { + hasDeleteButton = hasPhotos && !fromGallery + } else if case .custom = mode { + hasDeleteButton = peer.profileImageRepresentations.first?.isPersonal == true + } else if case .fallback = mode { + if let cachedData = data.cachedData as? CachedUserData, case let .known(photo) = cachedData.fallbackPhoto { + hasDeleteButton = photo != nil + } + } + + let _ = hasDeleteButton + + let parentController = (self.context.sharedContext.mainWindow?.viewController as? NavigationController)?.topViewController as? ViewController + + var dismissImpl: (() -> Void)? + let mainController = self.context.sharedContext.makeAvatarMediaPickerScreen(context: self.context, getSourceRect: { return nil }, canDelete: hasDeleteButton, performDelete: { [weak self] in + self?.openAvatarRemoval(mode: mode, peer: peer, item: item) + }, completion: { result, transitionView, transitionRect, transitionImage, fromCamera, transitionOut, cancelled in + let subject: Signal + if let asset = result as? PHAsset { + subject = .single(.asset(asset)) + } else if let image = result as? UIImage { + subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight)) + } else if let result = result as? Signal { + subject = result + |> map { value -> MediaEditorScreenImpl.Subject? in + switch value { + case .pendingImage: + return nil + case let .image(image): + return .image(image: image.image, dimensions: PixelDimensions(image.image.size), additionalImage: nil, additionalImagePosition: .topLeft) + case let .video(video): + return .video(videoPath: video.videoPath, thumbnail: video.coverImage, mirror: video.mirror, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: video.dimensions, duration: video.duration, videoPositionChanges: [], additionalVideoPosition: .topLeft) + default: + return nil + } + } + } else { + let peerType: AvatarEditorScreen.PeerType + if mode == .suggest { + peerType = .suggest + } else if case .legacyGroup = peer { + peerType = .group + } else if case let .channel(channel) = peer { + if case .group = channel.info { + peerType = channel.flags.contains(.isForum) ? .forum : .group + } else { + peerType = .channel + } + } else { + peerType = .user + } + let controller = AvatarEditorScreen(context: self.context, inputData: keyboardInputData.get(), peerType: peerType, markup: emojiMarkup) + //controller.imageCompletion = imageCompletion + //controller.videoCompletion = videoCompletion + parentController?.push(controller) + //isFromEditor = true + return + } + + var resultImage: UIImage? 
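Two pieces of state funnel the editor result back to the caller: `resultImage` keeps the final still, and the promise created just below publishes upload progress to whoever supplied `completedWithUploadingImage`. A minimal sketch of such a callback, assuming a stand-in `AvatarUploadStatus` enum for the status type (the concrete generic parameters are elided in this diff, so the name is hypothetical):

import SwiftSignalKit
import UIKit

// Stand-in for the elided generic parameter of the upload-status promise;
// the .progress/.done cases match the ones used later in this file.
enum AvatarUploadStatus {
    case progress(Float)
    case done
}

// Returns the view the editor animates into; drives a simple progress
// treatment from the status signal and settles once the upload completes.
func completedWithUploadingImage(_ image: UIImage, _ status: Signal<AvatarUploadStatus, NoError>) -> UIView? {
    let avatarView = UIImageView(image: image)
    let disposable = (status
    |> deliverOnMainQueue).start(next: { value in
        switch value {
        case let .progress(fraction):
            avatarView.alpha = 0.5 + 0.5 * CGFloat(fraction) // placeholder progress treatment
        case .done:
            avatarView.alpha = 1.0
        }
    })
    let _ = disposable // retain alongside the view in real code
    return avatarView
}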
+ let uploadStatusPromise = Promise(.progress(0.0)) + let editorController = MediaEditorScreenImpl( + context: self.context, + mode: .avatarEditor, + subject: subject, + transitionIn: fromCamera ? .camera : transitionView.flatMap({ .gallery( + MediaEditorScreenImpl.TransitionIn.GalleryTransitionIn( + sourceView: $0, + sourceRect: transitionRect, + sourceImage: transitionImage + ) + ) }), + transitionOut: { finished, isNew in + if !finished { + if let transitionView { + return MediaEditorScreenImpl.TransitionOut( + destinationView: transitionView, + destinationRect: transitionView.bounds, + destinationCornerRadius: 0.0 + ) + } + } else if let resultImage, let transitionOutView = completedWithUploadingImage(resultImage, uploadStatusPromise.get()) { + transitionOutView.isHidden = true + return MediaEditorScreenImpl.TransitionOut( + destinationView: transitionOutView, + destinationRect: transitionOutView.bounds, + destinationCornerRadius: transitionOutView.bounds.height * 0.5, + completion: { [weak transitionOutView] in + transitionOutView?.isHidden = false + } + ) + } + return nil + }, completion: { [weak self] result, commit in + switch result.media { + case let .image(image, _): + resultImage = image + self?.updateProfilePhoto(image, mode: mode, uploadStatus: uploadStatusPromise) + commit({}) + case let .video(video, coverImage, values, _, _): + if let coverImage { + let _ = values + //TODO:release + resultImage = coverImage + self?.updateProfileVideo(coverImage, asset: video, adjustments: nil, mode: mode) + } + commit({}) + default: + break + } + + dismissImpl?() + } as (MediaEditorScreenImpl.Result, @escaping (@escaping () -> Void) -> Void) -> Void + ) + editorController.cancelled = { _ in + cancelled() + } + if self.navigationController != nil { + self.push(editorController) + } else { + self.parentController?.pushViewController(editorController) + } + }, dismissed: { + + }) + dismissImpl = { [weak self, weak mainController] in + if let mainController, let navigationController = mainController.navigationController { + var viewControllers = navigationController.viewControllers + viewControllers = viewControllers.filter { c in + return !(c is CameraScreen) && c !== mainController + } + navigationController.setViewControllers(viewControllers, animated: false) + } + if let self, let navigationController = self.parentController, let mainController { + var viewControllers = navigationController.viewControllers + viewControllers = viewControllers.filter { c in + return !(c is CameraScreen) && c !== mainController + } + navigationController.setViewControllers(viewControllers, animated: false) + } + } + mainController.navigationPresentation = .flatModal + mainController.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait) + if self.navigationController != nil { + self.push(mainController) + } else { + self.parentController?.pushViewController(mainController) + } + }) + } func openAvatarRemoval(mode: PeerInfoAvatarEditingMode, peer: EnginePeer? = nil, item: PeerInfoAvatarListItem? = nil, completion: @escaping () -> Void = {}) { let proceed = { [weak self] in @@ -250,8 +283,9 @@ extension PeerInfoScreenImpl { (self.navigationController?.topViewController as? ViewController)?.present(actionSheet, in: .window(.root)) } - public func updateProfilePhoto(_ image: UIImage, mode: PeerInfoAvatarEditingMode) { + public func updateProfilePhoto(_ image: UIImage, mode: PeerInfoAvatarEditingMode, uploadStatus: Promise?) 
{ guard let data = image.jpegData(compressionQuality: 0.6) else { + uploadStatus?.set(.single(.done)) return } @@ -327,8 +361,10 @@ extension PeerInfoScreenImpl { } switch result { case .complete: + uploadStatus?.set(.single(.done)) strongSelf.controllerNode.state = strongSelf.controllerNode.state.withUpdatingAvatar(nil).withAvatarUploadProgress(nil) case let .progress(value): + uploadStatus?.set(.single(.progress(value))) strongSelf.controllerNode.state = strongSelf.controllerNode.state.withAvatarUploadProgress(.value(CGFloat(value))) } if let (layout, navigationHeight) = strongSelf.controllerNode.validLayout { diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index d826577770..125f2760ed 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -1221,7 +1221,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G controller.imageCompletion = { [weak self] image, commit in if let strongSelf = self { if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl { - settingsController.updateProfilePhoto(image, mode: .accept) + settingsController.updateProfilePhoto(image, mode: .accept, uploadStatus: nil) commit() } } @@ -1258,7 +1258,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G }, imageCompletion: { [weak self] image in if let strongSelf = self { if let rootController = strongSelf.effectiveNavigationController as? TelegramRootController, let settingsController = rootController.accountSettingsController as? PeerInfoScreenImpl { - settingsController.updateProfilePhoto(image, mode: .accept) + settingsController.updateProfilePhoto(image, mode: .accept, uploadStatus: nil) } } }, videoCompletion: { [weak self] image, url, adjustments in diff --git a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift index 67b5531abc..6021c78c6e 100644 --- a/submodules/TelegramUI/Sources/ChatHistoryListNode.swift +++ b/submodules/TelegramUI/Sources/ChatHistoryListNode.swift @@ -785,7 +785,87 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto if peerId.namespace == Namespaces.Peer.CloudUser { adMessages = .single((nil, [])) } else { - adMessages = adMessagesContext.state + if context.sharedContext.immediateExperimentalUISettings.fakeAds { + adMessages = context.engine.data.get( + TelegramEngine.EngineData.Item.Peer.Peer(id: peerId) + ) + |> map { peer -> (interPostInterval: Int32?, messages: [Message]) in + let fakeAdMessages: [Message] = (0 ..< 10).map { i -> Message in + var attributes: [MessageAttribute] = [] + + let mappedMessageType: AdMessageAttribute.MessageType = .sponsored + attributes.append(AdMessageAttribute(opaqueId: "fake_ad_\(i)".data(using: .utf8)!, messageType: mappedMessageType, url: "t.me/telegram", buttonText: "VIEW", sponsorInfo: nil, additionalInfo: nil, canReport: false, hasContentMedia: false)) + + var messagePeers = SimpleDictionary() + + if let peer { + messagePeers[peer.id] = peer._asPeer() + } + + let author: Peer = TelegramChannel( + id: PeerId(namespace: Namespaces.Peer.CloudChannel, id: PeerId.Id._internalFromInt64Value(1)), + accessHash: nil, + title: "Fake Ad", + username: nil, + photo: [], + creationDate: 0, + version: 0, + participationStatus: .left, + info: 
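// The fake-ads fixture in this hunk is gated by the new `fakeAds` experimental
// flag: it synthesizes a local TelegramChannel author plus ten local messages
// carrying AdMessageAttribute, and returns an inter-post interval of 10 so the
// chat history interleaves them like real sponsored messages. `stableVersion`
// is derived from a text/peer hash so the synthetic entries stay stable across
// view updates.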
.broadcast(TelegramChannelBroadcastInfo(flags: [])), + flags: [], + restrictionInfo: nil, + adminRights: nil, + bannedRights: nil, + defaultBannedRights: nil, + usernames: [], + storiesHidden: nil, + nameColor: .blue, + backgroundEmojiId: nil, + profileColor: nil, + profileBackgroundEmojiId: nil, + emojiStatus: nil, + approximateBoostLevel: nil, + subscriptionUntilDate: nil, + verificationIconFileId: nil + ) + messagePeers[author.id] = author + + let messageText = "Fake Ad N\(i)" + let messageHash = (messageText.hashValue &+ 31 &* peerId.hashValue) &* 31 &+ author.id.hashValue + let messageStableVersion = UInt32(bitPattern: Int32(truncatingIfNeeded: messageHash)) + + return Message( + stableId: 0, + stableVersion: messageStableVersion, + id: MessageId(peerId: peerId, namespace: Namespaces.Message.Local, id: 0), + globallyUniqueId: nil, + groupingKey: nil, + groupInfo: nil, + threadId: nil, + timestamp: Int32.max - 1, + flags: [.Incoming], + tags: [], + globalTags: [], + localTags: [], + customTags: [], + forwardInfo: nil, + author: author, + text: messageText, + attributes: attributes, + media: [], + peers: messagePeers, + associatedMessages: SimpleDictionary(), + associatedMessageIds: [], + associatedMedia: [:], + associatedThreadInfo: nil, + associatedStories: [:] + ) + } + return (10, fakeAdMessages) + } + } else { + adMessages = adMessagesContext.state + } } } else { self.adMessagesContext = nil @@ -2444,6 +2524,10 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto var insertionTimestamp: Int32? if self.currentPrefetchDirectionIsToLater { outer: for i in selectedRange.0 ... selectedRange.1 { + if historyView.originalView.laterId == nil && i >= historyView.filteredEntries.count - 4 { + break + } + switch historyView.filteredEntries[i] { case let .MessageEntry(message, _, _, _, _, _): if message.id.namespace == Namespaces.Message.Cloud { diff --git a/submodules/TelegramUI/Sources/ChatSearchTitleAccessoryPanelNode.swift b/submodules/TelegramUI/Sources/ChatSearchTitleAccessoryPanelNode.swift index fae6aca654..65c9a4eb20 100644 --- a/submodules/TelegramUI/Sources/ChatSearchTitleAccessoryPanelNode.swift +++ b/submodules/TelegramUI/Sources/ChatSearchTitleAccessoryPanelNode.swift @@ -497,7 +497,7 @@ final class ChatSearchTitleAccessoryPanelNode: ChatTitleAccessoryPanelNode, Chat var isFirstUpdate = true self.itemsDisposable = (combineLatest( - context.engine.stickers.availableReactions(), + context.availableReactions, context.engine.stickers.savedMessageTagData(), tagsAndFiles ) diff --git a/submodules/TelegramUI/Sources/TelegramRootController.swift b/submodules/TelegramUI/Sources/TelegramRootController.swift index d9979b4cd1..2c0b6826ce 100644 --- a/submodules/TelegramUI/Sources/TelegramRootController.swift +++ b/submodules/TelegramUI/Sources/TelegramRootController.swift @@ -750,8 +750,12 @@ public final class TelegramRootController: NavigationController, TelegramRootCon self.accountSettingsController?.openBirthdaySetup() } - public func openPhotoSetup() { - self.accountSettingsController?.openAvatarSetup() + public func openPhotoSetup(completedWithUploadingImage: @escaping (UIImage, Signal) -> UIView?) 
{ + self.accountSettingsController?.openAvatarSetup(completedWithUploadingImage: completedWithUploadingImage) + } + + public func openAvatars() { + self.accountSettingsController?.openAvatars() } } diff --git a/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift b/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift index d009392ddb..5114787d01 100644 --- a/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift +++ b/submodules/TelegramUIPreferences/Sources/ExperimentalUISettings.swift @@ -64,6 +64,8 @@ public struct ExperimentalUISettings: Codable, Equatable { public var autoBenchmarkReflectors: Bool? public var conferenceCalls: Bool public var playerV2: Bool + public var devRequests: Bool + public var fakeAds: Bool public static var defaultSettings: ExperimentalUISettings { return ExperimentalUISettings( @@ -105,7 +107,9 @@ public struct ExperimentalUISettings: Codable, Equatable { enableLocalTranslation: false, autoBenchmarkReflectors: nil, conferenceCalls: false, - playerV2: false + playerV2: false, + devRequests: false, + fakeAds: false ) } @@ -148,7 +152,9 @@ public struct ExperimentalUISettings: Codable, Equatable { enableLocalTranslation: Bool, autoBenchmarkReflectors: Bool?, conferenceCalls: Bool, - playerV2: Bool + playerV2: Bool, + devRequests: Bool, + fakeAds: Bool ) { self.keepChatNavigationStack = keepChatNavigationStack self.skipReadHistory = skipReadHistory @@ -189,6 +195,8 @@ public struct ExperimentalUISettings: Codable, Equatable { self.autoBenchmarkReflectors = autoBenchmarkReflectors self.conferenceCalls = conferenceCalls self.playerV2 = playerV2 + self.devRequests = devRequests + self.fakeAds = fakeAds } public init(from decoder: Decoder) throws { @@ -233,6 +241,8 @@ public struct ExperimentalUISettings: Codable, Equatable { self.autoBenchmarkReflectors = try container.decodeIfPresent(Bool.self, forKey: "autoBenchmarkReflectors") self.conferenceCalls = try container.decodeIfPresent(Bool.self, forKey: "conferenceCalls") ?? false self.playerV2 = try container.decodeIfPresent(Bool.self, forKey: "playerV2") ?? false + self.devRequests = try container.decodeIfPresent(Bool.self, forKey: "devRequests") ?? false + self.fakeAds = try container.decodeIfPresent(Bool.self, forKey: "fakeAds") ?? 
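// Decoding with `decodeIfPresent(...) ?? false` keeps previously persisted
// ExperimentalUISettings payloads decodable: older archives simply lack the
// "devRequests"/"fakeAds" keys and fall back to disabled.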
false } public func encode(to encoder: Encoder) throws { @@ -277,6 +287,8 @@ public struct ExperimentalUISettings: Codable, Equatable { try container.encodeIfPresent(self.autoBenchmarkReflectors, forKey: "autoBenchmarkReflectors") try container.encodeIfPresent(self.conferenceCalls, forKey: "conferenceCalls") try container.encodeIfPresent(self.playerV2, forKey: "playerV2") + try container.encodeIfPresent(self.devRequests, forKey: "devRequests") + try container.encodeIfPresent(self.fakeAds, forKey: "fakeAds") } } diff --git a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift index 15c2cc2b91..1b3f218060 100644 --- a/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift +++ b/submodules/TelegramUniversalVideoContent/Sources/HLSVideoContent.swift @@ -230,7 +230,7 @@ public final class HLSVideoContent: UniversalVideoContent { public let id: AnyHashable public let nativeId: NativeVideoContentId - let userLocation: MediaResourceUserLocation + public let userLocation: MediaResourceUserLocation public let fileReference: FileMediaReference public let dimensions: CGSize public let duration: Double diff --git a/submodules/TelegramVoip/Sources/GroupCallContext.swift b/submodules/TelegramVoip/Sources/GroupCallContext.swift index b0af5d23a2..413b40b5fc 100644 --- a/submodules/TelegramVoip/Sources/GroupCallContext.swift +++ b/submodules/TelegramVoip/Sources/GroupCallContext.swift @@ -461,7 +461,6 @@ public final class OngoingGroupCallContext { #if os(iOS) let audioDevice: OngoingCallContext.AudioDevice? #endif - let sessionId = UInt32.random(in: 0 ..< UInt32(Int32.max)) let joinPayload = Promise<(String, UInt32)>() let networkState = ValuePromise(NetworkState(isConnected: false, isTransitioningFromBroadcastToRtc: false), ignoreRepeated: true) @@ -507,14 +506,9 @@ public final class OngoingGroupCallContext { self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json") let tempStatsLogPath = self.tempStatsLogFile.path -#if os(iOS) - if sharedAudioDevice == nil { - self.audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false) - } else { - self.audioDevice = sharedAudioDevice - } + self.audioDevice = sharedAudioDevice let audioDevice = self.audioDevice -#endif + var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)? var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)? var activityUpdatedImpl: (([UInt32]) -> Void)? @@ -882,7 +876,7 @@ public final class OngoingGroupCallContext { } } - func stop(account: Account, reportCallId: CallId?) { + func stop(account: Account?, reportCallId: CallId?, debugLog: Promise) { self.context.stop() let logPath = self.logPath @@ -892,16 +886,18 @@ public final class OngoingGroupCallContext { } let tempStatsLogPath = self.tempStatsLogFile.path + debugLog.set(.single(nil)) + let queue = self.queue self.context.stop({ queue.async { - if !statsLogPath.isEmpty { + if !statsLogPath.isEmpty, let account { let logsPath = callLogsPath(account: account) let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil) let _ = try? FileManager.default.moveItem(atPath: tempStatsLogPath, toPath: statsLogPath) } - if let callId = reportCallId, !statsLogPath.isEmpty, let data = try? Data(contentsOf: URL(fileURLWithPath: statsLogPath)), let dataString = String(data: data, encoding: .utf8) { + if let callId = reportCallId, !statsLogPath.isEmpty, let data = try? 
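// `stop` now takes an optional Account (plus a debug-log promise) so callers
// that have no account context can still tear the call down; the stats-log
// move and the saveCallDebugLog upload below are skipped when `account` is nil.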
Data(contentsOf: URL(fileURLWithPath: statsLogPath)), let dataString = String(data: data, encoding: .utf8), let account { let engine = TelegramEngine(account: account) let _ = engine.calls.saveCallDebugLog(callId: callId, log: dataString).start(next: { result in switch result { @@ -1219,9 +1215,9 @@ public final class OngoingGroupCallContext { } } - public func stop(account: Account, reportCallId: CallId?) { + public func stop(account: Account?, reportCallId: CallId?, debugLog: Promise) { self.impl.with { impl in - impl.stop(account: account, reportCallId: reportCallId) + impl.stop(account: account, reportCallId: reportCallId, debugLog: debugLog) } } diff --git a/submodules/TelegramVoip/Sources/IpcGroupCallContext.swift b/submodules/TelegramVoip/Sources/IpcGroupCallContext.swift index 1ca888ce29..0b90bd1508 100644 --- a/submodules/TelegramVoip/Sources/IpcGroupCallContext.swift +++ b/submodules/TelegramVoip/Sources/IpcGroupCallContext.swift @@ -3,11 +3,6 @@ import SwiftSignalKit import CoreMedia import ImageIO -private struct PayloadDescription: Codable { - var id: UInt32 - var timestamp: Int32 -} - private struct JoinPayload: Codable { var id: UInt32 var string: String @@ -18,11 +13,6 @@ private struct JoinResponsePayload: Codable { var string: String } -private struct KeepaliveInfo: Codable { - var id: UInt32 - var timestamp: Int32 -} - private struct CutoffPayload: Codable { var id: UInt32 var timestamp: Int32 @@ -370,6 +360,16 @@ private final class MappedFile { } public final class IpcGroupCallBufferAppContext { + struct KeepaliveInfo: Codable { + var id: UInt32 + var timestamp: Int32 + } + + struct PayloadDescription: Codable { + var id: UInt32 + var timestamp: Int32 + } + private let basePath: String private var audioServer: NamedPipeReader? @@ -460,7 +460,7 @@ public final class IpcGroupCallBufferAppContext { private func updateCallIsActive() { let timestamp = Int32(Date().timeIntervalSince1970) - let payloadDescription = PayloadDescription( + let payloadDescription = IpcGroupCallBufferAppContext.PayloadDescription( id: self.id, timestamp: timestamp ) @@ -477,7 +477,7 @@ public final class IpcGroupCallBufferAppContext { guard let keepaliveInfoData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else { return } - guard let keepaliveInfo = try? JSONDecoder().decode(KeepaliveInfo.self, from: keepaliveInfoData) else { + guard let keepaliveInfo = try? JSONDecoder().decode(IpcGroupCallBufferAppContext.KeepaliveInfo.self, from: keepaliveInfoData) else { return } if keepaliveInfo.id != self.id { @@ -587,7 +587,7 @@ public final class IpcGroupCallBufferBroadcastContext { return } - guard let payloadDescription = try? JSONDecoder().decode(PayloadDescription.self, from: payloadDescriptionData) else { + guard let payloadDescription = try? JSONDecoder().decode(IpcGroupCallBufferAppContext.PayloadDescription.self, from: payloadDescriptionData) else { self.statusPromise.set(.single(.finished(.error))) return } @@ -646,7 +646,7 @@ public final class IpcGroupCallBufferBroadcastContext { guard let currentId = self.currentId else { preconditionFailure() } - let keepaliveInfo = KeepaliveInfo( + let keepaliveInfo = IpcGroupCallBufferAppContext.KeepaliveInfo( id: currentId, timestamp: Int32(Date().timeIntervalSince1970) ) @@ -795,3 +795,319 @@ public func deserializePixelBuffer(data: Data) -> CVPixelBuffer? 
{ } } } + +public final class IpcGroupCallEmbeddedAppContext { + public struct JoinPayload: Codable, Equatable { + public var id: UInt32 + public var data: String + public var ssrc: UInt32 + + public init(id: UInt32, data: String, ssrc: UInt32) { + self.id = id + self.data = data + self.ssrc = ssrc + } + } + + public struct JoinResponse: Codable, Equatable { + public var data: String + + public init(data: String) { + self.data = data + } + } + + struct KeepaliveInfo: Codable { + var id: UInt32 + var timestamp: Int32 + var joinPayload: JoinPayload? + + init(id: UInt32, timestamp: Int32, joinPayload: JoinPayload?) { + self.id = id + self.timestamp = timestamp + self.joinPayload = joinPayload + } + } + + struct PayloadDescription: Codable { + var id: UInt32 + var timestamp: Int32 + var activeRequestId: UInt32? + var joinResponse: JoinResponse? + + init(id: UInt32, timestamp: Int32, activeRequestId: UInt32?, joinResponse: JoinResponse?) { + self.id = id + self.timestamp = timestamp + self.activeRequestId = activeRequestId + self.joinResponse = joinResponse + } + } + + private let basePath: String + + private let id: UInt32 + + private let isActivePromise = ValuePromise(false, ignoreRepeated: true) + public var isActive: Signal { + return self.isActivePromise.get() + } + private var isActiveCheckTimer: SwiftSignalKit.Timer? + + private var joinPayloadValue: JoinPayload? { + didSet { + if let joinPayload = self.joinPayloadValue, joinPayload != oldValue { + self.joinPayloadPromise.set(.single(joinPayload)) + } + } + } + private let joinPayloadPromise = Promise() + public var joinPayload: Signal { + return self.joinPayloadPromise.get() + } + + private var nextActiveRequestId: UInt32 = 0 + private var activeRequestId: UInt32? { + didSet { + if self.activeRequestId != oldValue { + self.updateCallIsActive() + } + } + } + + public var joinResponse: JoinResponse? { + didSet { + if self.joinResponse != oldValue { + self.updateCallIsActive() + } + } + } + + private var callActiveInfoTimer: SwiftSignalKit.Timer? + + public init(basePath: String) { + self.basePath = basePath + let _ = try? FileManager.default.createDirectory(atPath: basePath, withIntermediateDirectories: true, attributes: nil) + + self.id = UInt32.random(in: 0 ..< UInt32.max) + + self.updateCallIsActive() + + let callActiveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in + self?.updateCallIsActive() + }, queue: .mainQueue()) + self.callActiveInfoTimer = callActiveInfoTimer + callActiveInfoTimer.start() + + let isActiveCheckTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in + self?.updateKeepaliveInfo() + }, queue: .mainQueue()) + self.isActiveCheckTimer = isActiveCheckTimer + isActiveCheckTimer.start() + } + + deinit { + self.callActiveInfoTimer?.invalidate() + self.isActiveCheckTimer?.invalidate() + } + + private func updateCallIsActive() { + let timestamp = Int32(Date().timeIntervalSince1970) + let payloadDescription = IpcGroupCallEmbeddedAppContext.PayloadDescription( + id: self.id, + timestamp: timestamp, + activeRequestId: self.activeRequestId, + joinResponse: self.joinResponse + ) + guard let payloadDescriptionData = try? JSONEncoder().encode(payloadDescription) else { + return + } + guard let _ = try? 
payloadDescriptionData.write(to: URL(fileURLWithPath: payloadDescriptionPath(basePath: self.basePath)), options: .atomic) else { + return + } + } + + private func updateKeepaliveInfo() { + let filePath = keepaliveInfoPath(basePath: self.basePath) + guard let keepaliveInfoData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else { + return + } + guard let keepaliveInfo = try? JSONDecoder().decode(KeepaliveInfo.self, from: keepaliveInfoData) else { + return + } + if keepaliveInfo.id != self.id { + self.isActivePromise.set(false) + return + } + let timestamp = Int32(Date().timeIntervalSince1970) + if keepaliveInfo.timestamp < timestamp - Int32(keepaliveTimeout) { + self.isActivePromise.set(false) + return + } + + self.isActivePromise.set(true) + + self.joinPayloadValue = keepaliveInfo.joinPayload + } + + public func startScreencast() -> UInt32? { + if self.activeRequestId == nil { + let id = self.nextActiveRequestId + self.nextActiveRequestId += 1 + self.activeRequestId = id + return id + } else { + return nil + } + } + + public func stopScreencast() { + self.activeRequestId = nil + + let timestamp = Int32(Date().timeIntervalSince1970) + let cutoffPayload = CutoffPayload( + id: self.id, + timestamp: timestamp + ) + guard let cutoffPayloadData = try? JSONEncoder().encode(cutoffPayload) else { + return + } + guard let _ = try? cutoffPayloadData.write(to: URL(fileURLWithPath: cutoffPayloadPath(basePath: self.basePath)), options: .atomic) else { + return + } + } +} + +public final class IpcGroupCallEmbeddedBroadcastContext { + public enum Status { + public enum FinishReason { + case screencastEnded + case callEnded + case error + } + case active(id: UInt32?, joinResponse: IpcGroupCallEmbeddedAppContext.JoinResponse?) + case finished(FinishReason) + } + + private let basePath: String + private var timer: SwiftSignalKit.Timer? + + private let statusPromise = Promise() + public var status: Signal { + return self.statusPromise.get() + } + + private var currentId: UInt32? + + private var callActiveInfoTimer: SwiftSignalKit.Timer? + private var keepaliveInfoTimer: SwiftSignalKit.Timer? + private var screencastCutoffTimer: SwiftSignalKit.Timer? + + public var joinPayload: IpcGroupCallEmbeddedAppContext.JoinPayload? { + didSet { + if self.joinPayload != oldValue { + self.writeKeepaliveInfo() + } + } + } + + public init(basePath: String) { + self.basePath = basePath + let _ = try? FileManager.default.createDirectory(atPath: basePath, withIntermediateDirectories: true, attributes: nil) + + let callActiveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in + self?.updateCallIsActive() + }, queue: .mainQueue()) + self.callActiveInfoTimer = callActiveInfoTimer + callActiveInfoTimer.start() + + let screencastCutoffTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in + self?.updateScreencastCutoff() + }, queue: .mainQueue()) + self.screencastCutoffTimer = screencastCutoffTimer + screencastCutoffTimer.start() + } + + deinit { + self.endActiveIndication() + + self.callActiveInfoTimer?.invalidate() + self.keepaliveInfoTimer?.invalidate() + self.screencastCutoffTimer?.invalidate() + } + + private func updateScreencastCutoff() { + let filePath = cutoffPayloadPath(basePath: self.basePath) + guard let cutoffPayloadData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else { + return + } + + guard let cutoffPayload = try? 
JSONDecoder().decode(CutoffPayload.self, from: cutoffPayloadData) else { + return + } + + let timestamp = Int32(Date().timeIntervalSince1970) + if let currentId = self.currentId, currentId == cutoffPayload.id && cutoffPayload.timestamp > timestamp - 10 { + self.statusPromise.set(.single(.finished(.screencastEnded))) + return + } + } + + private func updateCallIsActive() { + let filePath = payloadDescriptionPath(basePath: self.basePath) + guard let payloadDescriptionData = try? Data(contentsOf: URL(fileURLWithPath: filePath)) else { + self.statusPromise.set(.single(.finished(.error))) + return + } + + guard let payloadDescription = try? JSONDecoder().decode(IpcGroupCallEmbeddedAppContext.PayloadDescription.self, from: payloadDescriptionData) else { + self.statusPromise.set(.single(.finished(.error))) + return + } + let timestamp = Int32(Date().timeIntervalSince1970) + if payloadDescription.timestamp < timestamp - 4 { + self.statusPromise.set(.single(.finished(.callEnded))) + return + } + + if let currentId = self.currentId { + if currentId != payloadDescription.id { + self.statusPromise.set(.single(.finished(.callEnded))) + } else { + self.statusPromise.set(.single(.active(id: payloadDescription.activeRequestId, joinResponse: payloadDescription.joinResponse))) + } + } else { + self.currentId = payloadDescription.id + + self.writeKeepaliveInfo() + + let keepaliveInfoTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in + self?.writeKeepaliveInfo() + }, queue: .mainQueue()) + self.keepaliveInfoTimer = keepaliveInfoTimer + keepaliveInfoTimer.start() + + self.statusPromise.set(.single(.active(id: payloadDescription.activeRequestId, joinResponse: payloadDescription.joinResponse))) + } + } + + private func writeKeepaliveInfo() { + guard let currentId = self.currentId else { + preconditionFailure() + } + let keepaliveInfo = IpcGroupCallEmbeddedAppContext.KeepaliveInfo( + id: currentId, + timestamp: Int32(Date().timeIntervalSince1970), + joinPayload: self.joinPayload + ) + guard let keepaliveInfoData = try? JSONEncoder().encode(keepaliveInfo) else { + preconditionFailure() + } + guard let _ = try? keepaliveInfoData.write(to: URL(fileURLWithPath: keepaliveInfoPath(basePath: self.basePath)), options: .atomic) else { + preconditionFailure() + } + } + + private func endActiveIndication() { + let _ = try? 
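// App-side counterpart, sketched for orientation (names from the classes above;
// `coordinationPath` is a placeholder for the shared directory both processes use):
//
//     let appContext = IpcGroupCallEmbeddedAppContext(basePath: coordinationPath)
//     let requestId = appContext.startScreencast() // advertised via PayloadDescription.activeRequestId
//     let disposable = appContext.joinPayload.start(next: { payload in
//         // Forward payload.data / payload.ssrc to the group call, then answer:
//         appContext.joinResponse = IpcGroupCallEmbeddedAppContext.JoinResponse(data: "...")
//     })
//     // Later: appContext.stopScreencast() writes the cutoff payload handled above.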
FileManager.default.removeItem(atPath: keepaliveInfoPath(basePath: self.basePath)) + } +} diff --git a/submodules/TelegramVoip/Sources/OngoingCallContext.swift b/submodules/TelegramVoip/Sources/OngoingCallContext.swift index a5a20e20c4..94b9cd8825 100644 --- a/submodules/TelegramVoip/Sources/OngoingCallContext.swift +++ b/submodules/TelegramVoip/Sources/OngoingCallContext.swift @@ -7,13 +7,13 @@ import TelegramUIPreferences import TgVoip import TgVoipWebrtc -private let debugUseLegacyVersionForReflectors: Bool = { +private func debugUseLegacyVersionForReflectors() -> Bool { #if DEBUG && false return true #else return false #endif -}() +} private struct PeerTag: Hashable, CustomStringConvertible { var bytes: [UInt8] = Array(repeating: 0, count: 16) @@ -510,21 +510,21 @@ public final class OngoingCallVideoCapturer { self.impl.setIsVideoEnabled(value) } - public func injectPixelBuffer(_ pixelBuffer: CVPixelBuffer, rotation: CGImagePropertyOrientation) { + public func injectSampleBuffer(_ sampleBuffer: CMSampleBuffer, rotation: CGImagePropertyOrientation, completion: @escaping () -> Void) { var videoRotation: OngoingCallVideoOrientation = .rotation0 switch rotation { - case .up: - videoRotation = .rotation0 - case .left: - videoRotation = .rotation90 - case .right: - videoRotation = .rotation270 - case .down: - videoRotation = .rotation180 - default: - videoRotation = .rotation0 + case .up: + videoRotation = .rotation0 + case .left: + videoRotation = .rotation90 + case .right: + videoRotation = .rotation270 + case .down: + videoRotation = .rotation180 + default: + videoRotation = .rotation0 } - self.impl.submitPixelBuffer(pixelBuffer, rotation: videoRotation.orientation) + self.impl.submitSampleBuffer(sampleBuffer, rotation: videoRotation.orientation, completion: completion) } public func video() -> Signal { @@ -819,7 +819,7 @@ public final class OngoingCallContext { } #endif - if debugUseLegacyVersionForReflectors { + if debugUseLegacyVersionForReflectors() { return [(OngoingCallThreadLocalContext.version(), true)] } else { var result: [(version: String, supportsVideo: Bool)] = [(OngoingCallThreadLocalContext.version(), false)] @@ -860,9 +860,9 @@ public final class OngoingCallContext { var useModernImplementation = true var version = version var allowP2P = allowP2P - if debugUseLegacyVersionForReflectors { + if debugUseLegacyVersionForReflectors() { useModernImplementation = true - version = "5.0.0" + version = "12.0.0" allowP2P = false } else { useModernImplementation = version != OngoingCallThreadLocalContext.version() @@ -879,7 +879,23 @@ public final class OngoingCallContext { } } - let unfilteredConnections = [connections.primary] + connections.alternatives + var unfilteredConnections: [CallSessionConnection] + unfilteredConnections = [connections.primary] + connections.alternatives + + if version == "12.0.0" { + for connection in unfilteredConnections { + if case let .reflector(reflector) = connection { + unfilteredConnections.append(.reflector(CallSessionConnection.Reflector( + id: 123456, + ip: "91.108.9.38", + ipv6: "", + isTcp: true, + port: 595, + peerTag: reflector.peerTag + ))) + } + } + } var reflectorIdList: [Int64] = [] for connection in unfilteredConnections { @@ -911,11 +927,17 @@ public final class OngoingCallContext { switch connection { case let .reflector(reflector): if reflector.isTcp { - if signalingReflector == nil { - signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: 
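// Debug path for "12.0.0": every reflector connection is mirrored with a
// hardcoded TCP reflector (id 123456, 91.108.9.38:595) that reuses the same
// peer tag. Appending inside the for-in is safe here because Swift iterates
// a copy of the array, so only the original connections are visited.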
reflector.port, username: "reflector", password: hexString(reflector.peerTag)) + if version == "12.0.0" { + /*if signalingReflector == nil { + signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: reflector.port, username: "reflector", password: hexString(reflector.peerTag)) + }*/ + } else { + if signalingReflector == nil { + signalingReflector = OngoingCallConnectionDescriptionWebrtc(reflectorId: 0, hasStun: false, hasTurn: true, hasTcp: true, ip: reflector.ip, port: reflector.port, username: "reflector", password: hexString(reflector.peerTag)) + } + + continue connectionsLoop } - - continue connectionsLoop } case .webRtcReflector: break @@ -962,22 +984,37 @@ public final class OngoingCallContext { directConnection = nil } - #if DEBUG && false + #if DEBUG && true var customParameters = customParameters if let initialCustomParameters = try? JSONSerialization.jsonObject(with: (customParameters ?? "{}").data(using: .utf8)!) as? [String: Any] { var customParametersValue: [String: Any] customParametersValue = initialCustomParameters - customParametersValue["network_standalone_reflectors"] = true as NSNumber - customParametersValue["network_use_mtproto"] = true as NSNumber - customParametersValue["network_skip_initial_ping"] = true as NSNumber - customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)! + if version == "12.0.0" { + customParametersValue["network_use_tcponly"] = true as NSNumber + customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)! + } - if let reflector = filteredConnections.first(where: { $0.username == "reflector" && $0.reflectorId == 1 }) { - filteredConnections = [reflector] + if let value = customParametersValue["network_use_tcponly"] as? Bool, value { + filteredConnections = filteredConnections.filter { connection in + if connection.hasTcp { + return true + } + return false + } + allowP2P = false } } #endif + /*#if DEBUG + if let initialCustomParameters = try? JSONSerialization.jsonObject(with: (customParameters ?? "{}").data(using: .utf8)!) as? [String: Any] { + var customParametersValue: [String: Any] + customParametersValue = initialCustomParameters + customParametersValue["network_kcp_experiment"] = true as NSNumber + customParameters = String(data: try! JSONSerialization.data(withJSONObject: customParametersValue), encoding: .utf8)! 
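The debug branches above all tweak `customParameters` the same way: decode the JSON blob into a dictionary, flip the relevant `network_*` switches, and re-serialize. A condensed, illustrative round-trip under that pattern (the helper name is mine; `network_use_tcponly` is the key the 12.0.0 branch sets):

import Foundation

// Round-trip the custom-parameters JSON, forcing the TCP-only transport flag.
func withTcpOnly(_ customParameters: String?) -> String? {
    guard let data = (customParameters ?? "{}").data(using: .utf8),
          var parameters = (try? JSONSerialization.jsonObject(with: data)) as? [String: Any] else {
        return customParameters
    }
    parameters["network_use_tcponly"] = true as NSNumber
    guard let encoded = try? JSONSerialization.data(withJSONObject: parameters) else {
        return customParameters
    }
    return String(data: encoded, encoding: .utf8)
}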
+ } + #endif*/ + let context = OngoingCallThreadLocalContextWebrtc( version: version, customParameters: customParameters, diff --git a/submodules/TgVoipWebrtc/BUILD b/submodules/TgVoipWebrtc/BUILD index be9f8d91a2..42ad40833b 100644 --- a/submodules/TgVoipWebrtc/BUILD +++ b/submodules/TgVoipWebrtc/BUILD @@ -120,12 +120,15 @@ sources = glob([ "tgcalls/tgcalls/v2/InstanceV2Impl.cpp", "tgcalls/tgcalls/v2/InstanceV2ReferenceImpl.cpp", "tgcalls/tgcalls/v2/NativeNetworkingImpl.cpp", + "tgcalls/tgcalls/v2/RawTcpSocket.cpp", "tgcalls/tgcalls/v2/ReflectorPort.cpp", "tgcalls/tgcalls/v2/ReflectorRelayPortFactory.cpp", "tgcalls/tgcalls/v2/Signaling.cpp", "tgcalls/tgcalls/v2/SignalingConnection.cpp", "tgcalls/tgcalls/v2/SignalingEncryption.cpp", "tgcalls/tgcalls/v2/SignalingSctpConnection.cpp", + "tgcalls/tgcalls/v2/SignalingKcpConnection.cpp", + "tgcalls/tgcalls/v2/ikcp.cpp", ] objc_library( diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h index 6255cbebbd..3135e0dc31 100644 --- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h +++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoipWebrtc/OngoingCallThreadLocalContext.h @@ -209,10 +209,10 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) { - (void)setOnFatalError:(dispatch_block_t _Nullable)onError; - (void)setOnPause:(void (^ _Nullable)(bool))onPause; -- (void)setOnIsActiveUpdated:(void (^_Nonnull)(bool))onIsActiveUpdated; +- (void)setOnIsActiveUpdated:(void (^ _Nonnull)(bool))onIsActiveUpdated; #if TARGET_OS_IOS -- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation; +- (void)submitSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation completion:(void (^_Nonnull)())completion; #endif - (GroupCallDisposable * _Nonnull)addVideoOutput:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink; diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm index 0a8fcf3821..ea146373e4 100644 --- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm +++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm @@ -667,8 +667,11 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls: } #if TARGET_OS_IOS -- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation { - if (!pixelBuffer) { +- (void)submitSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation completion:(void (^_Nonnull)())completion { + if (!sampleBuffer) { + if (completion) { + completion(); + } return; } @@ -688,19 +691,30 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls: break; } - if (_isProcessingCustomSampleBuffer.value) { + /*if (_isProcessingCustomSampleBuffer.value) { + if (completion) { + completion(); + } return; - } + }*/ _isProcessingCustomSampleBuffer.value = true; - tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, pixelBuffer = CFRetain(pixelBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer]() { + void (^capturedCompletion)() = [completion copy]; + + 
tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask([interface = _interface, sampleBuffer = CFRetain(sampleBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer, capturedCompletion]() { auto capture = GetVideoCaptureAssumingSameThread(interface.get()); auto source = capture->source(); if (source) { - [CustomExternalCapturer passPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer]; + CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer((CMSampleBufferRef)sampleBuffer); + + [CustomExternalCapturer passPixelBuffer:pixelBuffer sampleBufferReference:(CMSampleBufferRef)sampleBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer]; } - CFRelease(pixelBuffer); + CFRelease(sampleBuffer); isProcessingCustomSampleBuffer.value = false; + + if (capturedCompletion) { + capturedCompletion(); + } }); }
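The completion handler threaded through `submitSampleBuffer` gives Swift callers a natural back-pressure point, now that the `_isProcessingCustomSampleBuffer` early-out is commented out on the Objective-C++ side. A minimal sketch of driving the new `injectSampleBuffer(_:rotation:completion:)` that way (the forwarder type and its drop policy are illustrative, not part of the diff; assumes the TelegramVoip module is linked and that `enqueue` is always called from the main queue):

import CoreMedia
import Foundation
import ImageIO

// Illustrative forwarder: submit one frame at a time and drop frames that
// arrive while the media thread is still processing the previous one.
final class ScreencastFrameForwarder {
    private let capturer: OngoingCallVideoCapturer
    private var isSubmitting = false

    init(capturer: OngoingCallVideoCapturer) {
        self.capturer = capturer
    }

    func enqueue(_ sampleBuffer: CMSampleBuffer, orientation: CGImagePropertyOrientation) {
        if self.isSubmitting {
            return // previous frame still in flight; skip this one
        }
        self.isSubmitting = true
        self.capturer.injectSampleBuffer(sampleBuffer, rotation: orientation, completion: { [weak self] in
            // The completion fires on the media thread; hop back before
            // touching our main-queue state.
            DispatchQueue.main.async {
                self?.isSubmitting = false
            }
        })
    }
}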