diff --git a/submodules/Camera/Sources/Camera.swift b/submodules/Camera/Sources/Camera.swift
index 7c599191f3..4aa87b597e 100644
--- a/submodules/Camera/Sources/Camera.swift
+++ b/submodules/Camera/Sources/Camera.swift
@@ -103,7 +103,7 @@ final class CameraDeviceContext {
         }
         switch DeviceModel.current {
         case .iPhone15ProMax, .iPhone14ProMax, .iPhone13ProMax, .iPhone16ProMax, .iPhone17Pro, .iPhone17ProMax:
-            return 60.0
+            return 30.0
         default:
             return 30.0
         }
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
index d635a2c004..2cd076b905 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraScreen.swift
@@ -1124,26 +1124,53 @@ private final class CameraScreenComponent: CombinedComponent {
 
             guard let controller = self.getController() else {
                 return
             }
-            
-            let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
-            let alertController = textAlertController(
-                context: self.context,
-                forceTheme: defaultDarkColorPresentationTheme,
-                title: "End Live Stream",
-                text: "Are you sure you want to end this live stream?",
-                actions: [
-                    TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}),
-                    TextAlertAction(type: .genericAction, title: "End", action: { [weak self, weak controller] in
-                        guard let self, let controller else {
-                            return
-                        }
-                        let _ = self.liveStreamCall?.leave(terminateIfPossible: true).startStandalone()
-                        controller.dismiss(animated: true)
-                    })
-                ]
-            )
-            controller.present(alertController, in: .window(.root))
+            if case let .liveStream(livestream) = self.liveStreamStory?.media, livestream.kind == .rtmp {
+                let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+                let alertController = textAlertController(
+                    context: self.context,
+                    forceTheme: defaultDarkColorPresentationTheme,
+                    title: "End Live Stream",
+                    text: "Are you sure you want to end this live stream?",
+                    actions: [
+                        TextAlertAction(type: .destructiveAction, title: "End", action: { [weak self, weak controller] in
+                            guard let self, let controller else {
+                                return
+                            }
+                            let _ = self.liveStreamCall?.leave(terminateIfPossible: true).startStandalone()
+                            controller.dismiss(animated: true)
+                        }),
+                        TextAlertAction(type: .genericAction, title: "Leave", action: { [weak controller] in
+                            guard let controller else {
+                                return
+                            }
+                            controller.dismiss(animated: true)
+                        }),
+                        TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {})
+                    ],
+                    actionLayout: .vertical
+                )
+                controller.present(alertController, in: .window(.root))
+            } else {
+                let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+                let alertController = textAlertController(
+                    context: self.context,
+                    forceTheme: defaultDarkColorPresentationTheme,
+                    title: "End Live Stream",
+                    text: "Are you sure you want to end this live stream?",
+                    actions: [
+                        TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}),
+                        TextAlertAction(type: .destructiveAction, title: "End", action: { [weak self, weak controller] in
+                            guard let self, let controller else {
+                                return
+                            }
+                            let _ = self.liveStreamCall?.leave(terminateIfPossible: true).startStandalone()
+                            controller.dismiss(animated: true)
+                        })
+                    ]
+                )
+                controller.present(alertController, in: .window(.root))
+            }
         }
 
         func setupLiveStreamCamera(call: PresentationGroupCall) {
@@ -1913,11 +1940,11 @@ private final class CameraScreenComponent: CombinedComponent {
             }
 
             let availableModes: [CameraMode]
-            #if DEBUG
+            //#if DEBUG
             availableModes = [.photo, .video, .live]
-            #else
-            availableModes = [.photo, .video]
-            #endif
+            //#else
+            //availableModes = [.photo, .video]
+            //#endif
 
             let modeControl = modeControl.update(
                 component: ModeComponent(
diff --git a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift
index 2778dd2ae2..b09891869d 100644
--- a/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift
+++ b/submodules/TelegramUI/Components/CameraScreen/Sources/CameraVideoSource.swift
@@ -110,7 +110,8 @@ final class LiveStreamMediaSource {
             kCVPixelBufferPoolMinimumBufferCountKey as String: 3 as NSNumber
         ]
         let pixelBufferOptions: [String: Any] = [
-            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA as NSNumber,
+            //kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA as NSNumber,
+            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange as NSNumber,
             kCVPixelBufferWidthKey as String: UInt32(width),
             kCVPixelBufferHeightKey as String: UInt32(height)
         ]
@@ -168,7 +169,8 @@ final class LiveStreamMediaSource {
             outputDimensions: CGSize(width: 720.0, height: 1280.0),
             textScale: 1.0,
             videoDuration: nil,
-            additionalVideoDuration: nil
+            additionalVideoDuration: nil,
+            outputsYuvBuffers: true
         )
 
         self.mainVideoOutput = CameraVideoOutput(sink: { [weak self] buffer, mirror in
diff --git a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
index 602656dc59..eafbe03602 100644
--- a/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
+++ b/submodules/TelegramUI/Components/MediaEditor/Sources/MediaEditorComposer.swift
@@ -97,6 +97,9 @@ public final class MediaEditorComposer {
     private let outputDimensions: CGSize
     private let textScale: CGFloat
 
+    private let outputsYuvBuffers: Bool
+    private let yuvPixelFormat: OSType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
+
     private let renderer = MediaEditorRenderer()
     private let renderChain = MediaEditorRenderChain()
 
@@ -112,12 +115,14 @@ public final class MediaEditorComposer {
         outputDimensions: CGSize,
         textScale: CGFloat,
         videoDuration: Double?,
-        additionalVideoDuration: Double?
+        additionalVideoDuration: Double?,
+        outputsYuvBuffers: Bool = false
     ) {
         self.values = values
         self.dimensions = dimensions
         self.outputDimensions = outputDimensions
         self.textScale = textScale
+        self.outputsYuvBuffers = outputsYuvBuffers
 
         let colorSpace = CGColorSpaceCreateDeviceRGB()
         self.colorSpace = colorSpace
@@ -161,7 +166,7 @@ public final class MediaEditorComposer {
         self.renderChain.update(values: self.values)
         self.renderer.videoFinishPass.update(values: self.values, videoDuration: videoDuration, additionalVideoDuration: additionalVideoDuration)
     }
-    
+
     var previousAdditionalInput: [Int: Input] = [:]
     public func process(main: Input, additional: [Input?], timestamp: CMTime, pool: CVPixelBufferPool?, completion: @escaping (CVPixelBuffer?) -> Void) {
         guard let pool, let ciContext = self.ciContext else {
@@ -185,26 +190,35 @@ public final class MediaEditorComposer {
 
         if let resultTexture = self.renderer.resultTexture, var ciImage = CIImage(mtlTexture: resultTexture, options: [.colorSpace: self.colorSpace]) {
             ciImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height))
-            
+
             var pixelBuffer: CVPixelBuffer?
             CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pixelBuffer)
-            if let pixelBuffer {
+            guard let pixelBuffer else {
+                completion(nil)
+                return
+            }
+
+            if self.outputsYuvBuffers {
+                let scale = self.outputDimensions.width / ciImage.extent.width
+                ciImage = ciImage.samplingLinear().transformed(by: CGAffineTransform(scaleX: scale, y: scale))
+
+                ciContext.render(ciImage, to: pixelBuffer)
+                completion(pixelBuffer)
+            } else {
                 makeEditorImageFrameComposition(context: ciContext, inputImage: ciImage, drawingImage: self.drawingImage, maskImage: self.maskImage, dimensions: self.dimensions, values: self.values, entities: self.entities, time: timestamp, completion: { compositedImage in
                     if var compositedImage {
                         let scale = self.outputDimensions.width / compositedImage.extent.width
                         compositedImage = compositedImage.samplingLinear().transformed(by: CGAffineTransform(scaleX: scale, y: scale))
 
-                        self.ciContext?.render(compositedImage, to: pixelBuffer)
+                        ciContext.render(compositedImage, to: pixelBuffer)
                         completion(pixelBuffer)
                     } else {
                         completion(nil)
                     }
                 })
-                return
             }
         }
-        completion(nil)
     }
 
     private var cachedTextures: [Int: MTLTexture] = [:]
diff --git a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift
index b62e30f57c..a54b67f4e0 100644
--- a/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift
+++ b/submodules/TelegramUI/Components/MessageInputPanelComponent/Sources/MessageInputPanelComponent.swift
@@ -1032,7 +1032,7 @@ public final class MessageInputPanelComponent: Component {
                     strings: component.strings,
                     chatPeerId: component.chatLocation?.peerId ?? component.context.account.peerId,
                     inlineActions: inlineActions,
-                    leftAction: ChatTextInputPanelComponent.LeftAction(kind: .toggleExpanded(isVisible: component.liveChatState?.isEnabled == true, isExpanded: component.liveChatState?.isExpanded ?? true && component.liveChatState?.isEmpty == false, hasUnseen: component.liveChatState?.hasUnseenMessages ?? false), action: { [weak self] in
+                    leftAction: ChatTextInputPanelComponent.LeftAction(kind: .toggleExpanded(isVisible: component.liveChatState?.isEnabled == true, isExpanded: component.liveChatState?.isExpanded ?? true && component.liveChatState?.isEmpty == false, hasUnseen: component.liveChatState?.hasUnseenMessages ?? false), action: { [weak self] in
                         guard let self, let component = self.component else {
                             return
                         }