[WIP] Video chat UI

This commit is contained in:
Isaac 2024-09-10 19:33:13 +08:00
parent c6cf576694
commit ca5b6c0f0b
12 changed files with 85 additions and 304 deletions

View File

@ -1370,7 +1370,7 @@ public class BrowserScreen: ViewController, MinimizableController {
inputHeight: layout.inputHeight ?? 0.0,
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
orientation: nil,
orientation: layout.metrics.orientation,
isVisible: true,
theme: self.presentationData.theme,
strings: self.presentationData.strings,

View File

@ -65,7 +65,7 @@ open class ViewControllerComponentContainer: ViewController {
inputHeight: CGFloat,
metrics: LayoutMetrics,
deviceMetrics: DeviceMetrics,
orientation: UIInterfaceOrientation? = nil,
orientation: UIInterfaceOrientation?,
isVisible: Bool,
theme: PresentationTheme,
strings: PresentationStrings,
@ -177,6 +177,7 @@ open class ViewControllerComponentContainer: ViewController {
inputHeight: layout.inputHeight ?? 0.0,
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
orientation: layout.metrics.orientation,
isVisible: self.currentIsVisible,
theme: self.resolvedTheme,
strings: self.presentationData.strings,

View File

@ -481,6 +481,7 @@ public class ReplaceBoostScreen: ViewController {
inputHeight: layout.inputHeight ?? 0.0,
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
orientation: layout.metrics.orientation,
isVisible: self.currentIsVisible,
theme: self.presentationData.theme,
strings: self.presentationData.strings,

View File

@ -335,7 +335,7 @@ final class MediaStreamVideoComponent: Component {
stallTimer = _stallTimer
self.clipsToBounds = component.isFullscreen // or just true
if let videoView = self.videoRenderingContext.makeView(input: input, forceSampleBufferDisplayLayer: true) {
if let videoView = self.videoRenderingContext.makeView(input: input, blur: false, forceSampleBufferDisplayLayer: true) {
self.videoView = videoView
self.addSubview(videoView)
videoView.alpha = 0

View File

@ -288,20 +288,26 @@ final class VideoChatParticipantThumbnailComponent: Component {
var rotatedVideoResolution = videoResolution
var rotatedVideoFrame = videoFrame
var rotatedBlurredVideoFrame = blurredVideoFrame
var rotatedVideoBoundsSize = videoFrame.size
var rotatedBlurredVideoBoundsSize = blurredVideoFrame.size
if videoIsRotated {
rotatedVideoResolution = CGSize(width: rotatedVideoResolution.height, height: rotatedVideoResolution.width)
rotatedVideoBoundsSize = CGSize(width: rotatedVideoBoundsSize.height, height: rotatedVideoBoundsSize.width)
rotatedVideoFrame = rotatedVideoFrame.size.centered(around: rotatedVideoFrame.center)
rotatedBlurredVideoBoundsSize = CGSize(width: rotatedBlurredVideoBoundsSize.height, height: rotatedBlurredVideoBoundsSize.width)
rotatedBlurredVideoFrame = rotatedBlurredVideoFrame.size.centered(around: rotatedBlurredVideoFrame.center)
}
rotatedVideoResolution = rotatedVideoResolution.aspectFittedOrSmaller(CGSize(width: rotatedVideoFrame.width * UIScreenScale, height: rotatedVideoFrame.height * UIScreenScale))
transition.setPosition(layer: videoLayer, position: rotatedVideoFrame.center)
transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoBoundsSize))
transition.setTransform(layer: videoLayer, transform: CATransform3DMakeRotation(CGFloat(videoSpec.rotationAngle), 0.0, 0.0, 1.0))
videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
transition.setPosition(layer: videoLayer.blurredLayer, position: rotatedBlurredVideoFrame.center)
transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedBlurredVideoFrame.size))
transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedBlurredVideoBoundsSize))
transition.setTransform(layer: videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoSpec.rotationAngle), 0.0, 0.0, 1.0))
}
} else {

View File

@ -3,24 +3,25 @@ import UIKit
import Display
import ComponentFlow
import TelegramPresentationData
import TelegramCore
final class VideoChatParticipantStatusComponent: Component {
let isMuted: Bool
let muteState: GroupCallParticipantsContext.Participant.MuteState?
let isSpeaking: Bool
let theme: PresentationTheme
init(
isMuted: Bool,
muteState: GroupCallParticipantsContext.Participant.MuteState?,
isSpeaking: Bool,
theme: PresentationTheme
) {
self.isMuted = isMuted
self.muteState = muteState
self.isSpeaking = isSpeaking
self.theme = theme
}
static func ==(lhs: VideoChatParticipantStatusComponent, rhs: VideoChatParticipantStatusComponent) -> Bool {
if lhs.isMuted != rhs.isMuted {
if lhs.muteState != rhs.muteState {
return false
}
if lhs.isSpeaking != rhs.isSpeaking {
@ -61,7 +62,7 @@ final class VideoChatParticipantStatusComponent: Component {
transition: transition,
component: AnyComponent(VideoChatMuteIconComponent(
color: .white,
content: .mute(isFilled: false, isMuted: component.isMuted && !component.isSpeaking)
content: .mute(isFilled: false, isMuted: component.muteState != nil && !component.isSpeaking)
)),
environment: {},
containerSize: CGSize(width: 36.0, height: 36.0)
@ -80,7 +81,22 @@ final class VideoChatParticipantStatusComponent: Component {
tintTransition = .immediate
}
if let iconView = muteStatusView.iconView {
tintTransition.setTintColor(layer: iconView.layer, color: component.isSpeaking ? UIColor(rgb: 0x33C758) : UIColor(white: 1.0, alpha: 0.4))
let iconTintColor: UIColor
if component.isSpeaking {
iconTintColor = UIColor(rgb: 0x33C758)
} else {
if let muteState = component.muteState {
if muteState.canUnmute {
iconTintColor = UIColor(white: 1.0, alpha: 0.4)
} else {
iconTintColor = UIColor(rgb: 0xFF3B30)
}
} else {
iconTintColor = UIColor(white: 1.0, alpha: 0.4)
}
}
tintTransition.setTintColor(layer: iconView.layer, color: iconTintColor)
}
}

View File

@ -43,6 +43,7 @@ final class VideoChatParticipantVideoComponent: Component {
let isUIHidden: Bool
let contentInsets: UIEdgeInsets
let controlInsets: UIEdgeInsets
let interfaceOrientation: UIInterfaceOrientation
weak var rootVideoLoadingEffectView: VideoChatVideoLoadingEffectView?
let action: (() -> Void)?
@ -55,6 +56,7 @@ final class VideoChatParticipantVideoComponent: Component {
isUIHidden: Bool,
contentInsets: UIEdgeInsets,
controlInsets: UIEdgeInsets,
interfaceOrientation: UIInterfaceOrientation,
rootVideoLoadingEffectView: VideoChatVideoLoadingEffectView?,
action: (() -> Void)?
) {
@ -66,6 +68,7 @@ final class VideoChatParticipantVideoComponent: Component {
self.isUIHidden = isUIHidden
self.contentInsets = contentInsets
self.controlInsets = controlInsets
self.interfaceOrientation = interfaceOrientation
self.rootVideoLoadingEffectView = rootVideoLoadingEffectView
self.action = action
}
@ -92,6 +95,9 @@ final class VideoChatParticipantVideoComponent: Component {
if lhs.controlInsets != rhs.controlInsets {
return false
}
if lhs.interfaceOrientation != rhs.interfaceOrientation {
return false
}
if (lhs.action == nil) != (rhs.action == nil) {
return false
}
@ -101,10 +107,12 @@ final class VideoChatParticipantVideoComponent: Component {
private struct VideoSpec: Equatable {
var resolution: CGSize
var rotationAngle: Float
var followsDeviceOrientation: Bool
init(resolution: CGSize, rotationAngle: Float) {
init(resolution: CGSize, rotationAngle: Float, followsDeviceOrientation: Bool) {
self.resolution = resolution
self.rotationAngle = rotationAngle
self.followsDeviceOrientation = followsDeviceOrientation
}
}
@ -320,7 +328,7 @@ final class VideoChatParticipantVideoComponent: Component {
videoLayer.video = videoOutput
if let videoOutput {
let videoSpec = VideoSpec(resolution: videoOutput.resolution, rotationAngle: videoOutput.rotationAngle)
let videoSpec = VideoSpec(resolution: videoOutput.resolution, rotationAngle: videoOutput.rotationAngle, followsDeviceOrientation: videoOutput.followsDeviceOrientation)
if self.videoSpec != videoSpec {
self.videoSpec = videoSpec
if !self.isUpdating {
@ -335,69 +343,6 @@ final class VideoChatParticipantVideoComponent: Component {
}
}
}
/*var notifyOrientationUpdated = false
var notifyIsMirroredUpdated = false
if !self.didReportFirstFrame {
notifyOrientationUpdated = true
notifyIsMirroredUpdated = true
}
if let currentOutput = videoOutput {
let currentAspect: CGFloat
if currentOutput.resolution.height > 0.0 {
currentAspect = currentOutput.resolution.width / currentOutput.resolution.height
} else {
currentAspect = 1.0
}
if self.currentAspect != currentAspect {
self.currentAspect = currentAspect
notifyOrientationUpdated = true
}
let currentOrientation: PresentationCallVideoView.Orientation
if currentOutput.followsDeviceOrientation {
currentOrientation = .rotation0
} else {
if abs(currentOutput.rotationAngle - 0.0) < .ulpOfOne {
currentOrientation = .rotation0
} else if abs(currentOutput.rotationAngle - Float.pi * 0.5) < .ulpOfOne {
currentOrientation = .rotation90
} else if abs(currentOutput.rotationAngle - Float.pi) < .ulpOfOne {
currentOrientation = .rotation180
} else if abs(currentOutput.rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
currentOrientation = .rotation270
} else {
currentOrientation = .rotation0
}
}
if self.currentOrientation != currentOrientation {
self.currentOrientation = currentOrientation
notifyOrientationUpdated = true
}
let currentIsMirrored = !currentOutput.mirrorDirection.isEmpty
if self.currentIsMirrored != currentIsMirrored {
self.currentIsMirrored = currentIsMirrored
notifyIsMirroredUpdated = true
}
}
if !self.didReportFirstFrame {
self.didReportFirstFrame = true
self.onFirstFrameReceived?(Float(self.currentAspect))
}
if notifyOrientationUpdated {
self.onOrientationUpdated?(self.currentOrientation, self.currentAspect)
}
if notifyIsMirroredUpdated {
self.onIsMirroredUpdated?(self.currentIsMirrored)
}*/
}
}
}
@ -407,9 +352,11 @@ final class VideoChatParticipantVideoComponent: Component {
if let videoSpec = self.videoSpec {
videoBackgroundLayer.isHidden = false
let rotationAngle = resolveCallVideoRotationAngle(angle: videoSpec.rotationAngle, followsDeviceOrientation: videoSpec.followsDeviceOrientation, interfaceOrientation: component.interfaceOrientation)
var rotatedResolution = videoSpec.resolution
var videoIsRotated = false
if abs(videoSpec.rotationAngle - Float.pi * 0.5) < .ulpOfOne || abs(videoSpec.rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
if abs(rotationAngle - Float.pi * 0.5) < .ulpOfOne || abs(rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
videoIsRotated = true
}
if videoIsRotated {
@ -426,22 +373,26 @@ final class VideoChatParticipantVideoComponent: Component {
var rotatedVideoResolution = videoResolution
var rotatedVideoFrame = videoFrame
var rotatedBlurredVideoFrame = blurredVideoFrame
var rotatedVideoBoundsSize = videoFrame.size
var rotatedBlurredVideoBoundsSize = blurredVideoFrame.size
if videoIsRotated {
rotatedVideoResolution = CGSize(width: rotatedVideoResolution.height, height: rotatedVideoResolution.width)
rotatedVideoBoundsSize = CGSize(width: rotatedVideoBoundsSize.height, height: rotatedVideoBoundsSize.width)
rotatedVideoFrame = rotatedVideoFrame.size.centered(around: rotatedVideoFrame.center)
rotatedBlurredVideoBoundsSize = CGSize(width: rotatedBlurredVideoBoundsSize.height, height: rotatedBlurredVideoBoundsSize.width)
rotatedBlurredVideoFrame = rotatedBlurredVideoFrame.size.centered(around: rotatedBlurredVideoFrame.center)
}
rotatedVideoResolution = rotatedVideoResolution.aspectFittedOrSmaller(CGSize(width: rotatedVideoFrame.width * UIScreenScale, height: rotatedVideoFrame.height * UIScreenScale))
transition.setPosition(layer: videoLayer, position: rotatedVideoFrame.center)
transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
transition.setTransform(layer: videoLayer, transform: CATransform3DMakeRotation(CGFloat(videoSpec.rotationAngle), 0.0, 0.0, 1.0))
transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoBoundsSize))
transition.setTransform(layer: videoLayer, transform: CATransform3DMakeRotation(CGFloat(rotationAngle), 0.0, 0.0, 1.0))
videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
transition.setPosition(layer: videoLayer.blurredLayer, position: rotatedBlurredVideoFrame.center)
transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedBlurredVideoFrame.size))
transition.setTransform(layer: videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoSpec.rotationAngle), 0.0, 0.0, 1.0))
transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: rotatedBlurredVideoBoundsSize))
transition.setTransform(layer: videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(rotationAngle), 0.0, 0.0, 1.0))
}
} else {
if let videoBackgroundLayer = self.videoBackgroundLayer {

View File

@ -114,6 +114,7 @@ final class VideoChatParticipantsComponent: Component {
let layout: Layout
let expandedInsets: UIEdgeInsets
let safeInsets: UIEdgeInsets
let interfaceOrientation: UIInterfaceOrientation
let openParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
let updateMainParticipant: (VideoParticipantKey?) -> Void
let updateIsMainParticipantPinned: (Bool) -> Void
@ -129,6 +130,7 @@ final class VideoChatParticipantsComponent: Component {
layout: Layout,
expandedInsets: UIEdgeInsets,
safeInsets: UIEdgeInsets,
interfaceOrientation: UIInterfaceOrientation,
openParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
updateMainParticipant: @escaping (VideoParticipantKey?) -> Void,
updateIsMainParticipantPinned: @escaping (Bool) -> Void,
@ -143,6 +145,7 @@ final class VideoChatParticipantsComponent: Component {
self.layout = layout
self.expandedInsets = expandedInsets
self.safeInsets = safeInsets
self.interfaceOrientation = interfaceOrientation
self.openParticipantContextMenu = openParticipantContextMenu
self.updateMainParticipant = updateMainParticipant
self.updateIsMainParticipantPinned = updateIsMainParticipantPinned
@ -174,6 +177,9 @@ final class VideoChatParticipantsComponent: Component {
if lhs.safeInsets != rhs.safeInsets {
return false
}
if lhs.interfaceOrientation != rhs.interfaceOrientation {
return false
}
return true
}
@ -857,6 +863,7 @@ final class VideoChatParticipantsComponent: Component {
isUIHidden: isItemUIHidden,
contentInsets: itemContentInsets,
controlInsets: itemControlInsets,
interfaceOrientation: component.interfaceOrientation,
rootVideoLoadingEffectView: self.rootVideoLoadingEffectView,
action: { [weak self] in
guard let self, let component = self.component else {
@ -991,7 +998,7 @@ final class VideoChatParticipantsComponent: Component {
}
let rightAccessoryComponent: AnyComponent<Empty> = AnyComponent(VideoChatParticipantStatusComponent(
isMuted: participant.muteState != nil,
muteState: participant.muteState,
isSpeaking: component.speakingParticipants.contains(participant.peer.id),
theme: component.theme
))

View File

@ -1545,7 +1545,7 @@ private final class VideoChatScreenComponent: Component {
var isFrontCamera = true
let videoCapturer = OngoingCallVideoCapturer()
let input = videoCapturer.video()
if let videoView = self.videoRenderingContext.makeView(input: input) {
if let videoView = self.videoRenderingContext.makeView(input: input, blur: false) {
videoView.updateIsEnabled(true)
let cameraNode = GroupVideoNode(videoView: videoView, backdropVideoView: nil)
@ -2143,6 +2143,7 @@ private final class VideoChatScreenComponent: Component {
layout: participantsLayout,
expandedInsets: participantsExpandedInsets,
safeInsets: participantsSafeInsets,
interfaceOrientation: environment.orientation ?? .portrait,
openParticipantContextMenu: { [weak self] id, sourceView, gesture in
guard let self else {
return

View File

@ -6,8 +6,6 @@ import SwiftSignalKit
import AccountContext
import TelegramVoip
import AVFoundation
import CallScreen
import MetalEngine
protocol VideoRenderingView: UIView {
func setOnFirstFrameReceived(_ f: @escaping (Float) -> Void)
@ -36,40 +34,30 @@ class VideoRenderingContext {
}
#endif
func makeView(input: Signal<OngoingGroupCallContext.VideoFrameData, NoError>, forceSampleBufferDisplayLayer: Bool = false) -> VideoRenderingView? {
if !forceSampleBufferDisplayLayer {
return CallScreenVideoView(input: input)
}
func makeView(input: Signal<OngoingGroupCallContext.VideoFrameData, NoError>, blur: Bool, forceSampleBufferDisplayLayer: Bool = false) -> VideoRenderingView? {
#if targetEnvironment(simulator)
if blur {
#if DEBUG
return SampleBufferVideoRenderingView(input: input)
#else
return nil
#endif
}
return SampleBufferVideoRenderingView(input: input)
#else
if #available(iOS 13.0, *), !forceSampleBufferDisplayLayer {
return MetalVideoRenderingView(renderingContext: self.metalContext, input: input, blur: false)
return MetalVideoRenderingView(renderingContext: self.metalContext, input: input, blur: blur)
} else {
if blur {
return nil
}
return SampleBufferVideoRenderingView(input: input)
}
#endif
}
func makeBlurView(input: Signal<OngoingGroupCallContext.VideoFrameData, NoError>, mainView: VideoRenderingView?, forceSampleBufferDisplayLayer: Bool = false) -> VideoRenderingView? {
if let mainView = mainView as? CallScreenVideoView {
return CallScreenVideoBlurView(mainView: mainView)
}
#if targetEnvironment(simulator)
#if DEBUG
return SampleBufferVideoRenderingView(input: input)
#else
return nil
#endif
#else
if #available(iOS 13.0, *), !forceSampleBufferDisplayLayer {
return MetalVideoRenderingView(renderingContext: self.metalContext, input: input, blur: true)
} else {
return nil
}
#endif
return self.makeView(input: input, blur: true, forceSampleBufferDisplayLayer: forceSampleBufferDisplayLayer)
}
func updateVisibility(isVisible: Bool) {
@ -96,194 +84,3 @@ extension PresentationCallVideoView.Orientation {
}
}
}
// Renders a group-call video stream into a PrivateCallVideoLayer and reports
// first-frame / orientation / mirroring changes to registered callbacks.
// NOTE(review): this view appears to be the non-blurred main renderer; the
// companion CallScreenVideoBlurView re-hosts this view's blurredLayer.
private final class CallScreenVideoView: UIView, VideoRenderingView {
// Toggled via updateIsEnabled(_:); not read anywhere in this class — presumably
// kept for VideoRenderingView conformance. TODO confirm.
private var isEnabled: Bool = false
// Callback invoked once per setOnFirstFrameReceived registration, with the
// current width/height aspect ratio.
private var onFirstFrameReceived: ((Float) -> Void)?
// Callback invoked whenever the derived orientation or aspect changes.
private var onOrientationUpdated: ((PresentationCallVideoView.Orientation, CGFloat) -> Void)?
// Callback invoked whenever the mirroring state changes.
private var onIsMirroredUpdated: ((Bool) -> Void)?
// Reset to false by setOnFirstFrameReceived so the next frame re-reports.
private var didReportFirstFrame: Bool = false
// Last observed state, cached to suppress redundant callback invocations.
private var currentIsMirrored: Bool = false
private var currentOrientation: PresentationCallVideoView.Orientation = .rotation0
// Width / height of the last output; 1.0 until a frame with height > 0 arrives.
private var currentAspect: CGFloat = 1.0
// fileprivate so CallScreenVideoBlurView can reach videoLayer.blurredLayer.
fileprivate let videoSource: AdaptedCallVideoSource
private var disposable: Disposable?
fileprivate let videoLayer: PrivateCallVideoLayer
// Subscribes to `input` via AdaptedCallVideoSource and pushes each new output
// into the video layer, firing the notification callbacks as state changes.
init(input: Signal<OngoingGroupCallContext.VideoFrameData, NoError>) {
self.videoLayer = PrivateCallVideoLayer()
self.videoLayer.masksToBounds = true
self.videoSource = AdaptedCallVideoSource(videoStreamSignal: input)
super.init(frame: CGRect())
self.layer.addSublayer(self.videoLayer)
self.disposable = self.videoSource.addOnUpdated { [weak self] in
guard let self else {
return
}
let videoOutput = self.videoSource.currentOutput
self.videoLayer.video = videoOutput
// Before the first frame is reported, force both notifications so new
// observers receive the initial state.
var notifyOrientationUpdated = false
var notifyIsMirroredUpdated = false
if !self.didReportFirstFrame {
notifyOrientationUpdated = true
notifyIsMirroredUpdated = true
}
if let currentOutput = videoOutput {
// Guard against a zero height to avoid division by zero.
let currentAspect: CGFloat
if currentOutput.resolution.height > 0.0 {
currentAspect = currentOutput.resolution.width / currentOutput.resolution.height
} else {
currentAspect = 1.0
}
if self.currentAspect != currentAspect {
self.currentAspect = currentAspect
notifyOrientationUpdated = true
}
// When the source follows device orientation, the reported orientation is
// fixed at .rotation0; otherwise the rotation angle (radians) is snapped
// to the nearest quarter turn, defaulting to .rotation0 for other values.
let currentOrientation: PresentationCallVideoView.Orientation
if currentOutput.followsDeviceOrientation {
currentOrientation = .rotation0
} else {
if abs(currentOutput.rotationAngle - 0.0) < .ulpOfOne {
currentOrientation = .rotation0
} else if abs(currentOutput.rotationAngle - Float.pi * 0.5) < .ulpOfOne {
currentOrientation = .rotation90
} else if abs(currentOutput.rotationAngle - Float.pi) < .ulpOfOne {
currentOrientation = .rotation180
} else if abs(currentOutput.rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
currentOrientation = .rotation270
} else {
currentOrientation = .rotation0
}
}
if self.currentOrientation != currentOrientation {
self.currentOrientation = currentOrientation
notifyOrientationUpdated = true
}
// Any non-empty mirror direction counts as mirrored.
let currentIsMirrored = !currentOutput.mirrorDirection.isEmpty
if self.currentIsMirrored != currentIsMirrored {
self.currentIsMirrored = currentIsMirrored
notifyIsMirroredUpdated = true
}
}
// First-frame report fires even when videoOutput is nil, using the cached
// (possibly default) aspect.
if !self.didReportFirstFrame {
self.didReportFirstFrame = true
self.onFirstFrameReceived?(Float(self.currentAspect))
}
if notifyOrientationUpdated {
self.onOrientationUpdated?(self.currentOrientation, self.currentAspect)
}
if notifyIsMirroredUpdated {
self.onIsMirroredUpdated?(self.currentIsMirrored)
}
}
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.disposable?.dispose()
}
// Registering a new handler resets didReportFirstFrame so the handler is
// invoked on the next source update.
func setOnFirstFrameReceived(_ f: @escaping (Float) -> Void) {
self.onFirstFrameReceived = f
self.didReportFirstFrame = false
}
func setOnOrientationUpdated(_ f: @escaping (PresentationCallVideoView.Orientation, CGFloat) -> Void) {
self.onOrientationUpdated = f
}
func getOrientation() -> PresentationCallVideoView.Orientation {
return self.currentOrientation
}
func getAspect() -> CGFloat {
return self.currentAspect
}
func setOnIsMirroredUpdated(_ f: @escaping (Bool) -> Void) {
self.onIsMirroredUpdated = f
}
func updateIsEnabled(_ isEnabled: Bool) {
self.isEnabled = isEnabled
}
// Lays the video layer over the full bounds and caps the render resolution at
// 1280x1280, further limited to 3x the on-screen size. No-op until the first
// output arrives.
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
if let currentOutput = self.videoSource.currentOutput {
let rotatedResolution = currentOutput.resolution
let videoSize = size
let videoResolution = rotatedResolution.aspectFittedOrSmaller(CGSize(width: 1280, height: 1280)).aspectFittedOrSmaller(CGSize(width: videoSize.width * 3.0, height: videoSize.height * 3.0))
let rotatedVideoResolution = videoResolution
transition.updateFrame(layer: self.videoLayer, frame: CGRect(origin: CGPoint(), size: size))
self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
}
}
}
// Companion view that re-hosts the blurred layer produced by a
// CallScreenVideoView's PrivateCallVideoLayer. It does no rendering of its
// own, so most VideoRenderingView requirements are intentional no-ops or
// fixed values.
private final class CallScreenVideoBlurView: UIView, VideoRenderingView {
// Weak: the main view owns the layer hierarchy; this view only borrows
// blurredLayer (captured strongly below) from it.
private weak var mainView: CallScreenVideoView?
private let blurredLayer: MetalEngineSubjectLayer
// Moves mainView's blurredLayer into this view's layer tree. NOTE(review):
// addSublayer reparents the layer, so only one blur view per main view can
// display it at a time — confirm callers never create two.
init(mainView: CallScreenVideoView) {
self.mainView = mainView
self.blurredLayer = mainView.videoLayer.blurredLayer
super.init(frame: CGRect())
self.layer.addSublayer(self.blurredLayer)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
}
// No-op: frame/orientation/mirroring events are reported by the main view.
func setOnFirstFrameReceived(_ f: @escaping (Float) -> Void) {
}
func setOnOrientationUpdated(_ f: @escaping (PresentationCallVideoView.Orientation, CGFloat) -> Void) {
}
// Fixed values: this view exposes no real orientation or aspect of its own.
func getOrientation() -> PresentationCallVideoView.Orientation {
return .rotation0
}
func getAspect() -> CGFloat {
return 1.0
}
func setOnIsMirroredUpdated(_ f: @escaping (Bool) -> Void) {
}
func updateIsEnabled(_ isEnabled: Bool) {
}
// Stretches the borrowed blurred layer over the full bounds.
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
transition.updateFrame(layer: self.blurredLayer, frame: CGRect(origin: CGPoint(), size: size))
}
}

View File

@ -2418,7 +2418,7 @@ final class VoiceChatControllerImpl: ViewController, VoiceChatController {
}
} else {
if let input = (strongSelf.call as! PresentationGroupCallImpl).video(endpointId: endpointId) {
if let videoView = strongSelf.videoRenderingContext.makeView(input: input) {
if let videoView = strongSelf.videoRenderingContext.makeView(input: input, blur: false) {
completion(GroupVideoNode(videoView: videoView, backdropVideoView: strongSelf.videoRenderingContext.makeBlurView(input: input, mainView: videoView)))
}
}
@ -3738,7 +3738,7 @@ final class VoiceChatControllerImpl: ViewController, VoiceChatController {
var isFrontCamera = true
let videoCapturer = OngoingCallVideoCapturer()
let input = videoCapturer.video()
if let videoView = strongSelf.videoRenderingContext.makeView(input: input) {
if let videoView = strongSelf.videoRenderingContext.makeView(input: input, blur: false) {
videoView.updateIsEnabled(true)
let cameraNode = GroupVideoNode(videoView: videoView, backdropVideoView: nil)
@ -5514,7 +5514,7 @@ final class VoiceChatControllerImpl: ViewController, VoiceChatController {
self.requestedVideoSources.insert(channel.endpointId)
let input = (self.call as! PresentationGroupCallImpl).video(endpointId: channel.endpointId)
if let input = input, let videoView = self.videoRenderingContext.makeView(input: input) {
if let input = input, let videoView = self.videoRenderingContext.makeView(input: input, blur: false) {
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: self.videoRenderingContext.makeBlurView(input: input, mainView: videoView))
self.readyVideoDisposables.set((combineLatest(videoNode.ready, .single(false) |> then(.single(true) |> delay(10.0, queue: Queue.mainQueue())))

View File

@ -730,6 +730,7 @@ public class TranslateScreen: ViewController {
inputHeight: layout.inputHeight ?? 0.0,
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
orientation: layout.metrics.orientation,
isVisible: self.currentIsVisible,
theme: self.theme ?? self.presentationData.theme,
strings: self.presentationData.strings,