[WIP] Call UI

This commit is contained in:
Isaac 2023-12-05 00:48:43 +04:00
parent e08c3402b5
commit a189174b54
18 changed files with 629 additions and 76 deletions

View File

@ -31,7 +31,8 @@ public final class ViewController: UIViewController {
audioOutput: .internalSpeaker,
isMicrophoneMuted: false,
localVideo: nil,
remoteVideo: nil
remoteVideo: nil,
isRemoteBatteryLow: false
)
private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?
@ -143,6 +144,13 @@ public final class ViewController: UIViewController {
self.callState.localVideo = nil
self.update(transition: .spring(duration: 0.4))
}
callScreenView.backAction = { [weak self] in
guard let self else {
return
}
self.callState.isMicrophoneMuted = !self.callState.isMicrophoneMuted
self.update(transition: .spring(duration: 0.4))
}
}
private func update(transition: Transition) {

View File

@ -646,6 +646,7 @@ private final class VariableBlurView: UIVisualEffectView {
variableBlur.setValue(self.maxBlurRadius, forKey: "inputRadius")
variableBlur.setValue(gradientImageRef, forKey: "inputMaskImage")
variableBlur.setValue(true, forKey: "inputNormalizeEdges")
variableBlur.setValue(UIScreenScale, forKey: "scale")
let backdropLayer = self.subviews.first?.layer
backdropLayer?.filters = [variableBlur]

View File

@ -117,6 +117,12 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
}
self.endCall?()
}
self.callScreen.backAction = { [weak self] in
guard let self else {
return
}
self.back?()
}
self.callScreenState = PrivateCallScreen.State(
lifecycleState: .connecting,
@ -126,7 +132,8 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
audioOutput: .internalSpeaker,
isMicrophoneMuted: false,
localVideo: nil,
remoteVideo: nil
remoteVideo: nil,
isRemoteBatteryLow: false
)
if let peer = call.peer {
self.updatePeer(peer: peer)
@ -326,26 +333,32 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
mappedLifecycleState = .terminated(PrivateCallScreen.State.TerminatedState(duration: duration))
}
switch callState.remoteVideoState {
case .active, .paused:
if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
}
case .inactive:
self.remoteVideo = nil
}
switch callState.videoState {
case .active(let isScreencast), .paused(let isScreencast):
if isScreencast {
self.localVideo = nil
} else {
if self.localVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: false) {
self.localVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
}
}
case .inactive, .notAvailable:
switch callState.state {
case .terminating, .terminated:
self.localVideo = nil
self.remoteVideo = nil
default:
switch callState.videoState {
case .active(let isScreencast), .paused(let isScreencast):
if isScreencast {
self.localVideo = nil
} else {
if self.localVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: false) {
self.localVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
}
}
case .inactive, .notAvailable:
self.localVideo = nil
}
switch callState.remoteVideoState {
case .active, .paused:
if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
}
case .inactive:
self.remoteVideo = nil
}
}
if var callScreenState = self.callScreenState {
@ -353,6 +366,13 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
callScreenState.remoteVideo = self.remoteVideo
callScreenState.localVideo = self.localVideo
switch callState.remoteBatteryLevel {
case .low:
callScreenState.isRemoteBatteryLow = true
case .normal:
callScreenState.isRemoteBatteryLow = false
}
if self.callScreenState != callScreenState {
self.callScreenState = callScreenState
self.update(transition: .animated(duration: 0.35, curve: .spring))
@ -509,7 +529,7 @@ private final class AdaptedCallVideoSource: VideoSource {
}
let rotationAngle: Float
switch videoFrameData.orientation {
switch videoFrameData.deviceRelativeOrientation ?? videoFrameData.orientation {
case .rotation0:
rotationAngle = 0.0
case .rotation90:
@ -520,6 +540,47 @@ private final class AdaptedCallVideoSource: VideoSource {
rotationAngle = Float.pi * 3.0 / 2.0
}
var mirrorDirection: Output.MirrorDirection = []
var sourceId: Int = 0
if videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically {
sourceId = 1
}
if let deviceRelativeOrientation = videoFrameData.deviceRelativeOrientation, deviceRelativeOrientation != videoFrameData.orientation {
let shouldMirror = videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically
var mirrorHorizontally = false
var mirrorVertically = false
if shouldMirror {
switch deviceRelativeOrientation {
case .rotation0:
mirrorHorizontally = true
case .rotation90:
mirrorVertically = true
case .rotation180:
mirrorHorizontally = true
case .rotation270:
mirrorVertically = true
}
}
if mirrorHorizontally {
mirrorDirection.insert(.horizontal)
}
if mirrorVertically {
mirrorDirection.insert(.vertical)
}
} else {
if videoFrameData.mirrorHorizontally {
mirrorDirection.insert(.horizontal)
}
if videoFrameData.mirrorVertically {
mirrorDirection.insert(.vertical)
}
}
AdaptedCallVideoSource.queue.async { [weak self] in
let output: Output
switch videoFrameData.buffer {
@ -538,7 +599,14 @@ private final class AdaptedCallVideoSource: VideoSource {
return
}
output = Output(resolution: CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height)), y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, sourceId: videoFrameData.mirrorHorizontally || videoFrameData.mirrorVertically ? 1 : 0)
output = Output(
resolution: CGSize(width: CGFloat(yTexture.width), height: CGFloat(yTexture.height)),
y: yTexture,
uv: uvTexture,
rotationAngle: rotationAngle,
mirrorDirection: mirrorDirection,
sourceId: sourceId
)
default:
return
}

View File

@ -251,6 +251,7 @@ kernel void videoYUVToRGBA(
vertex QuadVertexOut mainVideoVertex(
const device Rectangle &rect [[ buffer(0) ]],
const device uint2 &mirror [[ buffer(1) ]],
unsigned int vid [[ vertex_id ]]
) {
float2 quadVertex = quadVertices[vid];
@ -262,6 +263,12 @@ vertex QuadVertexOut mainVideoVertex(
out.position.y = -1.0 + out.position.y * 2.0;
out.uv = float2(quadVertex.x, 1.0 - quadVertex.y);
if (mirror.x == 1) {
out.uv.x = 1.0 - out.uv.x;
}
if (mirror.y == 1) {
out.uv.y = 1.0 - out.uv.y;
}
return out;
}

View File

@ -0,0 +1,56 @@
import Foundation
import UIKit
import Display
/// A navigation-bar style "back" control: a chevron icon followed by a text label.
/// The content is measured and laid out once, at initialization time.
final class BackButtonView: HighlightableButton {
    private let iconView: UIImageView
    private let textView: TextView

    /// Intrinsic size of the icon + label content, fixed at init.
    let size: CGSize

    /// Called when the button receives a .touchUpInside event.
    var pressAction: (() -> Void)?

    init(text: String) {
        self.iconView = UIImageView(image: NavigationBar.backArrowImage(color: .white))
        self.iconView.isUserInteractionEnabled = false

        self.textView = TextView()
        self.textView.isUserInteractionEnabled = false

        let iconTextSpacing: CGFloat = 8.0
        let iconScale: CGFloat = 0.9

        // Slightly shrink the stock back-arrow image; fall back to a tiny
        // placeholder size if the image is unavailable.
        var iconSize: CGSize = self.iconView.image?.size ?? CGSize(width: 2.0, height: 2.0)
        iconSize.width = floor(iconSize.width * iconScale)
        iconSize.height = floor(iconSize.height * iconScale)

        let textSize = self.textView.update(string: text, fontSize: 17.0, fontWeight: UIFont.Weight.regular.rawValue, color: .white, constrainedWidth: 100.0, transition: .immediate)

        self.size = CGSize(width: iconSize.width + iconTextSpacing + textSize.width, height: textSize.height)

        // Vertically center both the icon and the label within the content height.
        self.iconView.frame = CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((self.size.height - iconSize.height) * 0.5)), size: iconSize)
        self.textView.frame = CGRect(origin: CGPoint(x: iconSize.width + iconTextSpacing, y: floorToScreenPixels((self.size.height - textSize.height) * 0.5)), size: textSize)

        super.init(frame: CGRect())

        self.addSubview(self.iconView)
        self.addSubview(self.textView)

        self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    @objc private func pressed() {
        self.pressAction?()
    }

    override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        // Accept touches in a slightly expanded region around the visual bounds;
        // forward hits through the bounds center so the whole control responds as one.
        guard self.bounds.insetBy(dx: -8.0, dy: -4.0).contains(point) else {
            return nil
        }
        return super.hitTest(self.bounds.center, with: event)
    }
}

View File

@ -46,9 +46,21 @@ final class ButtonGroupView: OverlayMaskContainerView {
}
}
final class Notice {
let id: AnyHashable
let text: String
init(id: AnyHashable, text: String) {
self.id = id
self.text = text
}
}
private var buttons: [Button]?
private var buttonViews: [Button.Content.Key: ContentOverlayButton] = [:]
private var noticeViews: [AnyHashable: NoticeView] = [:]
override init(frame: CGRect) {
super.init(frame: frame)
}
@ -67,20 +79,87 @@ final class ButtonGroupView: OverlayMaskContainerView {
return result
}
func update(size: CGSize, insets: UIEdgeInsets, controlsHidden: Bool, buttons: [Button], transition: Transition) -> CGFloat {
func update(size: CGSize, insets: UIEdgeInsets, controlsHidden: Bool, buttons: [Button], notices: [Notice], transition: Transition) -> CGFloat {
self.buttons = buttons
let buttonSize: CGFloat = 56.0
let buttonSpacing: CGFloat = 36.0
let buttonNoticeSpacing: CGFloat = 16.0
let controlsHiddenNoticeSpacing: CGFloat = 0.0
var nextNoticeY: CGFloat
if controlsHidden {
nextNoticeY = size.height - insets.bottom - 4.0
} else {
nextNoticeY = size.height - insets.bottom - 52.0 - buttonSize - buttonNoticeSpacing
}
let noticeSpacing: CGFloat = 8.0
var validNoticeIds: [AnyHashable] = []
var noticesHeight: CGFloat = 0.0
for notice in notices {
validNoticeIds.append(notice.id)
let noticeView: NoticeView
var noticeTransition = transition
var animateIn = false
if let current = self.noticeViews[notice.id] {
noticeView = current
} else {
noticeTransition = noticeTransition.withAnimation(.none)
animateIn = true
noticeView = NoticeView()
self.noticeViews[notice.id] = noticeView
self.addSubview(noticeView)
}
if noticesHeight != 0.0 {
noticesHeight += noticeSpacing
} else {
if controlsHidden {
noticesHeight += controlsHiddenNoticeSpacing
} else {
noticesHeight += buttonNoticeSpacing
}
}
let noticeSize = noticeView.update(text: notice.text, constrainedWidth: size.width - insets.left * 2.0 - 16.0 * 2.0, transition: noticeTransition)
let noticeFrame = CGRect(origin: CGPoint(x: floor((size.width - noticeSize.width) * 0.5), y: nextNoticeY - noticeSize.height), size: noticeSize)
noticesHeight += noticeSize.height
nextNoticeY -= noticeSize.height + noticeSpacing
noticeTransition.setFrame(view: noticeView, frame: noticeFrame)
if animateIn, !transition.animation.isImmediate {
noticeView.animateIn()
}
}
if noticesHeight != 0.0 {
noticesHeight += 5.0
}
var removedNoticeIds: [AnyHashable] = []
for (id, noticeView) in self.noticeViews {
if !validNoticeIds.contains(id) {
removedNoticeIds.append(id)
if !transition.animation.isImmediate {
noticeView.animateOut(completion: { [weak noticeView] in
noticeView?.removeFromSuperview()
})
} else {
noticeView.removeFromSuperview()
}
}
}
for id in removedNoticeIds {
self.noticeViews.removeValue(forKey: id)
}
let buttonY: CGFloat
let resultHeight: CGFloat
if controlsHidden {
buttonY = size.height + 12.0
resultHeight = insets.bottom + 4.0
resultHeight = insets.bottom + 4.0 + noticesHeight
} else {
buttonY = size.height - insets.bottom - 52.0 - buttonSize
resultHeight = size.height - buttonY
resultHeight = size.height - buttonY + noticesHeight
}
var buttonX: CGFloat = floor((size.width - buttonSize * CGFloat(buttons.count) - buttonSpacing * CGFloat(buttons.count - 1)) * 0.5)

View File

@ -2,16 +2,124 @@ import Foundation
import UIKit
import Display
final class EmojiTooltipView: UIView {
let size: CGSize
/// Appends a rounded-rectangle path for `rect` to the context's current path.
///
/// The context is temporarily scaled so that the corner radius becomes 1.0 in
/// the working coordinate space; the graphics state is restored on exit, and
/// the constructed path is left on the context for the caller to fill/stroke.
private func addRoundedRectPath(context: CGContext, rect: CGRect, radius: CGFloat) {
    context.saveGState()
    defer { context.restoreGState() }

    // Normalize: translate to the rect origin and scale so radius == 1.0.
    context.translateBy(x: rect.minX, y: rect.minY)
    context.scaleBy(x: radius, y: radius)

    let normalizedWidth = rect.width / radius
    let normalizedHeight = rect.height / radius

    // Start at the middle of the right edge, then trace all four corners
    // with unit-radius tangent arcs.
    context.move(to: CGPoint(x: normalizedWidth, y: normalizedHeight / 2.0))
    context.addArc(tangent1End: CGPoint(x: normalizedWidth, y: normalizedHeight), tangent2End: CGPoint(x: normalizedWidth / 2.0, y: normalizedHeight), radius: 1.0)
    context.addArc(tangent1End: CGPoint(x: 0.0, y: normalizedHeight), tangent2End: CGPoint(x: 0.0, y: normalizedHeight / 2.0), radius: 1.0)
    context.addArc(tangent1End: CGPoint(x: 0.0, y: 0.0), tangent2End: CGPoint(x: normalizedWidth / 2.0, y: 0.0), radius: 1.0)
    context.addArc(tangent1End: CGPoint(x: normalizedWidth, y: 0.0), tangent2End: CGPoint(x: normalizedWidth, y: normalizedHeight / 2.0), radius: 1.0)
    context.closePath()
}
final class EmojiTooltipView: OverlayMaskContainerView {
private struct Params: Equatable {
var constrainedWidth: CGFloat
var subjectWidth: CGFloat
init(constrainedWidth: CGFloat, subjectWidth: CGFloat) {
self.constrainedWidth = constrainedWidth
self.subjectWidth = subjectWidth
}
}
private struct Layout {
var params: Params
var size: CGSize
init(params: Params, size: CGSize) {
self.params = params
self.size = size
}
}
private let text: String
private let backgroundView: UIImageView
private let textView: TextView
private var currentLayout: Layout?
init(text: String) {
self.size = CGSize()
self.text = text
self.backgroundView = UIImageView()
self.textView = TextView()
super.init(frame: CGRect())
self.maskContents.addSubview(self.backgroundView)
self.addSubview(self.textView)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func animateIn() {
let anchorPoint = CGPoint(x: self.bounds.width - 46.0, y: 0.0)
self.layer.animateSpring(from: 0.001 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: anchorPoint.x - self.bounds.width * 0.5, y: anchorPoint.y - self.bounds.height * 0.5)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, additive: true)
}
func animateOut(completion: @escaping () -> Void) {
let anchorPoint = CGPoint(x: self.bounds.width - 46.0, y: 0.0)
self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
completion()
})
self.layer.animateScale(from: 1.0, to: 0.4, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
self.layer.animatePosition(from: CGPoint(), to: CGPoint(x: anchorPoint.x - self.bounds.width * 0.5, y: anchorPoint.y - self.bounds.height * 0.5), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
}
func update(constrainedWidth: CGFloat, subjectWidth: CGFloat) -> CGSize {
let params = Params(constrainedWidth: constrainedWidth, subjectWidth: subjectWidth)
if let currentLayout = self.currentLayout, currentLayout.params == params {
return currentLayout.size
}
let size = self.update(params: params)
self.currentLayout = Layout(params: params, size: size)
return size
}
private func update(params: Params) -> CGSize {
let horizontalInset: CGFloat = 12.0
let verticalInset: CGFloat = 10.0
let arrowHeight: CGFloat = 8.0
let textSize = self.textView.update(
string: self.text,
fontSize: 15.0,
fontWeight: 0.0,
color: .white,
constrainedWidth: params.constrainedWidth - horizontalInset * 2.0,
transition: .immediate
)
let size = CGSize(width: textSize.width + horizontalInset * 2.0, height: arrowHeight + textSize.height + verticalInset * 2.0)
self.textView.frame = CGRect(origin: CGPoint(x: horizontalInset, y: arrowHeight + verticalInset), size: textSize)
self.backgroundView.image = generateImage(size, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(UIColor.white.cgColor)
addRoundedRectPath(context: context, rect: CGRect(origin: CGPoint(x: 0.0, y: arrowHeight), size: CGSize(width: size.width, height: size.height - arrowHeight)), radius: 14.0)
context.fillPath()
context.translateBy(x: size.width - floor(params.subjectWidth * 0.5) - 20.0, y: 0.0)
let _ = try? drawSvgPath(context, path: "M9.0981,1.1979 C9.547,0.6431 10.453,0.6431 10.9019,1.1979 C12.4041,3.0542 15.6848,6.5616 20,8 H-0.0002 C4.3151,6.5616 7.5959,3.0542 9.0981,1.1978 Z ")
})
self.backgroundView.frame = CGRect(origin: CGPoint(), size: size)
return size
}
}

View File

@ -0,0 +1,73 @@
import Foundation
import UIKit
import Display
import ComponentFlow
// A pill-shaped notice (e.g. "Your microphone is turned off") that expands from
// a circle when it appears. The rounded background lives in `maskContents`
// (OverlayMaskContainerView machinery), the text in a clipping container.
final class NoticeView: OverlayMaskContainerView {
private let backgroundView: RoundedCornersView
private let textContainer: UIView
private let textView: TextView
override init(frame: CGRect) {
self.backgroundView = RoundedCornersView(color: .white)
self.textContainer = UIView()
self.textContainer.clipsToBounds = true
self.textView = TextView()
super.init(frame: frame)
self.clipsToBounds = true
// Background goes into the mask hierarchy, not the regular subview tree.
self.maskContents.addSubview(self.backgroundView)
self.textContainer.addSubview(self.textView)
self.addSubview(self.textContainer)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
// Appearance animation: the whole view springs up from scale ~0 while the
// background and text container expand from a centered circle (width ==
// height) into the final pill frame, with the text fading in after `delay`.
// NOTE: statement order matters — `cornerRadius` is set before the frame
// animation starts and reset to 0 only in its completion handler.
func animateIn() {
let delay: Double = 0.2
self.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
self.textView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: delay)
self.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
// Circle-to-pill expansion for the masked background.
self.backgroundView.layer.animateFrame(from: CGRect(origin: CGPoint(x: (self.bounds.width - self.bounds.height) * 0.5, y: 0.0), size: CGSize(width: self.bounds.height, height: self.bounds.height)), to: self.backgroundView.frame, duration: 0.5, delay: delay, timingFunction: kCAMediaTimingFunctionSpring)
self.textContainer.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, delay: delay)
self.textContainer.layer.cornerRadius = self.bounds.height * 0.5
self.textContainer.layer.animateFrame(from: CGRect(origin: CGPoint(x: (self.bounds.width - self.bounds.height) * 0.5, y: 0.0), size: CGSize(width: self.bounds.height, height: self.bounds.height)), to: self.textContainer.frame, duration: 0.5, delay: delay, timingFunction: kCAMediaTimingFunctionSpring, completion: { [weak self] completed in
guard let self, completed else {
return
}
// Drop the temporary rounding once the expansion finished normally.
self.textContainer.layer.cornerRadius = 0.0
})
}
// Disappearance: fade and shrink; `completion` fires when the fade ends so
// the caller can remove the view.
func animateOut(completion: @escaping () -> Void) {
self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
completion()
})
self.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false)
}
// Measures the text, lays out background/text container/text, and returns
// the resulting pill size. Text itself is updated without animation; only
// the frames/corner radius follow `transition`.
func update(text: String, constrainedWidth: CGFloat, transition: Transition) -> CGSize {
let sideInset: CGFloat = 12.0
let verticalInset: CGFloat = 6.0
let textSize = self.textView.update(string: text, fontSize: 15.0, fontWeight: 0.0, color: .white, constrainedWidth: constrainedWidth - sideInset * 2.0, transition: .immediate)
let size = CGSize(width: textSize.width + sideInset * 2.0, height: textSize.height + verticalInset * 2.0)
transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: size))
// Keep the background fully rounded (pill) at the new height.
self.backgroundView.update(cornerRadius: floor(size.height * 0.5), transition: transition)
transition.setFrame(view: self.textContainer, frame: CGRect(origin: CGPoint(), size: size))
transition.setFrame(view: self.textView, frame: CGRect(origin: CGPoint(x: sideInset, y: verticalInset), size: textSize))
return size
}
}

View File

@ -5,38 +5,6 @@ import MetalPerformanceShaders
import Accelerate
import MetalEngine
/// Copies the pixel data of `image` into a newly created 32-bit BGRA
/// `CVPixelBuffer` (IOSurface-backed).
///
/// - Parameter image: Source image; must be backed by a CGImage in an RGB
///   color space with 4 bytes per pixel.
/// - Returns: The filled pixel buffer, or `nil` if the image does not meet
///   the requirements above or buffer creation/locking fails.
func imageToCVPixelBuffer(image: UIImage) -> CVPixelBuffer? {
    guard let cgImage = image.cgImage, let data = cgImage.dataProvider?.data, let bytes = CFDataGetBytePtr(data), let colorSpace = cgImage.colorSpace, case .rgb = colorSpace.model, cgImage.bitsPerPixel / cgImage.bitsPerComponent == 4 else {
        return nil
    }
    let width = cgImage.width
    // Fix: was `cgImage.width`, which produced a square buffer for
    // non-square images and a wrong vImage copy height.
    let height = cgImage.height
    
    var pixelBuffer: CVPixelBuffer? = nil
    let _ = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, [
        kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()
    ] as CFDictionary, &pixelBuffer)
    guard let pixelBuffer else {
        return nil
    }
    
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    defer {
        // Always balance the lock, regardless of which exit path is taken.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
    }
    
    guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
        return nil
    }
    
    // Row-by-row copy that respects the differing source/destination strides.
    var srcBuffer = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: bytes), height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: cgImage.bytesPerRow)
    var dstBuffer = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: baseAddress), height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: CVPixelBufferGetBytesPerRow(pixelBuffer))
    vImageCopyBuffer(&srcBuffer, &dstBuffer, 4, vImage_Flags(kvImageDoNotTile))
    
    return pixelBuffer
}
final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var internalData: MetalEngineSubjectInternalData?
@ -221,6 +189,13 @@ final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 1.0
@ -243,6 +218,13 @@ final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(rgbaTexture, index: 0)
var brightness: Float = 1.0

View File

@ -73,9 +73,11 @@ final class VideoContainerView: HighlightTrackingButton {
private final class FlipAnimationInfo {
let isForward: Bool
let previousRotationAngle: Float
init(isForward: Bool) {
init(isForward: Bool, previousRotationAngle: Float) {
self.isForward = isForward
self.previousRotationAngle = previousRotationAngle
}
}
@ -139,7 +141,7 @@ final class VideoContainerView: HighlightTrackingButton {
var videoMetrics: VideoMetrics?
if let currentOutput = self.video?.currentOutput {
if let previousVideo = self.videoLayer.video, previousVideo.sourceId != currentOutput.sourceId {
self.initiateVideoSourceSwitch(flipAnimationInfo: FlipAnimationInfo(isForward: previousVideo.sourceId < currentOutput.sourceId))
self.initiateVideoSourceSwitch(flipAnimationInfo: FlipAnimationInfo(isForward: previousVideo.sourceId < currentOutput.sourceId, previousRotationAngle: previousVideo.rotationAngle))
}
self.videoLayer.video = currentOutput
@ -564,13 +566,29 @@ final class VideoContainerView: HighlightTrackingButton {
if let flipAnimationInfo = disappearingVideoLayer.flipAnimationInfo {
var videoTransform = self.videoContainerLayer.transform
videoTransform = CATransform3DRotate(videoTransform, (flipAnimationInfo.isForward ? 1.0 : -1.0) * CGFloat.pi * 0.9999, 0.0, 1.0, 0.0)
var axis: (x: CGFloat, y: CGFloat, z: CGFloat) = (0.0, 0.0, 0.0)
let previousVideoScale: CGPoint
if flipAnimationInfo.previousRotationAngle == Float.pi * 0.5 {
axis.x = -1.0
previousVideoScale = CGPoint(x: 1.0, y: -1.0)
} else if flipAnimationInfo.previousRotationAngle == Float.pi {
axis.y = -1.0
previousVideoScale = CGPoint(x: -1.0, y: -1.0)
} else if flipAnimationInfo.previousRotationAngle == Float.pi * 3.0 / 2.0 {
axis.x = 1.0
previousVideoScale = CGPoint(x: 1.0, y: 1.0)
} else {
axis.y = 1.0
previousVideoScale = CGPoint(x: -1.0, y: 1.0)
}
videoTransform = CATransform3DRotate(videoTransform, (flipAnimationInfo.isForward ? 1.0 : -1.0) * CGFloat.pi * 0.9999, axis.x, axis.y, axis.z)
self.videoContainerLayer.transform = videoTransform
disappearingVideoLayer.videoLayer.zPosition = 1.0
transition.setZPosition(layer: disappearingVideoLayer.videoLayer, zPosition: -1.0)
disappearingVideoLayer.videoLayer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
disappearingVideoLayer.videoLayer.transform = CATransform3DMakeScale(previousVideoScale.x, previousVideoScale.y, 1.0)
animateFlipDisappearingVideo = disappearingVideoLayer
disappearingVideoLayer.videoLayer.blurredLayer.removeFromSuperlayer()

View File

@ -5,17 +5,30 @@ import Display
import SwiftSignalKit
public final class VideoSourceOutput {
public struct MirrorDirection: OptionSet {
public var rawValue: Int32
public init(rawValue: Int32) {
self.rawValue = rawValue
}
public static let horizontal = MirrorDirection(rawValue: 1 << 0)
public static let vertical = MirrorDirection(rawValue: 1 << 1)
}
public let resolution: CGSize
public let y: MTLTexture
public let uv: MTLTexture
public let rotationAngle: Float
public let mirrorDirection: MirrorDirection
public let sourceId: Int
public init(resolution: CGSize, y: MTLTexture, uv: MTLTexture, rotationAngle: Float, sourceId: Int) {
public init(resolution: CGSize, y: MTLTexture, uv: MTLTexture, rotationAngle: Float, mirrorDirection: MirrorDirection, sourceId: Int) {
self.resolution = resolution
self.y = y
self.uv = uv
self.rotationAngle = rotationAngle
self.mirrorDirection = mirrorDirection
self.sourceId = sourceId
}
}
@ -148,7 +161,7 @@ public final class FileVideoSource: VideoSource {
resolution.width = floor(resolution.width * self.sizeMultiplicator.x)
resolution.height = floor(resolution.height * self.sizeMultiplicator.y)
self.currentOutput = Output(resolution: resolution, y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, sourceId: self.sourceId)
self.currentOutput = Output(resolution: resolution, y: yTexture, uv: uvTexture, rotationAngle: rotationAngle, mirrorDirection: [], sourceId: self.sourceId)
return true
}
}

View File

@ -60,7 +60,7 @@ public class OverlayMaskContainerView: UIView, OverlayMaskContainerViewProtocol
super.willRemoveSubview(subview)
if let view = subview as? OverlayMaskContainerViewProtocol {
if view.maskContents.superview === self {
if view.maskContents.superview === self.maskContents {
view.maskContents.removeFromSuperview()
}
}

View File

@ -103,6 +103,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
public var isMicrophoneMuted: Bool
public var localVideo: VideoSource?
public var remoteVideo: VideoSource?
public var isRemoteBatteryLow: Bool
public init(
lifecycleState: LifecycleState,
@ -112,7 +113,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
audioOutput: AudioOutput,
isMicrophoneMuted: Bool,
localVideo: VideoSource?,
remoteVideo: VideoSource?
remoteVideo: VideoSource?,
isRemoteBatteryLow: Bool
) {
self.lifecycleState = lifecycleState
self.name = name
@ -122,6 +124,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.isMicrophoneMuted = isMicrophoneMuted
self.localVideo = localVideo
self.remoteVideo = remoteVideo
self.isRemoteBatteryLow = isRemoteBatteryLow
}
public static func ==(lhs: State, rhs: State) -> Bool {
@ -149,6 +152,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
if lhs.remoteVideo !== rhs.remoteVideo {
return false
}
if lhs.isRemoteBatteryLow != rhs.isRemoteBatteryLow {
return false
}
return true
}
}
@ -178,11 +184,13 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
private let avatarTransformLayer: SimpleLayer
private let avatarLayer: AvatarLayer
private let titleView: TextView
private let backButtonView: BackButtonView
private var statusView: StatusView
private var weakSignalView: WeakSignalView?
private var emojiView: KeyEmojiView?
private var emojiTooltipView: EmojiTooltipView?
private var emojiExpandedInfoView: EmojiExpandedInfoView?
private let videoContainerBackgroundView: RoundedCornersView
@ -197,6 +205,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
private var waitingForFirstLocalVideoFrameDisposable: Disposable?
private var canAnimateAudioLevel: Bool = false
private var displayEmojiTooltip: Bool = false
private var isEmojiKeyExpanded: Bool = false
private var areControlsHidden: Bool = false
private var swapLocalAndRemoteVideo: Bool = false
@ -214,6 +223,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
public var videoAction: (() -> Void)?
public var microhoneMuteAction: (() -> Void)?
public var endCallAction: (() -> Void)?
public var backAction: (() -> Void)?
public override init(frame: CGRect) {
self.overlayContentsView = UIView()
@ -237,6 +247,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.titleView = TextView()
self.statusView = StatusView()
self.backButtonView = BackButtonView(text: "Back")
super.init(frame: frame)
self.clipsToBounds = true
@ -270,6 +282,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self?.update(transition: .immediate)
}
self.addSubview(self.backButtonView)
(self.layer as? SimpleLayer)?.didEnterHierarchy = { [weak self] in
guard let self else {
return
@ -289,6 +303,13 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
self.backButtonView.pressAction = { [weak self] in
guard let self else {
return
}
self.backAction?()
}
}
public required init?(coder: NSCoder) {
@ -346,8 +367,19 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
@objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
if case .ended = recognizer.state {
var update = false
if self.displayEmojiTooltip {
self.displayEmojiTooltip = false
update = true
}
if self.activeRemoteVideoSource != nil || self.activeLocalVideoSource != nil {
self.areControlsHidden = !self.areControlsHidden
update = true
}
if update {
self.update(transition: .spring(duration: 0.4))
}
}
@ -434,6 +466,15 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.areControlsHidden = false
}
if let previousParams = self.params, case .active = params.state.lifecycleState {
switch previousParams.state.lifecycleState {
case .connecting, .exchangingKeys, .ringing:
self.displayEmojiTooltip = true
default:
break
}
}
self.params = params
self.updateInternal(params: params, transition: transition)
}
@ -541,7 +582,19 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
self.speakerAction?()
}), at: 0)
}
let contentBottomInset = self.buttonGroupView.update(size: params.size, insets: params.insets, controlsHidden: currentAreControlsHidden, buttons: buttons, transition: transition)
var notices: [ButtonGroupView.Notice] = []
if params.state.isMicrophoneMuted {
notices.append(ButtonGroupView.Notice(id: AnyHashable(0 as Int), text: "Your microphone is turned off"))
}
if params.state.remoteVideo != nil && params.state.localVideo == nil {
notices.append(ButtonGroupView.Notice(id: AnyHashable(1 as Int), text: "Your camera is turned off"))
}
if params.state.isRemoteBatteryLow {
notices.append(ButtonGroupView.Notice(id: AnyHashable(2 as Int), text: "\(params.state.shortName)'s battery is low"))
}
let contentBottomInset = self.buttonGroupView.update(size: params.size, insets: params.insets, controlsHidden: currentAreControlsHidden, buttons: buttons, notices: notices, transition: transition)
var expandedEmojiKeyRect: CGRect?
if self.isEmojiKeyExpanded {
@ -606,6 +659,16 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
}
let backButtonY: CGFloat
if currentAreControlsHidden {
backButtonY = -self.backButtonView.size.height - 12.0
} else {
backButtonY = params.insets.top + 12.0
}
let backButtonFrame = CGRect(origin: CGPoint(x: params.insets.left + 10.0, y: backButtonY), size: self.backButtonView.size)
transition.setFrame(view: self.backButtonView, frame: backButtonFrame)
transition.setAlpha(view: self.backButtonView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
if case let .active(activeState) = params.state.lifecycleState {
let emojiView: KeyEmojiView
var emojiTransition = transition
@ -623,6 +686,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
if !self.isEmojiKeyExpanded {
self.isEmojiKeyExpanded = true
self.displayEmojiTooltip = false
self.update(transition: .spring(duration: 0.4))
}
}
@ -650,6 +714,13 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
emojiTransition.setPosition(view: emojiView, position: emojiViewFrame.center)
}
emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
if let emojiTooltipView = self.emojiTooltipView {
self.emojiTooltipView = nil
emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
emojiTooltipView?.removeFromSuperview()
})
}
} else {
let emojiY: CGFloat
if currentAreControlsHidden {
@ -669,6 +740,34 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
}
emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
emojiAlphaTransition.setAlpha(view: emojiView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
if self.displayEmojiTooltip {
let emojiTooltipView: EmojiTooltipView
var emojiTooltipTransition = transition
var animateIn = false
if let current = self.emojiTooltipView {
emojiTooltipView = current
} else {
emojiTooltipTransition = emojiTooltipTransition.withAnimation(.none)
emojiTooltipView = EmojiTooltipView(text: "Encryption key of this call")
animateIn = true
self.emojiTooltipView = emojiTooltipView
self.addSubview(emojiTooltipView)
}
let emojiTooltipSize = emojiTooltipView.update(constrainedWidth: params.size.width - 32.0 * 2.0, subjectWidth: emojiViewSize.width - 20.0)
let emojiTooltipFrame = CGRect(origin: CGPoint(x: emojiViewFrame.maxX - emojiTooltipSize.width, y: emojiViewFrame.maxY + 8.0), size: emojiTooltipSize)
emojiTooltipTransition.setFrame(view: emojiTooltipView, frame: emojiTooltipFrame)
if animateIn && !transition.animation.isImmediate {
emojiTooltipView.animateIn()
}
} else if let emojiTooltipView = self.emojiTooltipView {
self.emojiTooltipView = nil
emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
emojiTooltipView?.removeFromSuperview()
})
}
}
emojiAlphaTransition.setAlpha(view: emojiView, alpha: 1.0)
@ -679,6 +778,12 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
emojiView?.removeFromSuperview()
})
}
if let emojiTooltipView = self.emojiTooltipView {
self.emojiTooltipView = nil
emojiTooltipView.animateOut(completion: { [weak emojiTooltipView] in
emojiTooltipView?.removeFromSuperview()
})
}
}
let collapsedAvatarSize: CGFloat = 136.0
@ -1024,7 +1129,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
genericAlphaTransition.setAlpha(view: self.statusView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
}
if case let .active(activeState) = params.state.lifecycleState, activeState.signalInfo.quality <= 0.2 {
if case let .active(activeState) = params.state.lifecycleState, activeState.signalInfo.quality <= 0.2, !self.isEmojiKeyExpanded, (!self.displayEmojiTooltip || !havePrimaryVideo) {
let weakSignalView: WeakSignalView
if let current = self.weakSignalView {
weakSignalView = current

View File

@ -375,6 +375,7 @@ public final class OngoingGroupCallContext {
public let width: Int
public let height: Int
public let orientation: OngoingCallVideoOrientation
public let deviceRelativeOrientation: OngoingCallVideoOrientation?
public let mirrorHorizontally: Bool
public let mirrorVertically: Bool
@ -392,6 +393,11 @@ public final class OngoingGroupCallContext {
self.width = Int(frameData.width)
self.height = Int(frameData.height)
self.orientation = OngoingCallVideoOrientation(frameData.orientation)
if frameData.hasDeviceRelativeOrientation {
self.deviceRelativeOrientation = OngoingCallVideoOrientation(frameData.deviceRelativeOrientation)
} else {
self.deviceRelativeOrientation = nil
}
self.mirrorHorizontally = frameData.mirrorHorizontally
self.mirrorVertically = frameData.mirrorVertically
}

View File

@ -185,6 +185,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, readonly) int width;
@property (nonatomic, readonly) int height;
@property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc orientation;
@property (nonatomic, readonly) bool hasDeviceRelativeOrientation;
@property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc deviceRelativeOrientation;
@property (nonatomic, readonly) bool mirrorHorizontally;
@property (nonatomic, readonly) bool mirrorVertically;

View File

@ -106,7 +106,7 @@ private:
}
if (storedSink && mappedBuffer) {
storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically hasDeviceRelativeVideoRotation:false deviceRelativeVideoRotation:OngoingCallVideoOrientation0]);
}
}));
}

View File

@ -53,6 +53,6 @@
@interface CallVideoFrameData (Initialization)
- (instancetype _Nonnull)initWithBuffer:(id<CallVideoFrameBuffer> _Nonnull)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically;
- (instancetype _Nonnull)initWithBuffer:(id<CallVideoFrameBuffer> _Nonnull)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically hasDeviceRelativeVideoRotation:(bool)hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:(OngoingCallVideoOrientationWebrtc)deviceRelativeVideoRotation;
@end

View File

@ -476,7 +476,7 @@ private:
@implementation CallVideoFrameData
- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically {
- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically hasDeviceRelativeVideoRotation:(bool)hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:(OngoingCallVideoOrientationWebrtc)deviceRelativeVideoRotation {
self = [super init];
if (self != nil) {
_buffer = buffer;
@ -506,6 +506,9 @@ private:
break;
}
}
_hasDeviceRelativeOrientation = hasDeviceRelativeVideoRotation;
_deviceRelativeOrientation = deviceRelativeVideoRotation;
_mirrorHorizontally = mirrorHorizontally;
_mirrorVertically = mirrorVertically;
@ -586,6 +589,9 @@ private:
bool mirrorHorizontally = false;
bool mirrorVertically = false;
bool hasDeviceRelativeVideoRotation = false;
OngoingCallVideoOrientationWebrtc deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> nativeBuffer = static_cast<webrtc::ObjCFrameBuffer *>(videoFrame.video_frame_buffer().get())->wrapped_frame_buffer();
@ -594,7 +600,8 @@ private:
mappedBuffer = [[CallVideoFrameNativePixelBuffer alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
}
if ([nativeBuffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
if (((TGRTCCVPixelBuffer *)nativeBuffer).shouldBeMirrored) {
TGRTCCVPixelBuffer *tgNativeBuffer = (TGRTCCVPixelBuffer *)nativeBuffer;
if (tgNativeBuffer.shouldBeMirrored) {
switch (videoFrame.rotation()) {
case webrtc::kVideoRotation_0:
case webrtc::kVideoRotation_180:
@ -608,6 +615,26 @@ private:
break;
}
}
if (tgNativeBuffer.deviceRelativeVideoRotation != -1) {
hasDeviceRelativeVideoRotation = true;
switch (tgNativeBuffer.deviceRelativeVideoRotation) {
case webrtc::kVideoRotation_0:
deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
break;
case webrtc::kVideoRotation_90:
deviceRelativeVideoRotation = OngoingCallVideoOrientation90;
break;
case webrtc::kVideoRotation_180:
deviceRelativeVideoRotation = OngoingCallVideoOrientation180;
break;
case webrtc::kVideoRotation_270:
deviceRelativeVideoRotation = OngoingCallVideoOrientation270;
break;
default:
deviceRelativeVideoRotation = OngoingCallVideoOrientation0;
break;
}
}
}
} else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
rtc::scoped_refptr<webrtc::NV12BufferInterface> nv12Buffer(static_cast<webrtc::NV12BufferInterface *>(videoFrame.video_frame_buffer().get()));
@ -618,7 +645,7 @@ private:
}
if (storedSink && mappedBuffer) {
storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically hasDeviceRelativeVideoRotation:hasDeviceRelativeVideoRotation deviceRelativeVideoRotation:deviceRelativeVideoRotation]);
}
}));
}