[WIP] Call UI V2

Ali 2023-11-14 11:26:57 +04:00
parent 2724d5db78
commit 54327a0a74
18 changed files with 589 additions and 116 deletions

View File

@@ -373,6 +373,7 @@ swift_library(
"//submodules/DrawingUI:DrawingUIResources",
"//submodules/TelegramUI:TelegramUIResources",
"//submodules/TelegramUI:TelegramUIAssets",
"//submodules/TelegramUI/Components/Calls/CallScreen:Assets",
":GeneratedPresentationStrings/Resources/PresentationStrings.data",
],
deps = [

View File

@@ -72,6 +72,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case skipReadHistory(PresentationTheme, Bool)
case unidirectionalSwipeToReply(Bool)
case dustEffect(Bool)
case callUIV2(Bool)
case crashOnSlowQueries(PresentationTheme, Bool)
case crashOnMemoryPressure(PresentationTheme, Bool)
case clearTips(PresentationTheme)
@@ -120,7 +121,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return DebugControllerSection.logs.rawValue
case .logToFile, .logToConsole, .redactSensitiveData:
return DebugControllerSection.logging.rawValue
case .keepChatNavigationStack, .skipReadHistory, .unidirectionalSwipeToReply, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
case .keepChatNavigationStack, .skipReadHistory, .unidirectionalSwipeToReply, .dustEffect, .callUIV2, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .resetData, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .resetWebViewCache, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .voiceConference, .experimentalCompatibility, .enableDebugDataDisplay, .acceleratedStickers, .inlineForums, .localTranscription, .enableReactionOverrides, .restorePurchases:
return DebugControllerSection.experiments.rawValue
@@ -171,70 +172,72 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return 16
case .dustEffect:
return 17
case .crashOnSlowQueries:
case .callUIV2:
return 18
case .crashOnMemoryPressure:
case .crashOnSlowQueries:
return 19
case .clearTips:
case .crashOnMemoryPressure:
return 20
case .resetNotifications:
case .clearTips:
return 21
case .crash:
case .resetNotifications:
return 22
case .resetData:
case .crash:
return 23
case .resetDatabase:
case .resetData:
return 24
case .resetDatabaseAndCache:
case .resetDatabase:
return 25
case .resetHoles:
case .resetDatabaseAndCache:
return 26
case .reindexUnread:
case .resetHoles:
return 27
case .resetCacheIndex:
case .reindexUnread:
return 28
case .reindexCache:
case .resetCacheIndex:
return 29
case .resetBiometricsData:
case .reindexCache:
return 30
case .resetWebViewCache:
case .resetBiometricsData:
return 31
case .optimizeDatabase:
case .resetWebViewCache:
return 32
case .photoPreview:
case .optimizeDatabase:
return 33
case .knockoutWallpaper:
case .photoPreview:
return 34
case .experimentalCompatibility:
case .knockoutWallpaper:
return 35
case .enableDebugDataDisplay:
case .experimentalCompatibility:
return 36
case .acceleratedStickers:
case .enableDebugDataDisplay:
return 37
case .inlineForums:
case .acceleratedStickers:
return 38
case .localTranscription:
case .inlineForums:
return 39
case .enableReactionOverrides:
case .localTranscription:
return 40
case .restorePurchases:
case .enableReactionOverrides:
return 41
case .logTranslationRecognition:
case .restorePurchases:
return 42
case .resetTranslationStates:
case .logTranslationRecognition:
return 43
case .storiesExperiment:
case .resetTranslationStates:
return 44
case .storiesJpegExperiment:
case .storiesExperiment:
return 45
case .playlistPlayback:
case .storiesJpegExperiment:
return 46
case .enableQuickReactionSwitch:
case .playlistPlayback:
return 47
case .voiceConference:
case .enableQuickReactionSwitch:
return 48
case .voiceConference:
return 49
case let .preferredVideoCodec(index, _, _, _):
return 49 + index
return 50 + index
case .disableVideoAspectScaling:
return 100
case .enableNetworkFramework:
@@ -950,6 +953,14 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return settings
}).start()
})
case let .callUIV2(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Call UI V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = updateExperimentalUISettingsInteractively(accountManager: arguments.sharedContext.accountManager, { settings in
var settings = settings
settings.callUIV2 = value
return settings
}).start()
})
case let .crashOnSlowQueries(_, value):
return ItemListSwitchItem(presentationData: presentationData, title: "Crash when slow", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = updateExperimentalUISettingsInteractively(accountManager: arguments.sharedContext.accountManager, { settings in
@@ -1399,6 +1410,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
#endif
entries.append(.unidirectionalSwipeToReply(experimentalSettings.unidirectionalSwipeToReply))
entries.append(.dustEffect(experimentalSettings.dustEffect))
entries.append(.callUIV2(experimentalSettings.callUIV2))
}
entries.append(.crashOnSlowQueries(presentationData.theme, experimentalSettings.crashOnLongQueries))
entries.append(.crashOnMemoryPressure(presentationData.theme, experimentalSettings.crashOnMemoryPressure))

View File

@@ -106,6 +106,10 @@ swift_library(
"//submodules/PeerInfoUI/CreateExternalMediaStreamScreen:CreateExternalMediaStreamScreen",
"//submodules/PhoneNumberFormat:PhoneNumberFormat",
"//submodules/TelegramUI/Components/Calls/CallScreen",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/TinyThumbnail",
"//submodules/ImageBlur",
"//submodules/MetalEngine",
],
visibility = [
"//visibility:public",

View File

@@ -4,6 +4,6 @@
#import <Foundation/Foundation.h>
NSString *randomCallsEmoji();
NSString *stringForEmojiHashOfData(NSData *data, NSInteger count);
NSArray<NSString *> *stringForEmojiHashOfData(NSData *data, NSInteger count);
#endif /* CallsEmoji_h */

View File

@@ -16,20 +16,21 @@ NSString *randomCallsEmoji() {
return emojis[arc4random() % emojis.count];
}
NSString *stringForEmojiHashOfData(NSData *data, NSInteger count) {
if (data.length != 32)
return @"";
NSArray<NSString *> *stringForEmojiHashOfData(NSData *data, NSInteger count) {
if (data.length != 32) {
return @[];
}
uint8_t bytes[32];
[data getBytes:bytes length:32];
NSArray *emojis = emojisArray();
NSString *result = @"";
NSMutableArray *result = [[NSMutableArray alloc] init];
for (int32_t i = 0; i < count; i++)
{
int32_t position = positionExtractor(bytes, i, (int32_t)emojis.count);
NSString *emoji = emojis[position];
result = [result stringByAppendingString:emoji];
[result addObject:emoji];
}
return result;
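
The signature change means the function now hands back the key emoji as an array instead of a pre-joined string: the new CallScreen can lay the items out individually, while the legacy nodes join them back together (see the `.joined(separator: "")` call sites below). A minimal sketch of the bridged Swift usage, assuming only the CallsEmoji import; `emojiFingerprint` is an illustrative helper, not part of this commit:

import Foundation
import CallsEmoji

// The Obj-C declaration bridges to Swift as an optional [String], so call
// sites treat it as optional and fall back to an empty array.
func emojiFingerprint(for keyVisualHash: Data) -> [String] {
    // Four emoji derived from the 32-byte key visual hash; empty when the
    // hash has an unexpected length.
    return stringForEmojiHashOfData(keyVisualHash, 4) ?? []
}

// Legacy single-string rendering:
// let text = emojiFingerprint(for: keyVisualHash).joined(separator: "")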

View File

@@ -135,11 +135,11 @@ public final class CallController: ViewController {
}
override public func loadDisplayNode() {
#if DEBUG && false
self.displayNode = CallControllerNodeV2(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
#else
self.displayNode = CallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
#endif
if self.sharedContext.immediateExperimentalUISettings.callUIV2 {
self.displayNode = CallControllerNodeV2(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
} else {
self.displayNode = CallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
}
self.displayNodeDidLoad()
self.controllerNode.toggleMute = { [weak self] in

View File

@@ -1105,7 +1105,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
isReconnecting = true
}
if self.keyTextData?.0 != keyVisualHash {
let text = stringForEmojiHashOfData(keyVisualHash, 4)!
let text = stringForEmojiHashOfData(keyVisualHash, 4)!.joined(separator: "")
self.keyTextData = (keyVisualHash, text)
self.keyButtonNode.key = text

View File

@@ -8,6 +8,14 @@ import AccountContext
import TelegramPresentationData
import SwiftSignalKit
import CallScreen
import ComponentDisplayAdapters
import ComponentFlow
import CallsEmoji
import AvatarNode
import TinyThumbnail
import ImageBlur
import TelegramVoip
import MetalEngine
final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
private let sharedContext: SharedAccountContext
@@ -16,7 +24,13 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
private let statusBar: StatusBar
private let call: PresentationCall
private let containerView: UIView
private let callScreen: PrivateCallScreen
private var callScreenState: PrivateCallScreen.State?
private var shouldStayHiddenUntilConnection: Bool = false
private var callStartTimestamp: Double?
var isMuted: Bool = false
@@ -32,8 +46,18 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
var dismissedInteractively: (() -> Void)?
var dismissAllTooltips: (() -> Void)?
private var emojiKey: (data: Data, resolvedKey: [String])?
private var validLayout: (layout: ContainerViewLayout, navigationBarHeight: CGFloat)?
private var currentPeer: EnginePeer?
private var peerAvatarDisposable: Disposable?
private var availableAudioOutputs: [AudioSessionOutput]?
private var isMicrophoneMutedDisposable: Disposable?
private var audioLevelDisposable: Disposable?
private var remoteVideo: AdaptedCallVideoSource?
init(
sharedContext: SharedAccountContext,
account: Account,
@@ -50,38 +74,278 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
self.statusBar = statusBar
self.call = call
self.containerView = UIView()
self.callScreen = PrivateCallScreen()
self.shouldStayHiddenUntilConnection = shouldStayHiddenUntilConnection
super.init()
self.view.addSubview(self.callScreen)
self.view.addSubview(self.containerView)
self.containerView.addSubview(self.callScreen)
self.callScreen.speakerAction = { [weak self] in
guard let self else {
return
}
self.beginAudioOuputSelection?(false)
}
self.callScreen.videoAction = { [weak self] in
guard let self else {
return
}
let _ = self // TODO: video toggle is not wired up yet in V2
}
self.callScreen.microphoneMuteAction = { [weak self] in
guard let self else {
return
}
self.call.toggleIsMuted()
}
self.callScreen.endCallAction = { [weak self] in
guard let self else {
return
}
self.endCall?()
}
self.callScreenState = PrivateCallScreen.State(
lifecycleState: .connecting,
name: " ",
avatarImage: nil,
audioOutput: .internalSpeaker,
isMicrophoneMuted: false,
remoteVideo: nil
)
if let peer = call.peer {
self.updatePeer(peer: peer)
}
self.isMicrophoneMutedDisposable = (call.isMuted
|> deliverOnMainQueue).startStrict(next: { [weak self] isMuted in
guard let self, var callScreenState = self.callScreenState else {
return
}
self.isMuted = isMuted
if callScreenState.isMicrophoneMuted != isMuted {
callScreenState.isMicrophoneMuted = isMuted
self.callScreenState = callScreenState
self.update(transition: .animated(duration: 0.3, curve: .spring))
}
})
self.audioLevelDisposable = (call.audioLevel
|> deliverOnMainQueue).start(next: { [weak self] audioLevel in
guard let self else {
return
}
self.callScreen.addIncomingAudioLevel(value: audioLevel)
})
}
deinit {
self.peerAvatarDisposable?.dispose()
self.isMicrophoneMutedDisposable?.dispose()
self.audioLevelDisposable?.dispose()
}
func updateAudioOutputs(availableOutputs: [AudioSessionOutput], currentOutput: AudioSessionOutput?) {
self.availableAudioOutputs = availableOutputs
if var callScreenState = self.callScreenState {
let mappedOutput: PrivateCallScreen.State.AudioOutput
if let currentOutput {
switch currentOutput {
case .builtin:
mappedOutput = .internalSpeaker
case .speaker:
mappedOutput = .speaker
case .headphones, .port:
mappedOutput = .speaker
}
} else {
mappedOutput = .internalSpeaker
}
if callScreenState.audioOutput != mappedOutput {
callScreenState.audioOutput = mappedOutput
self.callScreenState = callScreenState
self.update(transition: .animated(duration: 0.3, curve: .spring))
}
}
}
private func resolvedEmojiKey(data: Data) -> [String] {
if let emojiKey = self.emojiKey, emojiKey.data == data {
return emojiKey.resolvedKey
}
let resolvedKey = stringForEmojiHashOfData(data, 4) ?? []
self.emojiKey = (data, resolvedKey)
return resolvedKey
}
func updateCallState(_ callState: PresentationCallState) {
if case let .terminated(id, _, reportRating) = callState.state, let callId = id {
if reportRating {
self.presentCallRating?(callId, self.call.isVideo)
let mappedLifecycleState: PrivateCallScreen.State.LifecycleState
switch callState.state {
case .waiting:
mappedLifecycleState = .connecting
case .ringing:
mappedLifecycleState = .ringing
case let .requesting(isRinging):
if isRinging {
mappedLifecycleState = .ringing
} else {
mappedLifecycleState = .connecting
}
case let .connecting(keyData):
let _ = keyData
mappedLifecycleState = .exchangingKeys
case let .active(startTime, signalQuality, keyData):
self.callStartTimestamp = startTime
let _ = keyData
mappedLifecycleState = .active(PrivateCallScreen.State.ActiveState(
startTime: startTime + kCFAbsoluteTimeIntervalSince1970,
signalInfo: PrivateCallScreen.State.SignalInfo(quality: Double(signalQuality ?? 0) / 4.0),
emojiKey: self.resolvedEmojiKey(data: keyData)
))
case let .reconnecting(startTime, _, keyData):
let _ = keyData
mappedLifecycleState = .active(PrivateCallScreen.State.ActiveState(
startTime: startTime + kCFAbsoluteTimeIntervalSince1970,
signalInfo: PrivateCallScreen.State.SignalInfo(quality: 0.0),
emojiKey: self.resolvedEmojiKey(data: keyData)
))
case .terminating, .terminated:
let duration: Double
if let callStartTimestamp = self.callStartTimestamp {
duration = CFAbsoluteTimeGetCurrent() - callStartTimestamp
} else {
duration = 0.0
}
mappedLifecycleState = .terminated(PrivateCallScreen.State.TerminatedState(duration: duration))
}
switch callState.remoteVideoState {
case .active, .paused:
if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
}
case .inactive:
self.remoteVideo = nil
}
if var callScreenState = self.callScreenState {
callScreenState.lifecycleState = mappedLifecycleState
callScreenState.remoteVideo = self.remoteVideo
if self.callScreenState != callScreenState {
self.callScreenState = callScreenState
self.update(transition: .animated(duration: 0.35, curve: .spring))
}
}
if case let .terminated(_, _, reportRating) = callState.state {
self.callEnded?(reportRating)
}
}
func updatePeer(accountPeer: Peer, peer: Peer, hasOther: Bool) {
self.updatePeer(peer: EnginePeer(peer))
}
func animateIn() {
private func updatePeer(peer: EnginePeer) {
guard var callScreenState = self.callScreenState else {
return
}
callScreenState.name = peer.displayTitle(strings: self.presentationData.strings, displayOrder: self.presentationData.nameDisplayOrder)
if self.currentPeer?.smallProfileImage != peer.smallProfileImage {
self.peerAvatarDisposable?.dispose()
if let smallProfileImage = peer.largeProfileImage, let peerReference = PeerReference(peer._asPeer()) {
if let thumbnailImage = smallProfileImage.immediateThumbnailData.flatMap(decodeTinyThumbnail).flatMap(UIImage.init(data:)), let cgImage = thumbnailImage.cgImage {
callScreenState.avatarImage = generateImage(CGSize(width: 128.0, height: 128.0), contextGenerator: { size, context in
context.draw(cgImage, in: CGRect(origin: CGPoint(), size: size))
}, scale: 1.0).flatMap { image in
return blurredImage(image, radius: 10.0)
}
}
let postbox = self.call.context.account.postbox
self.peerAvatarDisposable = (Signal<UIImage?, NoError> { subscriber in
let fetchDisposable = fetchedMediaResource(mediaBox: postbox.mediaBox, userLocation: .other, userContentType: .avatar, reference: .avatar(peer: peerReference, resource: smallProfileImage.resource)).start()
let dataDisposable = postbox.mediaBox.resourceData(smallProfileImage.resource).start(next: { data in
if data.complete, let image = UIImage(contentsOfFile: data.path)?.precomposed() {
subscriber.putNext(image)
subscriber.putCompletion()
}
})
return ActionDisposable {
fetchDisposable.dispose()
dataDisposable.dispose()
}
}
|> deliverOnMainQueue).start(next: { [weak self] image in
guard let self else {
return
}
if var callScreenState = self.callScreenState {
callScreenState.avatarImage = image
self.callScreenState = callScreenState
self.update(transition: .immediate)
}
})
} else {
self.peerAvatarDisposable?.dispose()
self.peerAvatarDisposable = nil
callScreenState.avatarImage = generateImage(CGSize(width: 512, height: 512), scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
drawPeerAvatarLetters(context: context, size: size, font: Font.semibold(20.0), letters: peer.displayLetters, peerId: peer.id, nameColor: peer.nameColor)
})
}
}
self.currentPeer = peer
if callScreenState != self.callScreenState {
self.callScreenState = callScreenState
self.update(transition: .immediate)
}
}
func animateIn() {
if !self.containerView.alpha.isZero {
var bounds = self.bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.removeAnimation(forKey: "bounds")
self.statusBar.layer.removeAnimation(forKey: "opacity")
self.containerView.layer.removeAnimation(forKey: "opacity")
self.containerView.layer.removeAnimation(forKey: "scale")
self.statusBar.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
if !self.shouldStayHiddenUntilConnection {
self.containerView.layer.animateScale(from: 1.04, to: 1.0, duration: 0.3)
self.containerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
}
func animateOut(completion: @escaping () -> Void) {
self.statusBar.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
if !self.shouldStayHiddenUntilConnection || self.containerView.alpha > 0.0 {
self.containerView.layer.allowsGroupOpacity = true
self.containerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak self] _ in
self?.containerView.layer.allowsGroupOpacity = false
})
self.containerView.layer.animateScale(from: 1.0, to: 1.04, duration: 0.3, removeOnCompletion: false, completion: { _ in
completion()
})
} else {
completion()
}
}
func expandFromPipIfPossible() {
@@ -96,7 +360,87 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
}
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.validLayout = (layout, navigationBarHeight)
transition.updateFrame(view: self.containerView, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(view: self.callScreen, frame: CGRect(origin: CGPoint(), size: layout.size))
self.callScreen.update(size: layout.size, insets: layout.insets(options: [.statusBar]))
if let callScreenState = self.callScreenState {
self.callScreen.update(
size: layout.size,
insets: layout.insets(options: [.statusBar]),
screenCornerRadius: layout.deviceMetrics.screenCornerRadius,
state: callScreenState,
transition: Transition(transition)
)
}
}
}
private final class AdaptedCallVideoSource: VideoSource {
private static let queue = Queue(name: "AdaptedCallVideoSource")
var updated: (() -> Void)?
private(set) var currentOutput: Output?
private var textureCache: CVMetalTextureCache?
private var videoFrameDisposable: Disposable?
init(videoStreamSignal: Signal<OngoingGroupCallContext.VideoFrameData, NoError>) {
CVMetalTextureCacheCreate(nil, nil, MetalEngine.shared.device, nil, &self.textureCache)
self.videoFrameDisposable = (videoStreamSignal
|> deliverOnMainQueue).start(next: { [weak self] videoFrameData in
guard let self, let textureCache = self.textureCache else {
return
}
let rotationAngle: Float
switch videoFrameData.orientation {
case .rotation0:
rotationAngle = 0.0
case .rotation90:
rotationAngle = Float.pi * 0.5
case .rotation180:
rotationAngle = Float.pi
case .rotation270:
rotationAngle = Float.pi * 3.0 / 2.0
}
AdaptedCallVideoSource.queue.async { [weak self] in
let output: Output
switch videoFrameData.buffer {
case let .native(nativeBuffer):
let width = CVPixelBufferGetWidth(nativeBuffer.pixelBuffer)
let height = CVPixelBufferGetHeight(nativeBuffer.pixelBuffer)
var cvMetalTextureY: CVMetalTexture?
var status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, nativeBuffer.pixelBuffer, nil, .r8Unorm, width, height, 0, &cvMetalTextureY)
guard status == kCVReturnSuccess, let yTexture = CVMetalTextureGetTexture(cvMetalTextureY!) else {
return
}
var cvMetalTextureUV: CVMetalTexture?
status = CVMetalTextureCacheCreateTextureFromImage(nil, textureCache, nativeBuffer.pixelBuffer, nil, .rg8Unorm, width / 2, height / 2, 1, &cvMetalTextureUV)
guard status == kCVReturnSuccess, let uvTexture = CVMetalTextureGetTexture(cvMetalTextureUV!) else {
return
}
output = Output(y: yTexture, uv: uvTexture, rotationAngle: rotationAngle)
default:
return
}
DispatchQueue.main.async {
guard let self else {
return
}
self.currentOutput = output
self.updated?()
}
}
})
}
deinit {
self.videoFrameDisposable?.dispose()
}
}
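
AdaptedCallVideoSource above turns each incoming NV12 frame into a pair of Metal textures through a shared CVMetalTextureCache: plane 0 becomes full-resolution .r8Unorm luma, plane 1 becomes half-resolution .rg8Unorm interleaved chroma. A standalone sketch of that mapping, assuming a bi-planar NV12 pixel buffer; `makeLumaChromaTextures` is an illustrative name, not part of this commit:

import CoreVideo
import Metal

func makeLumaChromaTextures(cache: CVMetalTextureCache, pixelBuffer: CVPixelBuffer) -> (y: MTLTexture, uv: MTLTexture)? {
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)

    // Plane 0: full-resolution luma, one byte per pixel.
    var cvY: CVMetalTexture?
    guard CVMetalTextureCacheCreateTextureFromImage(nil, cache, pixelBuffer, nil, .r8Unorm, width, height, 0, &cvY) == kCVReturnSuccess, let cvY, let y = CVMetalTextureGetTexture(cvY) else {
        return nil
    }

    // Plane 1: half-resolution interleaved CbCr, two bytes per pixel.
    var cvUV: CVMetalTexture?
    guard CVMetalTextureCacheCreateTextureFromImage(nil, cache, pixelBuffer, nil, .rg8Unorm, width / 2, height / 2, 1, &cvUV) == kCVReturnSuccess, let cvUV, let uv = CVMetalTextureGetTexture(cvUV) else {
        return nil
    }

    return (y, uv)
}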

View File

@@ -253,7 +253,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
}
}, timestamp)
if self.keyTextData?.0 != keyVisualHash {
let text = stringForEmojiHashOfData(keyVisualHash, 4)!
let text = stringForEmojiHashOfData(keyVisualHash, 4)!.joined(separator: "")
self.keyTextData = (keyVisualHash, text)
self.keyButtonNode.setAttributedTitle(NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: Font.regular(22.0), NSAttributedString.Key.kern: 2.5 as NSNumber]), for: [])

View File

@@ -66,6 +66,7 @@ swift_library(
"//submodules/ComponentFlow",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/TelegramUI/Components/AnimatedTextComponent",
"//submodules/AppBundle",
],
visibility = [
"//visibility:public",

View File

@@ -22,25 +22,9 @@ final class AvatarLayer: SimpleLayer {
var image: UIImage? {
didSet {
if self.image !== image {
if self.image !== oldValue {
self.updateImage()
}
/*if let image = self.image {
let imageSize = CGSize(width: 136.0, height: 136.0)
let renderer = UIGraphicsImageRenderer(bounds: CGRect(origin: CGPoint(), size: imageSize), format: .preferred())
let image = renderer.image { context in
context.cgContext.addEllipse(in: CGRect(origin: CGPoint(), size: imageSize))
context.cgContext.clip()
context.cgContext.translateBy(x: imageSize.width * 0.5, y: imageSize.height * 0.5)
context.cgContext.scaleBy(x: 1.0, y: -1.0)
context.cgContext.translateBy(x: -imageSize.width * 0.5, y: -imageSize.height * 0.5)
context.cgContext.draw(image.cgImage!, in: CGRect(origin: CGPoint(), size: imageSize))
}
self.contents = image.cgImage
} else {
self.contents = nil
}*/
}
}

View File

@@ -2,6 +2,7 @@ import Foundation
import UIKit
import Display
import ComponentFlow
import AppBundle
final class ButtonGroupView: UIView, ContentOverlayView {
final class Button {
@@ -113,19 +114,19 @@ final class ButtonGroupView: UIView, ContentOverlayView {
switch button.content {
case let .speaker(isActiveValue):
title = "speaker"
image = UIImage(named: "Call/Speaker")
image = UIImage(bundleImageName: "Call/Speaker")
isActive = isActiveValue
case let .video(isActiveValue):
title = "video"
image = UIImage(named: "Call/Video")
image = UIImage(bundleImageName: "Call/Video")
isActive = isActiveValue
case let .microphone(isActiveValue):
title = "mute"
image = UIImage(named: "Call/Mute")
image = UIImage(bundleImageName: "Call/Mute")
isActive = isActiveValue
case .end:
title = "end"
image = UIImage(named: "Call/End")
image = UIImage(bundleImageName: "Call/End")
isActive = false
isDestructive = true
}

View File

@@ -11,14 +11,12 @@ final class ContentView: UIView {
var insets: UIEdgeInsets
var screenCornerRadius: CGFloat
var state: PrivateCallScreen.State
var remoteVideo: VideoSource?
init(size: CGSize, insets: UIEdgeInsets, screenCornerRadius: CGFloat, state: PrivateCallScreen.State, remoteVideo: VideoSource?) {
init(size: CGSize, insets: UIEdgeInsets, screenCornerRadius: CGFloat, state: PrivateCallScreen.State) {
self.size = size
self.insets = insets
self.screenCornerRadius = screenCornerRadius
self.state = state
self.remoteVideo = remoteVideo
}
static func ==(lhs: Params, rhs: Params) -> Bool {
@@ -34,9 +32,6 @@ final class ContentView: UIView {
if lhs.state != rhs.state {
return false
}
if lhs.remoteVideo !== rhs.remoteVideo {
return false
}
return true
}
}
@@ -58,6 +53,13 @@ final class ContentView: UIView {
private var activeRemoteVideoSource: VideoSource?
private var waitingForFirstVideoFrameDisposable: Disposable?
private var processedInitialAudioLevelBump: Bool = false
private var audioLevelBump: Float = 0.0
private var targetAudioLevel: Float = 0.0
private var audioLevel: Float = 0.0
private var audioLevelUpdateSubscription: SharedDisplayLinkDriver.Link?
override init(frame: CGRect) {
self.blobLayer = CallBlobsLayer()
self.avatarLayer = AvatarLayer()
@@ -78,6 +80,13 @@
self.statusView.requestLayout = { [weak self] in
self?.update(transition: .immediate)
}
self.audioLevelUpdateSubscription = SharedDisplayLinkDriver.shared.add(needsHighestFramerate: false, { [weak self] in
guard let self else {
return
}
self.attenuateAudioLevelStep()
})
}
required init?(coder: NSCoder) {
@@ -88,23 +97,43 @@
self.waitingForFirstVideoFrameDisposable?.dispose()
}
func addIncomingAudioLevel(value: Float) {
self.targetAudioLevel = value
}
private func attenuateAudioLevelStep() {
self.audioLevel = self.audioLevel * 0.8 + (self.targetAudioLevel + self.audioLevelBump) * 0.2
if self.audioLevel <= 0.01 {
self.audioLevel = 0.0
}
self.updateAudioLevel()
}
private func updateAudioLevel() {
if self.activeRemoteVideoSource == nil {
let additionalAvatarScale = CGFloat(max(0.0, min(self.audioLevel, 5.0)) * 0.05)
self.avatarLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale, 1.0)
let blobAmplificationFactor: CGFloat = 2.0
self.blobLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0)
}
}
func update(
size: CGSize,
insets: UIEdgeInsets,
screenCornerRadius: CGFloat,
state: PrivateCallScreen.State,
remoteVideo: VideoSource?,
transition: Transition
) {
let params = Params(size: size, insets: insets, screenCornerRadius: screenCornerRadius, state: state, remoteVideo: remoteVideo)
let params = Params(size: size, insets: insets, screenCornerRadius: screenCornerRadius, state: state)
if self.params == params {
return
}
if self.params?.remoteVideo !== params.remoteVideo {
if self.params?.state.remoteVideo !== params.state.remoteVideo {
self.waitingForFirstVideoFrameDisposable?.dispose()
if let remoteVideo = params.remoteVideo {
if let remoteVideo = params.state.remoteVideo {
if remoteVideo.currentOutput != nil {
self.activeRemoteVideoSource = remoteVideo
} else {
@@ -123,7 +152,7 @@
}
var shouldUpdate = false
self.waitingForFirstVideoFrameDisposable = (firstVideoFrameSignal
|> timeout(1.0, queue: .mainQueue(), alternate: .complete())
|> timeout(4.0, queue: .mainQueue(), alternate: .complete())
|> deliverOnMainQueue).startStrict(completed: { [weak self] in
guard let self else {
return
@@ -176,11 +205,6 @@
}
}
//self.phase += 3.0 / 60.0
//self.phase = self.phase.truncatingRemainder(dividingBy: 1.0)
//var avatarScale: CGFloat = 0.05 * sin(CGFloat(0.0) * CGFloat.pi)
//avatarScale *= 1.0 - self.videoDisplayFraction.value
let collapsedAvatarSize: CGFloat = 136.0
let blobSize: CGFloat = collapsedAvatarSize + 40.0
@@ -252,10 +276,17 @@
let blobFrame = CGRect(origin: CGPoint(x: floor(avatarFrame.midX - blobSize * 0.5), y: floor(avatarFrame.midY - blobSize * 0.5)), size: CGSize(width: blobSize, height: blobSize))
transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY))
transition.setBounds(layer: self.blobLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size))
//self.blobLayer.transform = CATransform3DMakeScale(1.0 + avatarScale * 2.0, 1.0 + avatarScale * 2.0, 1.0)
let titleString: String
switch params.state.lifecycleState {
case .terminated:
titleString = "Call Ended"
default:
titleString = params.state.name
}
let titleSize = self.titleView.update(
string: params.state.name,
string: titleString,
fontSize: self.activeRemoteVideoSource == nil ? 28.0 : 17.0,
fontWeight: self.activeRemoteVideoSource == nil ? 0.0 : 0.25,
color: .white,
@@ -281,6 +312,19 @@
statusState = .waiting(.generatingKeys)
case let .active(activeState):
statusState = .active(StatusView.ActiveState(startTimestamp: activeState.startTime, signalStrength: activeState.signalInfo.quality))
if !self.processedInitialAudioLevelBump {
self.processedInitialAudioLevelBump = true
self.audioLevelBump = 2.0
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2, execute: { [weak self] in
guard let self else {
return
}
self.audioLevelBump = 0.0
})
}
case let .terminated(terminatedState):
statusState = .terminated(StatusView.TerminatedState(duration: terminatedState.duration))
}
if let previousState = self.statusView.state, previousState.key != statusState.key {
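
The avatar and blob scaling above is driven by a per-frame display-link tick that low-pass filters the incoming audio level: each step computes level = level * 0.8 + (target + bump) * 0.2 and snaps to zero below 0.01, and audioLevelBump is briefly set to 2.0 for 0.2 s once the call turns active so the blob visibly reacts on connect. A minimal standalone sketch of that smoothing; `AudioLevelSmoother` is an illustrative name, not part of this commit:

// Exponential moving average ticked once per display-link frame; the
// snap-to-zero threshold lets the blob settle instead of oscillating.
struct AudioLevelSmoother {
    var target: Float = 0.0
    var bump: Float = 0.0 // temporary boost applied when the call connects
    private(set) var level: Float = 0.0

    mutating func step() {
        level = level * 0.8 + (target + bump) * 0.2
        if level <= 0.01 {
            level = 0.0
        }
    }
}

// Per-frame usage, mirroring attenuateAudioLevelStep() above:
//   smoother.step()
//   let extraScale = CGFloat(max(0.0, min(smoother.level, 5.0)) * 0.05)
//   avatarLayer.transform = CATransform3DMakeScale(1.0 + extraScale, 1.0 + extraScale, 1.0)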

View File

@@ -179,14 +179,24 @@ final class StatusView: UIView {
}
}
struct TerminatedState: Equatable {
var duration: Double
init(duration: Double) {
self.duration = duration
}
}
enum State: Equatable {
enum Key: Equatable {
case waiting(WaitingState)
case active
case terminated
}
case waiting(WaitingState)
case active(ActiveState)
case terminated(TerminatedState)
var key: Key {
switch self {
@@ -194,6 +204,8 @@
return .waiting(waitingState)
case .active:
return .active
case .terminated:
return .terminated
}
}
}
@@ -297,6 +309,8 @@
let duration = timestamp - activeState.startTimestamp
textString = stringForDuration(Int(duration))
signalStrength = activeState.signalStrength
case let .terminated(terminatedState):
textString = stringForDuration(Int(terminatedState.duration))
}
var contentSize = CGSize()

View File

@@ -25,6 +25,7 @@ final class TextView: UIView {
self.isOpaque = false
self.backgroundColor = nil
self.contentMode = .center
}
required init?(coder: NSCoder) {

View File

@@ -3,11 +3,11 @@ import Metal
import CoreVideo
public final class VideoSourceOutput {
let y: MTLTexture
let uv: MTLTexture
let rotationAngle: Float
public let y: MTLTexture
public let uv: MTLTexture
public let rotationAngle: Float
init(y: MTLTexture, uv: MTLTexture, rotationAngle: Float) {
public init(y: MTLTexture, uv: MTLTexture, rotationAngle: Float) {
self.y = y
self.uv = uv
self.rotationAngle = rotationAngle

View File

@@ -26,25 +26,70 @@ public final class PrivateCallScreen: UIView {
}
}
public struct TerminatedState: Equatable {
public var duration: Double
public init(duration: Double) {
self.duration = duration
}
}
public enum LifecycleState: Equatable {
case connecting
case ringing
case exchangingKeys
case active(ActiveState)
case terminated(TerminatedState)
}
public enum AudioOutput: Equatable {
case internalSpeaker
case speaker
}
public var lifecycleState: LifecycleState
public var name: String
public var avatarImage: UIImage?
public var audioOutput: AudioOutput
public var isMicrophoneMuted: Bool
public var remoteVideo: VideoSource?
public init(
lifecycleState: LifecycleState,
name: String,
avatarImage: UIImage?
avatarImage: UIImage?,
audioOutput: AudioOutput,
isMicrophoneMuted: Bool,
remoteVideo: VideoSource?
) {
self.lifecycleState = lifecycleState
self.name = name
self.avatarImage = avatarImage
self.audioOutput = audioOutput
self.isMicrophoneMuted = isMicrophoneMuted
self.remoteVideo = remoteVideo
}
public static func ==(lhs: State, rhs: State) -> Bool {
if lhs.lifecycleState != rhs.lifecycleState {
return false
}
if lhs.name != rhs.name {
return false
}
if lhs.avatarImage != rhs.avatarImage {
return false
}
if lhs.audioOutput != rhs.audioOutput {
return false
}
if lhs.isMicrophoneMuted != rhs.isMicrophoneMuted {
return false
}
if lhs.remoteVideo !== rhs.remoteVideo {
return false
}
return true
}
}
@@ -75,12 +120,13 @@ public final class PrivateCallScreen: UIView {
private var params: Params?
private var remoteVideo: VideoSource?
private var isSpeakerOn: Bool = false
private var isMicrophoneMuted: Bool = false
private var isVideoOn: Bool = false
public var speakerAction: (() -> Void)?
public var videoAction: (() -> Void)?
public var microphoneMuteAction: (() -> Void)?
public var endCallAction: (() -> Void)?
public override init(frame: CGRect) {
self.blurContentsLayer = SimpleLayer()
@@ -173,6 +219,10 @@ public final class PrivateCallScreen: UIView {
return result
}
public func addIncomingAudioLevel(value: Float) {
self.contentView.addIncomingAudioLevel(value: value)
}
public func update(size: CGSize, insets: UIEdgeInsets, screenCornerRadius: CGFloat, state: State, transition: Transition) {
let params = Params(size: size, insets: insets, screenCornerRadius: screenCornerRadius, state: state)
if self.params == params {
@@ -216,6 +266,8 @@
} else {
backgroundStateIndex = 1
}
case .terminated:
backgroundStateIndex = 0
}
self.backgroundLayer.update(stateIndex: backgroundStateIndex, transition: transition)
@@ -231,12 +283,13 @@
self.buttonGroupView.frame = CGRect(origin: CGPoint(), size: params.size)
let buttons: [ButtonGroupView.Button] = [
ButtonGroupView.Button(content: .speaker(isActive: self.isSpeakerOn), action: { [weak self] in
guard let self, var params = self.params else {
ButtonGroupView.Button(content: .speaker(isActive: params.state.audioOutput != .internalSpeaker), action: { [weak self] in
guard let self else {
return
}
self.speakerAction?()
self.isSpeakerOn = !self.isSpeakerOn
/*self.isSpeakerOn = !self.isSpeakerOn
switch params.state.lifecycleState {
case .connecting:
@@ -259,13 +312,14 @@
}
self.params = params
self.update(transition: .spring(duration: 0.3))
self.update(transition: .spring(duration: 0.3))*/
}),
ButtonGroupView.Button(content: .video(isActive: self.isVideoOn), action: { [weak self] in
guard let self else {
return
}
if self.remoteVideo == nil {
self.videoAction?()
/*if self.remoteVideo == nil {
if let url = Bundle.main.url(forResource: "test2", withExtension: "mp4") {
self.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: url)
}
@@ -275,12 +329,19 @@
self.isVideoOn = !self.isVideoOn
self.update(transition: .spring(duration: 0.3))
self.update(transition: .spring(duration: 0.3))*/
}),
ButtonGroupView.Button(content: .microphone(isMuted: self.isMicrophoneMuted), action: {
ButtonGroupView.Button(content: .microphone(isMuted: params.state.isMicrophoneMuted), action: { [weak self] in
guard let self else {
return
}
self.microphoneMuteAction?()
}),
ButtonGroupView.Button(content: .end, action: {
ButtonGroupView.Button(content: .end, action: { [weak self] in
guard let self else {
return
}
self.endCallAction?()
})
]
self.buttonGroupView.update(size: params.size, buttons: buttons, transition: transition)
@@ -291,7 +352,6 @@
insets: params.insets,
screenCornerRadius: params.screenCornerRadius,
state: params.state,
remoteVideo: remoteVideo,
transition: transition
)
}

View File

@@ -55,6 +55,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
public var crashOnMemoryPressure: Bool
public var unidirectionalSwipeToReply: Bool
public var dustEffect: Bool
public var callUIV2: Bool
public static var defaultSettings: ExperimentalUISettings {
return ExperimentalUISettings(
@@ -87,7 +88,8 @@
storiesJpegExperiment: false,
crashOnMemoryPressure: false,
unidirectionalSwipeToReply: false,
dustEffect: false
dustEffect: false,
callUIV2: false
)
}
@@ -121,7 +123,8 @@
storiesJpegExperiment: Bool,
crashOnMemoryPressure: Bool,
unidirectionalSwipeToReply: Bool,
dustEffect: Bool
dustEffect: Bool,
callUIV2: Bool
) {
self.keepChatNavigationStack = keepChatNavigationStack
self.skipReadHistory = skipReadHistory
@@ -153,6 +156,7 @@
self.crashOnMemoryPressure = crashOnMemoryPressure
self.unidirectionalSwipeToReply = unidirectionalSwipeToReply
self.dustEffect = dustEffect
self.callUIV2 = callUIV2
}
public init(from decoder: Decoder) throws {
@@ -188,6 +192,7 @@
self.crashOnMemoryPressure = try container.decodeIfPresent(Bool.self, forKey: "crashOnMemoryPressure") ?? false
self.unidirectionalSwipeToReply = try container.decodeIfPresent(Bool.self, forKey: "unidirectionalSwipeToReply") ?? false
self.dustEffect = try container.decodeIfPresent(Bool.self, forKey: "dustEffect") ?? false
self.callUIV2 = try container.decodeIfPresent(Bool.self, forKey: "callUIV2") ?? false
}
public func encode(to encoder: Encoder) throws {
@@ -223,6 +228,7 @@
try container.encode(self.crashOnMemoryPressure, forKey: "crashOnMemoryPressure")
try container.encode(self.unidirectionalSwipeToReply, forKey: "unidirectionalSwipeToReply")
try container.encode(self.dustEffect, forKey: "dustEffect")
try container.encode(self.callUIV2, forKey: "callUIV2")
}
}