diff --git a/submodules/AccountContext/Sources/MediaManager.swift b/submodules/AccountContext/Sources/MediaManager.swift
index 528f8fa8ac..ba033b358d 100644
--- a/submodules/AccountContext/Sources/MediaManager.swift
+++ b/submodules/AccountContext/Sources/MediaManager.swift
@@ -27,6 +27,7 @@ public protocol MediaManager: class {
     func playlistControl(_ control: SharedMediaPlayerControlAction, type: MediaManagerPlayerType?)
     
     func filteredPlaylistState(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<SharedMediaPlayerItemPlaybackState?, NoError>
+    func filteredPlayerAudioLevelEvents(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<Float, NoError>
     
     func setOverlayVideoNode(_ node: OverlayMediaItemNode?)
     func hasOverlayVideoNode(_ node: OverlayMediaItemNode) -> Bool
diff --git a/submodules/AccountContext/Sources/PresentationCallManager.swift b/submodules/AccountContext/Sources/PresentationCallManager.swift
index aedbdc6b8b..e1e1f40397 100644
--- a/submodules/AccountContext/Sources/PresentationCallManager.swift
+++ b/submodules/AccountContext/Sources/PresentationCallManager.swift
@@ -90,6 +90,7 @@ public protocol PresentationCall: class {
     func toggleIsMuted()
     func setIsMuted(_ value: Bool)
    func setEnableVideo(_ value: Bool)
+    func setOutgoingVideoIsPaused(_ isPaused: Bool)
     func switchVideoCamera()
     func setCurrentAudioOutput(_ output: AudioSessionOutput)
     func debugInfo() -> Signal<(String, String), NoError>
diff --git a/submodules/Display/Source/CAAnimationUtils.swift b/submodules/Display/Source/CAAnimationUtils.swift
index 6eb000add6..c683fdf4f8 100644
--- a/submodules/Display/Source/CAAnimationUtils.swift
+++ b/submodules/Display/Source/CAAnimationUtils.swift
@@ -238,14 +238,14 @@ public extension CALayer {
         self.animate(from: NSValue(cgPoint: from), to: NSValue(cgPoint: to), keyPath: "position", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
     
-    func animateBounds(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func animateBounds(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
         if from == to && !force {
             if let completion = completion {
                 completion(true)
             }
             return
         }
-        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
+        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
     
     func animateBoundsOriginXAdditive(from: CGFloat, to: CGFloat, duration: Double, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction?
= nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) { @@ -268,7 +268,7 @@ public extension CALayer { self.animateKeyframes(values: values.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position") } - func animateFrame(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) { + func animateFrame(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) { if from == to && !force { if let completion = completion { completion(true) @@ -302,14 +302,14 @@ public extension CALayer { toBounds = CGRect() } - self.animatePosition(from: fromPosition, to: toPosition, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in + self.animatePosition(from: fromPosition, to: toPosition, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in if !value { interrupted = true } completedPosition = true partialCompletion() }) - self.animateBounds(from: fromBounds, to: toBounds, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in + self.animateBounds(from: fromBounds, to: toBounds, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in if !value { interrupted = true } diff --git a/submodules/Display/Source/ContainedViewLayoutTransition.swift b/submodules/Display/Source/ContainedViewLayoutTransition.swift index 0ce89df155..1d4c1867a6 100644 --- a/submodules/Display/Source/ContainedViewLayoutTransition.swift +++ b/submodules/Display/Source/ContainedViewLayoutTransition.swift @@ -63,7 +63,7 @@ public enum ContainedViewLayoutTransition { } public extension ContainedViewLayoutTransition { - func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) { + func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? 
= nil) { if node.frame.equalTo(frame) && !force { completion?(true) } else { @@ -81,7 +81,7 @@ public extension ContainedViewLayoutTransition { previousFrame = node.frame } node.frame = frame - node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in + node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, delay: delay, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in if let completion = completion { completion(result) } diff --git a/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m b/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m index b47a97ccb1..a6b92c6eb7 100644 --- a/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m +++ b/submodules/LegacyComponents/Sources/TGModernConversationInputMicButton.m @@ -124,7 +124,9 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius CGFloat _currentScale; CGFloat _currentTranslation; CGFloat _targetTranslation; + CGFloat _cancelTranslation; + CGFloat _cancelTargetTranslation; CFAbsoluteTime _animationStartTime; @@ -505,6 +507,8 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius _currentLevel = 0.0f; _currentTranslation = 0.0f; _targetTranslation = 0.0f; + _cancelTranslation = 0; + _cancelTargetTranslation = 0; _currentScale = 1.0f; [UIView animateWithDuration:0.18 animations:^{ _innerIconWrapperView.transform = CGAffineTransformMakeScale(0.2f, 0.2f); @@ -531,10 +535,9 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius [_presentation dismiss]; _presentation = nil; - _cancelTranslation = 0; id delegate = _delegate; if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)]) - [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTranslation]; + [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation]; } if (_previousIcon != nil) @@ -565,17 +568,16 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius [self setIcon:TGTintedImage(TGComponentsImageNamed(@"RecordSendIcon"), _pallete != nil ? _pallete.iconColor : [UIColor whiteColor])]; _currentScale = 1; - _cancelTranslation = 0; + _cancelTargetTranslation = 0; id delegate = _delegate; if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)]) - [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTranslation]; + [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation]; _innerIconView.transform = CGAffineTransformMakeScale(0.3f, 0.3f); _innerIconView.alpha = 0.0f; [UIView animateWithDuration:0.3 delay:0.0 options:7 << 16 animations:^ { _innerIconView.transform = CGAffineTransformIdentity; - _decoration.transform = CGAffineTransformIdentity; snapshotView.transform = CGAffineTransformMakeScale(0.001f, 0.001f); } completion:^(__unused BOOL finished) { [snapshotView removeFromSuperview]; @@ -695,7 +697,7 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius _currentScale = scale; _targetTranslation = distanceY; - _cancelTranslation = distanceX; + _cancelTargetTranslation = distanceX; CGFloat targetLockness = _locked ? 
            1.0f : MIN(1.0f, fabs(_targetTranslation) / 105.0f);
        [_lock updateLockness:targetLockness];
        _lockView.lockness = targetLockness;
@@ -713,7 +715,7 @@
        
        id delegate = _delegate;
        if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)])
-            [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTranslation];
+            [delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation];
        
        if (distanceX < -150.0f)
        {
            id delegate = _delegate;
@@ -837,11 +839,14 @@
            _innerCircleView.image = nil;
        }
        NSTimeInterval t = CACurrentMediaTime();
+        
+        _currentLevel = _currentLevel * 0.9f + _inputLevel * 0.1f;
+        [_decoration tick:_currentLevel];
+        
+        _currentTranslation = MIN(0.0, _currentTranslation * 0.7f + _targetTranslation * 0.3f);
+        _cancelTranslation = MIN(0.0, _cancelTranslation * 0.7f + _cancelTargetTranslation * 0.3f);
+        
        if (t > _animationStartTime) {
-            _currentLevel = _currentLevel * 0.8f + _inputLevel * 0.2f;
-            
-            _currentTranslation = MIN(0.0, _currentTranslation * 0.7f + _targetTranslation * 0.3f);
-            
            CGFloat outerScale = outerCircleMinScale + _currentLevel * (1.0f - outerCircleMinScale);
            CGAffineTransform translation = CGAffineTransformMakeTranslation(0, _currentTranslation);
            CGAffineTransform transform = CGAffineTransformScale(translation, outerScale, outerScale);
@@ -857,8 +862,6 @@
            _innerCircleView.transform = transform;
            _innerIconWrapperView.transform = transform;
            _decoration.transform = transform;
-            
-            [_decoration tick:_currentLevel];
        }
    }
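The mic-button hunk above moves the level and translation smoothing out of the `t > _animationStartTime` guard (so the decoration keeps ticking before the intro animation deadline passes), slows the level blend from 0.8/0.2 to 0.9/0.1, and smooths the new `_cancelTranslation` alongside the vertical one. A minimal Swift sketch of the same per-tick exponential moving averages, with illustrative names (the real code is the Objective-C above):

```swift
import CoreGraphics

// Per-display-link-tick exponential smoothing, as in the hunk above. The
// 0.9/0.1 blend gives the waveform level a slower attack and decay than the
// 0.7/0.3 blend used for the drag translations.
struct MicButtonSmoothing {
    private(set) var level: CGFloat = 0.0
    private(set) var translation: CGFloat = 0.0
    private(set) var cancelTranslation: CGFloat = 0.0

    mutating func tick(inputLevel: CGFloat, targetTranslation: CGFloat, cancelTarget: CGFloat) {
        level = level * 0.9 + inputLevel * 0.1
        // Translations are clamped to non-positive values, matching MIN(0.0, ...).
        translation = min(0.0, translation * 0.7 + targetTranslation * 0.3)
        cancelTranslation = min(0.0, cancelTranslation * 0.7 + cancelTarget * 0.3)
    }
}
```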
diff --git a/submodules/MediaPlayer/Sources/MediaPlayer.swift b/submodules/MediaPlayer/Sources/MediaPlayer.swift
index 0bd6b3bc18..4c53584893 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayer.swift
@@ -126,17 +126,19 @@ private final class MediaPlayerContext {
     private var lastStatusUpdateTimestamp: Double?
     private let playerStatus: Promise<MediaPlayerStatus>
     private let playerStatusValue = Atomic<MediaPlayerStatus?>(value: nil)
+    private let audioLevelPipe: ValuePipe<Float>
     
     fileprivate var actionAtEnd: MediaPlayerActionAtEnd = .stop
     
     private var stoppedAtEnd = false
     
-    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, postbox: Postbox, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool) {
+    init(queue: Queue, audioSessionManager: ManagedAudioSession, playerStatus: Promise<MediaPlayerStatus>, audioLevelPipe: ValuePipe<Float>, postbox: Postbox, resourceReference: MediaResourceReference, tempFilePath: String?, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, playAndRecord: Bool, keepAudioSessionWhilePaused: Bool, continuePlayingWithoutSoundOnLostAudioSession: Bool) {
         assert(queue.isCurrent())
         
         self.queue = queue
         self.audioSessionManager = audioSessionManager
         self.playerStatus = playerStatus
+        self.audioLevelPipe = audioLevelPipe
         self.postbox = postbox
         self.resourceReference = resourceReference
         self.tempFilePath = tempFilePath
@@ -366,7 +368,7 @@ private final class MediaPlayerContext {
             self.audioRenderer = nil
             
             let queue = self.queue
-            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, updatedRate: { [weak self] in
+            renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -444,7 +446,7 @@ private final class MediaPlayerContext {
         self.lastStatusUpdateTimestamp = nil
         if self.enableSound {
             let queue = self.queue
-            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, updatedRate: { [weak self] in
+            let renderer = MediaPlayerAudioRenderer(audioSession: .manager(self.audioSessionManager), playAndRecord: self.playAndRecord, forceAudioToSpeaker: self.forceAudioToSpeaker, baseRate: self.baseRate, audioLevelPipe: self.audioLevelPipe, updatedRate: { [weak self] in
                 queue.async {
                     if let strongSelf = self {
                         strongSelf.tick()
@@ -966,6 +968,11 @@ public final class MediaPlayer {
         return self.statusValue.get()
     }
     
+    private let audioLevelPipe = ValuePipe<Float>()
+    public var audioLevelEvents: Signal<Float, NoError> {
+        return self.audioLevelPipe.signal()
+    }
+    
     public var actionAtEnd: MediaPlayerActionAtEnd = .stop {
         didSet {
             let value = self.actionAtEnd
@@ -978,8 +985,9 @@
     }
     
     public init(audioSessionManager: ManagedAudioSession, postbox: Postbox, resourceReference: MediaResourceReference, tempFilePath: String? = nil, streamable: MediaPlayerStreaming, video: Bool, preferSoftwareDecoding: Bool, playAutomatically: Bool = false, enableSound: Bool, baseRate: Double = 1.0, fetchAutomatically: Bool, playAndRecord: Bool = false, keepAudioSessionWhilePaused: Bool = false, continuePlayingWithoutSoundOnLostAudioSession: Bool = false) {
+        let audioLevelPipe = self.audioLevelPipe
         self.queue.async {
-            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, postbox: postbox, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession)
+            let context = MediaPlayerContext(queue: self.queue, audioSessionManager: audioSessionManager, playerStatus: self.statusValue, audioLevelPipe: audioLevelPipe, postbox: postbox, resourceReference: resourceReference, tempFilePath: tempFilePath, streamable: streamable, video: video, preferSoftwareDecoding: preferSoftwareDecoding, playAutomatically: playAutomatically, enableSound: enableSound, baseRate: baseRate, fetchAutomatically: fetchAutomatically, playAndRecord: playAndRecord, keepAudioSessionWhilePaused: keepAudioSessionWhilePaused, continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession)
             self.contextRef = Unmanaged.passRetained(context)
         }
     }
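The MediaPlayer.swift changes above thread a single `ValuePipe<Float>` from the public `MediaPlayer` object down into its queue-confined context and expose it as `audioLevelEvents`. A sketch of the consuming side, assuming SwiftSignalKit's standard operators; the observer class and names here are illustrative, not part of the diff:

```swift
import SwiftSignalKit

// Levels are produced on the audio render thread, so delivery is hopped to
// the main queue before any UI work.
final class PlayerLevelObserver {
    private let disposable = MetaDisposable()

    func observe(player: MediaPlayer) {
        disposable.set((player.audioLevelEvents
        |> deliverOnMainQueue).start(next: { level in
            // Drive a waveform or blob decoration from the normalized peak.
            print("audio level: \(level)")
        }))
    }

    deinit {
        disposable.dispose()
    }
}
```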
diff --git a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
index f8e3c7efe7..7da7527b03 100644
--- a/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
+++ b/submodules/MediaPlayer/Sources/MediaPlayerAudioRenderer.swift
@@ -15,21 +15,26 @@ private final class AudioPlayerRendererBufferContext {
     var state: AudioPlayerRendererState = .paused
     let timebase: CMTimebase
     let buffer: RingByteBuffer
+    var audioLevelPeak: Int16 = 0
+    var audioLevelPeakCount: Int = 0
+    var audioLevelPeakUpdate: Double = 0.0
     var bufferMaxChannelSampleIndex: Int64 = 0
     var lowWaterSize: Int
     var notifyLowWater: () -> Void
     var updatedRate: () -> Void
+    var updatedLevel: (Float) -> Void
     var notifiedLowWater = false
     var overflowData = Data()
     var overflowDataMaxChannelSampleIndex: Int64 = 0
     var renderTimestampTick: Int64 = 0
     
-    init(timebase: CMTimebase, buffer: RingByteBuffer, lowWaterSize: Int, notifyLowWater: @escaping () -> Void, updatedRate: @escaping () -> Void) {
+    init(timebase: CMTimebase, buffer: RingByteBuffer, lowWaterSize: Int, notifyLowWater: @escaping () -> Void, updatedRate: @escaping () -> Void, updatedLevel: @escaping (Float) -> Void) {
         self.timebase = timebase
         self.buffer = buffer
         self.lowWaterSize = lowWaterSize
         self.notifyLowWater = notifyLowWater
         self.updatedRate = updatedRate
+        self.updatedLevel = updatedLevel
     }
 }
@@ -125,6 +130,31 @@ private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: U
                 let consumeCount = bufferDataSize - dataOffset
                 
                 let actualConsumedCount = rendererBuffer.dequeue(bufferData.advanced(by: dataOffset), count: consumeCount)
+                
+                var samplePtr = bufferData.advanced(by: dataOffset).assumingMemoryBound(to: Int16.self)
+                for _ in 0 ..< actualConsumedCount / 4 {
+                    let sample: Int16 = abs(samplePtr.pointee)
+                    samplePtr = samplePtr.advanced(by: 2)
+                    
+                    if context.audioLevelPeak < sample {
+                        context.audioLevelPeak = sample
+                    }
+                    context.audioLevelPeakCount += 1
+                    
+                    if context.audioLevelPeakCount >= 1200 {
+                        let level = Float(context.audioLevelPeak) / (4000.0)
+                        /*let timestamp = CFAbsoluteTimeGetCurrent()
+                        if !context.audioLevelPeakUpdate.isZero {
+                            let delta = timestamp - context.audioLevelPeakUpdate
+                            print("level = \(level), delta = \(delta)")
+                        }
+                        context.audioLevelPeakUpdate = timestamp*/
+                        context.updatedLevel(level)
+                        context.audioLevelPeak = 0
+                        context.audioLevelPeakCount = 0
+                    }
+                }
+                
                 rendererFillOffset.1 += actualConsumedCount
                 
                 if actualConsumedCount == 0 {
@@ -188,6 +218,8 @@ private final class AudioPlayerRendererContext {
     var paused = true
     
     var baseRate: Double
+    let audioLevelPipe: ValuePipe<Float>
+    
     var audioGraph: AUGraph?
     var timePitchAudioUnit: AudioComponentInstance?
     var outputAudioUnit: AudioComponentInstance?
@@ -210,12 +242,13 @@ private final class AudioPlayerRendererContext {
         }
     }
     
-    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, forceAudioToSpeaker: Bool, baseRate: Double, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    init(controlTimebase: CMTimebase, audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         assert(audioPlayerRendererQueue.isCurrent())
         
         self.audioSession = audioSession
         self.forceAudioToSpeaker = forceAudioToSpeaker
         self.baseRate = baseRate
+        self.audioLevelPipe = audioLevelPipe
         self.controlTimebase = controlTimebase
         self.updatedRate = updatedRate
@@ -234,6 +267,8 @@ private final class AudioPlayerRendererContext {
             notifyLowWater()
         }, updatedRate: {
             updatedRate()
+        }, updatedLevel: { level in
+            audioLevelPipe.putNext(level)
         }))
         self.bufferContextId = registerPlayerRendererBufferContext(self.bufferContext)
@@ -709,7 +744,7 @@ public final class MediaPlayerAudioRenderer {
     private let audioClock: CMClock
     public let audioTimebase: CMTimebase
     
-    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, forceAudioToSpeaker: Bool, baseRate: Double, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
+    public init(audioSession: MediaPlayerAudioSessionControl, playAndRecord: Bool, forceAudioToSpeaker: Bool, baseRate: Double, audioLevelPipe: ValuePipe<Float>, updatedRate: @escaping () -> Void, audioPaused: @escaping () -> Void) {
         var audioClock: CMClock?
         CMAudioClockCreate(allocator: nil, clockOut: &audioClock)
         if audioClock == nil {
@@ -722,7 +757,7 @@ public final class MediaPlayerAudioRenderer {
         self.audioTimebase = audioTimebase!
         
         audioPlayerRendererQueue.async {
-            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, updatedRate: updatedRate, audioPaused: audioPaused)
+            let context = AudioPlayerRendererContext(controlTimebase: audioTimebase!, audioSession: audioSession, playAndRecord: playAndRecord, forceAudioToSpeaker: forceAudioToSpeaker, baseRate: baseRate, audioLevelPipe: audioLevelPipe, updatedRate: updatedRate, audioPaused: audioPaused)
            self.contextRef = Unmanaged.passRetained(context)
        }
    }
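The render-callback hunk above computes a crude peak meter: it walks the just-dequeued PCM as interleaved 16-bit stereo (4 bytes per frame, left channel only), tracks the peak over 1200 frames, and reports it normalized against 4000, an empirically chosen full scale well below `Int16.max`. A standalone sketch of the same loop over a `Data` buffer; the function name is illustrative, and `.magnitude` stands in for `abs` to sidestep the overflow trap the original would hit on a full-scale -32768 sample:

```swift
import Foundation

// Peak metering over interleaved 16-bit stereo PCM, mirroring the hunk above.
func meterPeaks(_ pcm: Data, onLevel: (Float) -> Void) {
    var peak: UInt16 = 0
    var count = 0
    pcm.withUnsafeBytes { raw in
        let samples = raw.bindMemory(to: Int16.self)
        var i = 0
        while i + 1 < samples.count {
            peak = max(peak, samples[i].magnitude)
            i += 2 // skip the right channel
            count += 1
            if count >= 1200 {
                onLevel(Float(peak) / 4000.0)
                peak = 0
                count = 0
            }
        }
    }
}
```

At 44.1 kHz this emits roughly every 27 ms, which matches the pipe-per-chunk cadence the commented-out timing probe above was measuring.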
diff --git a/submodules/TelegramApi/Sources/Api0.swift b/submodules/TelegramApi/Sources/Api0.swift
index 1448006137..734280d303 100644
--- a/submodules/TelegramApi/Sources/Api0.swift
+++ b/submodules/TelegramApi/Sources/Api0.swift
@@ -254,7 +254,6 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
     dict[-1512627963] = { return Api.Update.parse_updateDialogFilterOrder($0) }
     dict[889491791] = { return Api.Update.parse_updateDialogFilters($0) }
     dict[643940105] = { return Api.Update.parse_updatePhoneCallSignalingData($0) }
-    dict[1708307556] = { return Api.Update.parse_updateChannelParticipant($0) }
     dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
     dict[1558266229] = { return Api.PopularContact.parse_popularContact($0) }
     dict[-373643672] = { return Api.FolderPeer.parse_folderPeer($0) }
@@ -534,8 +533,8 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
     dict[1933519201] = { return Api.PeerSettings.parse_peerSettings($0) }
     dict[1577067778] = { return Api.auth.SentCode.parse_sentCode($0) }
     dict[480546647] = { return Api.InputChatPhoto.parse_inputChatPhotoEmpty($0) }
-    dict[-1837345356] = { return Api.InputChatPhoto.parse_inputChatUploadedPhoto($0) }
     dict[-1991004873] = { return Api.InputChatPhoto.parse_inputChatPhoto($0) }
+    dict[-968723890] = { return Api.InputChatPhoto.parse_inputChatUploadedPhoto($0) }
     dict[-368917890] = { return Api.PaymentCharge.parse_paymentCharge($0) }
     dict[-1387279939] = { return Api.MessageInteractionCounters.parse_messageInteractionCounters($0) }
     dict[-1107852396] = { return Api.stats.BroadcastStats.parse_broadcastStats($0) }
@@ -576,7 +575,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
     dict[-177732982] = { return Api.BankCardOpenUrl.parse_bankCardOpenUrl($0) }
     dict[307276766] = { return Api.account.Authorizations.parse_authorizations($0) }
     dict[935395612] = { return Api.ChatPhoto.parse_chatPhotoEmpty($0) }
-    dict[1197267925] = { return Api.ChatPhoto.parse_chatPhoto($0) }
+    dict[-770990276] = { return Api.ChatPhoto.parse_chatPhoto($0) }
     dict[1869903447] = { return Api.PageCaption.parse_pageCaption($0) }
     dict[1062645411] = { return Api.payments.PaymentForm.parse_paymentForm($0) }
     dict[1342771681] = { return Api.payments.PaymentReceipt.parse_paymentReceipt($0) }
@@ -782,7 +781,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?]
= { dict[-1282352120] = { return Api.PageRelatedArticle.parse_pageRelatedArticle($0) } dict[313694676] = { return Api.StickerPack.parse_stickerPack($0) } dict[1326562017] = { return Api.UserProfilePhoto.parse_userProfilePhotoEmpty($0) } - dict[-321430132] = { return Api.UserProfilePhoto.parse_userProfilePhoto($0) } + dict[1775479590] = { return Api.UserProfilePhoto.parse_userProfilePhoto($0) } dict[-74456004] = { return Api.payments.SavedInfo.parse_savedInfo($0) } dict[1041346555] = { return Api.updates.ChannelDifference.parse_channelDifferenceEmpty($0) } dict[543450958] = { return Api.updates.ChannelDifference.parse_channelDifference($0) } diff --git a/submodules/TelegramApi/Sources/Api1.swift b/submodules/TelegramApi/Sources/Api1.swift index 018fbe13a9..779642624a 100644 --- a/submodules/TelegramApi/Sources/Api1.swift +++ b/submodules/TelegramApi/Sources/Api1.swift @@ -6037,7 +6037,6 @@ public extension Api { case updateDialogFilterOrder(order: [Int32]) case updateDialogFilters case updatePhoneCallSignalingData(phoneCallId: Int64, data: Buffer) - case updateChannelParticipant(flags: Int32, channelId: Int32, date: Int32, userId: Int32, prevParticipant: Api.ChannelParticipant?, newParticipant: Api.ChannelParticipant?, qts: Int32) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -6718,18 +6717,6 @@ public extension Api { serializeInt64(phoneCallId, buffer: buffer, boxed: false) serializeBytes(data, buffer: buffer, boxed: false) break - case .updateChannelParticipant(let flags, let channelId, let date, let userId, let prevParticipant, let newParticipant, let qts): - if boxed { - buffer.appendInt32(1708307556) - } - serializeInt32(flags, buffer: buffer, boxed: false) - serializeInt32(channelId, buffer: buffer, boxed: false) - serializeInt32(date, buffer: buffer, boxed: false) - serializeInt32(userId, buffer: buffer, boxed: false) - if Int(flags) & Int(1 << 0) != 0 {prevParticipant!.serialize(buffer, true)} - if Int(flags) & Int(1 << 1) != 0 {newParticipant!.serialize(buffer, true)} - serializeInt32(qts, buffer: buffer, boxed: false) - break } } @@ -6897,8 +6884,6 @@ public extension Api { return ("updateDialogFilters", []) case .updatePhoneCallSignalingData(let phoneCallId, let data): return ("updatePhoneCallSignalingData", [("phoneCallId", phoneCallId), ("data", data)]) - case .updateChannelParticipant(let flags, let channelId, let date, let userId, let prevParticipant, let newParticipant, let qts): - return ("updateChannelParticipant", [("flags", flags), ("channelId", channelId), ("date", date), ("userId", userId), ("prevParticipant", prevParticipant), ("newParticipant", newParticipant), ("qts", qts)]) } } @@ -8244,39 +8229,6 @@ public extension Api { return nil } } - public static func parse_updateChannelParticipant(_ reader: BufferReader) -> Update? { - var _1: Int32? - _1 = reader.readInt32() - var _2: Int32? - _2 = reader.readInt32() - var _3: Int32? - _3 = reader.readInt32() - var _4: Int32? - _4 = reader.readInt32() - var _5: Api.ChannelParticipant? - if Int(_1!) & Int(1 << 0) != 0 {if let signature = reader.readInt32() { - _5 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant - } } - var _6: Api.ChannelParticipant? - if Int(_1!) & Int(1 << 1) != 0 {if let signature = reader.readInt32() { - _6 = Api.parse(reader, signature: signature) as? Api.ChannelParticipant - } } - var _7: Int32? - _7 = reader.readInt32() - let _c1 = _1 != nil - let _c2 = _2 != nil - let _c3 = _3 != nil - let _c4 = _4 != nil - let _c5 = (Int(_1!) 
& Int(1 << 0) == 0) || _5 != nil - let _c6 = (Int(_1!) & Int(1 << 1) == 0) || _6 != nil - let _c7 = _7 != nil - if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 { - return Api.Update.updateChannelParticipant(flags: _1!, channelId: _2!, date: _3!, userId: _4!, prevParticipant: _5, newParticipant: _6, qts: _7!) - } - else { - return nil - } - } } public enum PopularContact: TypeConstructorDescription { @@ -15303,8 +15255,8 @@ public extension Api { } public enum InputChatPhoto: TypeConstructorDescription { case inputChatPhotoEmpty - case inputChatUploadedPhoto(file: Api.InputFile) case inputChatPhoto(id: Api.InputPhoto) + case inputChatUploadedPhoto(flags: Int32, file: Api.InputFile?, video: Api.InputFile?, videoStartTs: Double?) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -15313,12 +15265,6 @@ public extension Api { buffer.appendInt32(480546647) } - break - case .inputChatUploadedPhoto(let file): - if boxed { - buffer.appendInt32(-1837345356) - } - file.serialize(buffer, true) break case .inputChatPhoto(let id): if boxed { @@ -15326,6 +15272,15 @@ public extension Api { } id.serialize(buffer, true) break + case .inputChatUploadedPhoto(let flags, let file, let video, let videoStartTs): + if boxed { + buffer.appendInt32(-968723890) + } + serializeInt32(flags, buffer: buffer, boxed: false) + if Int(flags) & Int(1 << 0) != 0 {file!.serialize(buffer, true)} + if Int(flags) & Int(1 << 1) != 0 {video!.serialize(buffer, true)} + if Int(flags) & Int(1 << 2) != 0 {serializeDouble(videoStartTs!, buffer: buffer, boxed: false)} + break } } @@ -15333,29 +15288,16 @@ public extension Api { switch self { case .inputChatPhotoEmpty: return ("inputChatPhotoEmpty", []) - case .inputChatUploadedPhoto(let file): - return ("inputChatUploadedPhoto", [("file", file)]) case .inputChatPhoto(let id): return ("inputChatPhoto", [("id", id)]) + case .inputChatUploadedPhoto(let flags, let file, let video, let videoStartTs): + return ("inputChatUploadedPhoto", [("flags", flags), ("file", file), ("video", video), ("videoStartTs", videoStartTs)]) } } public static func parse_inputChatPhotoEmpty(_ reader: BufferReader) -> InputChatPhoto? { return Api.InputChatPhoto.inputChatPhotoEmpty } - public static func parse_inputChatUploadedPhoto(_ reader: BufferReader) -> InputChatPhoto? { - var _1: Api.InputFile? - if let signature = reader.readInt32() { - _1 = Api.parse(reader, signature: signature) as? Api.InputFile - } - let _c1 = _1 != nil - if _c1 { - return Api.InputChatPhoto.inputChatUploadedPhoto(file: _1!) - } - else { - return nil - } - } public static func parse_inputChatPhoto(_ reader: BufferReader) -> InputChatPhoto? { var _1: Api.InputPhoto? if let signature = reader.readInt32() { @@ -15369,6 +15311,30 @@ public extension Api { return nil } } + public static func parse_inputChatUploadedPhoto(_ reader: BufferReader) -> InputChatPhoto? { + var _1: Int32? + _1 = reader.readInt32() + var _2: Api.InputFile? + if Int(_1!) & Int(1 << 0) != 0 {if let signature = reader.readInt32() { + _2 = Api.parse(reader, signature: signature) as? Api.InputFile + } } + var _3: Api.InputFile? + if Int(_1!) & Int(1 << 1) != 0 {if let signature = reader.readInt32() { + _3 = Api.parse(reader, signature: signature) as? Api.InputFile + } } + var _4: Double? + if Int(_1!) & Int(1 << 2) != 0 {_4 = reader.readDouble() } + let _c1 = _1 != nil + let _c2 = (Int(_1!) & Int(1 << 0) == 0) || _2 != nil + let _c3 = (Int(_1!) & Int(1 << 1) == 0) || _3 != nil + let _c4 = (Int(_1!) 
& Int(1 << 2) == 0) || _4 != nil + if _c1 && _c2 && _c3 && _c4 { + return Api.InputChatPhoto.inputChatUploadedPhoto(flags: _1!, file: _2, video: _3, videoStartTs: _4) + } + else { + return nil + } + } } public enum PaymentCharge: TypeConstructorDescription { @@ -16505,7 +16471,7 @@ public extension Api { } public enum ChatPhoto: TypeConstructorDescription { case chatPhotoEmpty - case chatPhoto(photoSmall: Api.FileLocation, photoBig: Api.FileLocation, dcId: Int32) + case chatPhoto(flags: Int32, photoSmall: Api.FileLocation, photoBig: Api.FileLocation, dcId: Int32) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -16515,10 +16481,11 @@ public extension Api { } break - case .chatPhoto(let photoSmall, let photoBig, let dcId): + case .chatPhoto(let flags, let photoSmall, let photoBig, let dcId): if boxed { - buffer.appendInt32(1197267925) + buffer.appendInt32(-770990276) } + serializeInt32(flags, buffer: buffer, boxed: false) photoSmall.serialize(buffer, true) photoBig.serialize(buffer, true) serializeInt32(dcId, buffer: buffer, boxed: false) @@ -16530,8 +16497,8 @@ public extension Api { switch self { case .chatPhotoEmpty: return ("chatPhotoEmpty", []) - case .chatPhoto(let photoSmall, let photoBig, let dcId): - return ("chatPhoto", [("photoSmall", photoSmall), ("photoBig", photoBig), ("dcId", dcId)]) + case .chatPhoto(let flags, let photoSmall, let photoBig, let dcId): + return ("chatPhoto", [("flags", flags), ("photoSmall", photoSmall), ("photoBig", photoBig), ("dcId", dcId)]) } } @@ -16539,21 +16506,24 @@ public extension Api { return Api.ChatPhoto.chatPhotoEmpty } public static func parse_chatPhoto(_ reader: BufferReader) -> ChatPhoto? { - var _1: Api.FileLocation? - if let signature = reader.readInt32() { - _1 = Api.parse(reader, signature: signature) as? Api.FileLocation - } + var _1: Int32? + _1 = reader.readInt32() var _2: Api.FileLocation? if let signature = reader.readInt32() { _2 = Api.parse(reader, signature: signature) as? Api.FileLocation } - var _3: Int32? - _3 = reader.readInt32() + var _3: Api.FileLocation? + if let signature = reader.readInt32() { + _3 = Api.parse(reader, signature: signature) as? Api.FileLocation + } + var _4: Int32? + _4 = reader.readInt32() let _c1 = _1 != nil let _c2 = _2 != nil let _c3 = _3 != nil - if _c1 && _c2 && _c3 { - return Api.ChatPhoto.chatPhoto(photoSmall: _1!, photoBig: _2!, dcId: _3!) + let _c4 = _4 != nil + if _c1 && _c2 && _c3 && _c4 { + return Api.ChatPhoto.chatPhoto(flags: _1!, photoSmall: _2!, photoBig: _3!, dcId: _4!) 
} else { return nil @@ -21767,7 +21737,7 @@ public extension Api { } public enum UserProfilePhoto: TypeConstructorDescription { case userProfilePhotoEmpty - case userProfilePhoto(photoId: Int64, photoSmall: Api.FileLocation, photoBig: Api.FileLocation, dcId: Int32) + case userProfilePhoto(flags: Int32, photoId: Int64, photoSmall: Api.FileLocation, photoBig: Api.FileLocation, dcId: Int32) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -21777,10 +21747,11 @@ public extension Api { } break - case .userProfilePhoto(let photoId, let photoSmall, let photoBig, let dcId): + case .userProfilePhoto(let flags, let photoId, let photoSmall, let photoBig, let dcId): if boxed { - buffer.appendInt32(-321430132) + buffer.appendInt32(1775479590) } + serializeInt32(flags, buffer: buffer, boxed: false) serializeInt64(photoId, buffer: buffer, boxed: false) photoSmall.serialize(buffer, true) photoBig.serialize(buffer, true) @@ -21793,8 +21764,8 @@ public extension Api { switch self { case .userProfilePhotoEmpty: return ("userProfilePhotoEmpty", []) - case .userProfilePhoto(let photoId, let photoSmall, let photoBig, let dcId): - return ("userProfilePhoto", [("photoId", photoId), ("photoSmall", photoSmall), ("photoBig", photoBig), ("dcId", dcId)]) + case .userProfilePhoto(let flags, let photoId, let photoSmall, let photoBig, let dcId): + return ("userProfilePhoto", [("flags", flags), ("photoId", photoId), ("photoSmall", photoSmall), ("photoBig", photoBig), ("dcId", dcId)]) } } @@ -21802,24 +21773,27 @@ public extension Api { return Api.UserProfilePhoto.userProfilePhotoEmpty } public static func parse_userProfilePhoto(_ reader: BufferReader) -> UserProfilePhoto? { - var _1: Int64? - _1 = reader.readInt64() - var _2: Api.FileLocation? - if let signature = reader.readInt32() { - _2 = Api.parse(reader, signature: signature) as? Api.FileLocation - } + var _1: Int32? + _1 = reader.readInt32() + var _2: Int64? + _2 = reader.readInt64() var _3: Api.FileLocation? if let signature = reader.readInt32() { _3 = Api.parse(reader, signature: signature) as? Api.FileLocation } - var _4: Int32? - _4 = reader.readInt32() + var _4: Api.FileLocation? + if let signature = reader.readInt32() { + _4 = Api.parse(reader, signature: signature) as? Api.FileLocation + } + var _5: Int32? + _5 = reader.readInt32() let _c1 = _1 != nil let _c2 = _2 != nil let _c3 = _3 != nil let _c4 = _4 != nil - if _c1 && _c2 && _c3 && _c4 { - return Api.UserProfilePhoto.userProfilePhoto(photoId: _1!, photoSmall: _2!, photoBig: _3!, dcId: _4!) + let _c5 = _5 != nil + if _c1 && _c2 && _c3 && _c4 && _c5 { + return Api.UserProfilePhoto.userProfilePhoto(flags: _1!, photoId: _2!, photoSmall: _3!, photoBig: _4!, dcId: _5!) 
} else { return nil diff --git a/submodules/TelegramCallsUI/Sources/CallController.swift b/submodules/TelegramCallsUI/Sources/CallController.swift index 02db8c1132..52089bd548 100644 --- a/submodules/TelegramCallsUI/Sources/CallController.swift +++ b/submodules/TelegramCallsUI/Sources/CallController.swift @@ -178,8 +178,8 @@ public final class CallController: ViewController { let _ = self?.call.hangUp() } - self.controllerNode.toggleVideo = { [weak self] in - let _ = self?.call.setEnableVideo(true) + self.controllerNode.setIsVideoPaused = { [weak self] isPaused in + self?.call.setOutgoingVideoIsPaused(isPaused) } self.controllerNode.back = { [weak self] in diff --git a/submodules/TelegramCallsUI/Sources/CallControllerButton.swift b/submodules/TelegramCallsUI/Sources/CallControllerButton.swift index 7a3c6b9bc1..bc7c296f6b 100644 --- a/submodules/TelegramCallsUI/Sources/CallControllerButton.swift +++ b/submodules/TelegramCallsUI/Sources/CallControllerButton.swift @@ -5,245 +5,218 @@ import AsyncDisplayKit import SwiftSignalKit import AppBundle -enum CallControllerButtonType { - case mute - case end - case accept - case speaker - case bluetooth - case switchCamera -} +private let labelFont = Font.regular(13.0) -private let buttonSize = CGSize(width: 75.0, height: 75.0) - -private func generateEmptyButtonImage(icon: UIImage?, strokeColor: UIColor?, fillColor: UIColor, knockout: Bool = false, angle: CGFloat = 0.0) -> UIImage? { - return generateImage(buttonSize, contextGenerator: { size, context in - context.clear(CGRect(origin: CGPoint(), size: size)) - context.setBlendMode(.copy) - if let strokeColor = strokeColor { - context.setFillColor(strokeColor.cgColor) - context.fillEllipse(in: CGRect(origin: CGPoint(), size: size)) - context.setFillColor(fillColor.cgColor) - context.fillEllipse(in: CGRect(origin: CGPoint(x: 1.5, y: 1.5), size: CGSize(width: size.width - 3.0, height: size.height - 3.0))) - } else { - context.setFillColor(fillColor.cgColor) - context.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.height))) - } - - if let icon = icon { - if !angle.isZero { - context.translateBy(x: size.width / 2.0, y: size.height / 2.0) - context.rotate(by: angle) - context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0) - } - let imageSize = icon.size - let imageRect = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floor((size.width - imageSize.height) / 2.0)), size: imageSize) - if knockout { - context.setBlendMode(.copy) - context.clip(to: imageRect, mask: icon.cgImage!) - context.setFillColor(UIColor.clear.cgColor) - context.fill(imageRect) - } else { - context.setBlendMode(.normal) - context.draw(icon.cgImage!, in: imageRect) +final class CallControllerButtonItemNode: HighlightTrackingButtonNode { + struct Content: Equatable { + enum Appearance: Equatable { + enum Color { + case red + case green } + + case blurred(isFilled: Bool) + case color(Color) } - }) -} - -private func generateFilledButtonImage(color: UIColor, icon: UIImage?, angle: CGFloat = 0.0) -> UIImage? 
{ - return generateImage(buttonSize, contextGenerator: { size, context in - context.clear(CGRect(origin: CGPoint(), size: size)) - context.setBlendMode(.normal) - context.setFillColor(color.cgColor) - context.fillEllipse(in: CGRect(origin: CGPoint(), size: size)) - if let icon = icon { - if !angle.isZero { - context.translateBy(x: size.width / 2.0, y: size.height / 2.0) - context.rotate(by: angle) - context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0) - } - context.draw(icon.cgImage!, in: CGRect(origin: CGPoint(x: floor((size.width - icon.size.width) / 2.0), y: floor((size.height - icon.size.height) / 2.0)), size: icon.size)) + enum Image { + case camera + case mute + case flipCamera + case bluetooth + case speaker + case accept + case end } - }) -} - -private let emptyStroke = UIColor(white: 1.0, alpha: 0.8) -private let emptyHighlightedFill = UIColor(white: 1.0, alpha: 0.3) -private let invertedFill = UIColor(white: 1.0, alpha: 1.0) - -private let labelFont = Font.regular(14.5) - -final class CallControllerButtonNode: HighlightTrackingButtonNode { - private var type: CallControllerButtonType + + var appearance: Appearance + var image: Image + } - private var regularImage: UIImage? - private var highlightedImage: UIImage? - private var filledImage: UIImage? + private let contentContainer: ASDisplayNode + private let effectView: UIVisualEffectView + private let contentNode: ASImageNode + private let overlayHighlightNode: ASImageNode + private let textNode: ImmediateTextNode - private let backgroundNode: ASImageNode - private let labelNode: ASTextNode? + private let largeButtonSize: CGFloat = 72.0 - init(type: CallControllerButtonType, label: String?) { - self.type = type + private(set) var currentContent: Content? + private(set) var currentText: String = "" + + init() { + self.contentContainer = ASDisplayNode() - self.backgroundNode = ASImageNode() - self.backgroundNode.isLayerBacked = true - self.backgroundNode.displayWithoutProcessing = false - self.backgroundNode.displaysAsynchronously = false + self.effectView = UIVisualEffectView() + self.effectView.effect = UIBlurEffect(style: .light) + self.effectView.layer.cornerRadius = self.largeButtonSize / 2.0 + self.effectView.clipsToBounds = true + self.effectView.isUserInteractionEnabled = false - if let label = label { - let labelNode = ASTextNode() - labelNode.attributedText = NSAttributedString(string: label, font: labelFont, textColor: .white) - self.labelNode = labelNode - } else { - self.labelNode = nil - } + self.contentNode = ASImageNode() + self.contentNode.isUserInteractionEnabled = false - var regularImage: UIImage? - var highlightedImage: UIImage? - var filledImage: UIImage? 
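The rewritten `CallControllerButtonItemNode` above replaces the per-type pre-rendered image triples with a single `Content` value; because `Content` is `Equatable`, the update path can skip the expensive `generateImage` call whenever nothing visual changed. The gating pattern in isolation, as a hedged sketch with simplified placeholder types rather than the node's real API:

```swift
import UIKit

// An Equatable description of the visual state decides whether the
// expensive re-render runs.
final class ContentImageCache {
    struct Content: Equatable {
        var isFilled: Bool
        var imageName: String
    }

    private var currentContent: Content?
    private(set) var image: UIImage?

    func update(content: Content, render: (Content) -> UIImage?) {
        if currentContent != content {
            currentContent = content
            image = render(content) // only when the description changed
        }
    }
}
```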
+ self.overlayHighlightNode = ASImageNode() + self.overlayHighlightNode.isUserInteractionEnabled = false + self.overlayHighlightNode.alpha = 0.0 - switch type { - case .mute: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .accept: - regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0) - highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0) - case .end: - regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) - highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) - case .speaker: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .bluetooth: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .switchCamera: - let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white) - regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true) - } + self.textNode = ImmediateTextNode() + self.textNode.displaysAsynchronously = false + self.textNode.isUserInteractionEnabled = false - self.regularImage = regularImage - self.highlightedImage = highlightedImage - self.filledImage = filledImage + super.init(pointerStyle: nil) - super.init() + self.addSubnode(self.contentContainer) + self.contentContainer.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize)) - self.addSubnode(self.backgroundNode) + self.addSubnode(self.textNode) - if let labelNode = self.labelNode { - self.addSubnode(labelNode) - } - - self.backgroundNode.image = regularImage - self.currentImage = regularImage + self.contentContainer.view.addSubview(self.effectView) + self.contentContainer.addSubnode(self.contentNode) + 
self.contentContainer.addSubnode(self.overlayHighlightNode) self.highligthedChanged = { [weak self] highlighted in - if let strongSelf = self { - strongSelf.internalHighlighted = highlighted - strongSelf.updateState(highlighted: highlighted, selected: strongSelf.isSelected) + guard let strongSelf = self else { + return } - } - } - - private var internalHighlighted = false - - override var isSelected: Bool { - didSet { - self.updateState(highlighted: self.internalHighlighted, selected: self.isSelected) - } - } - - private var currentImage: UIImage? - - private func updateState(highlighted: Bool, selected: Bool) { - let image: UIImage? - if selected { - image = self.filledImage - } else if highlighted { - image = self.highlightedImage - } else { - image = self.regularImage - } - - if self.currentImage !== image { - let currentContents = self.backgroundNode.layer.contents - self.backgroundNode.layer.removeAnimation(forKey: "contents") - if let currentContents = currentContents, let image = image { - self.backgroundNode.image = image - self.backgroundNode.layer.animate(from: currentContents as AnyObject, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: image === self.currentImage || image === self.filledImage ? 0.25 : 0.15) + if highlighted { + strongSelf.overlayHighlightNode.alpha = 1.0 } else { - self.backgroundNode.image = image + strongSelf.overlayHighlightNode.alpha = 0.0 + strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) } - self.currentImage = image } } - func updateType(_ type: CallControllerButtonType) { - if self.type == type { - return - } - self.type = type - var regularImage: UIImage? - var highlightedImage: UIImage? - var filledImage: UIImage? 
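The `highligthedChanged` handler above (the misspelling is the long-standing Display-framework API name) shows the overlay instantly on touch-down and fades it out over 0.2 s on release. The same pattern with a bare `CALayer`, as a hedged sketch rather than the node's actual implementation:

```swift
import UIKit

// Pressed state must appear on the same frame; only the release crossfades.
func setOverlayHighlighted(_ highlighted: Bool, overlay: CALayer) {
    if highlighted {
        overlay.opacity = 1.0
    } else {
        overlay.opacity = 0.0
        let fade = CABasicAnimation(keyPath: "opacity")
        fade.fromValue = 1.0
        fade.toValue = 0.0
        fade.duration = 0.2
        overlay.add(fade, forKey: "opacity")
    }
}
```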
+ func update(size: CGSize, content: Content, text: String, transition: ContainedViewLayoutTransition) { + let scaleFactor = size.width / self.largeButtonSize - switch type { - case .mute: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .accept: - regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0) - highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0) - case .end: - regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) - highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) - case .speaker: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .bluetooth: - regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) - case .switchCamera: - let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white) - regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear) - highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill) - filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true) + self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize)) + self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize)) + self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize)) + + if self.currentContent != content { + self.currentContent = content + + switch content.appearance { + case .blurred: + self.effectView.isHidden = false + case .color: + self.effectView.isHidden = true + } + + let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in + context.clear(CGRect(origin: CGPoint(), size: size)) + + var fillColor: UIColor = 
.clear + var drawOverMask = false + context.setBlendMode(.normal) + var imageScale: CGFloat = 1.0 + switch content.appearance { + case let .blurred(isFilled): + if isFilled { + fillColor = .white + drawOverMask = true + context.setBlendMode(.copy) + } + let smallButtonSize: CGFloat = 60.0 + imageScale = self.largeButtonSize / smallButtonSize + case let .color(color): + switch color { + case .red: + fillColor = UIColor(rgb: 0xd92326) + case .green: + fillColor = UIColor(rgb: 0x74db58) + } + } + + context.setFillColor(fillColor.cgColor) + context.fillEllipse(in: CGRect(origin: CGPoint(), size: size)) + + var image: UIImage? + + switch content.image { + case .camera: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white) + case .mute: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white) + case .flipCamera: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white) + case .bluetooth: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white) + case .speaker: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white) + case .accept: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white) + case .end: + image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white) + } + + if let image = image { + context.translateBy(x: size.width / 2.0, y: size.height / 2.0) + context.scaleBy(x: imageScale, y: imageScale) + context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0) + + let imageRect = CGRect(origin: CGPoint(x: floor((size.width - image.size.width) / 2.0), y: floor((size.height - image.size.height) / 2.0)), size: image.size) + if drawOverMask { + context.clip(to: imageRect, mask: image.cgImage!) 
+ context.setBlendMode(.copy) + context.setFillColor(UIColor.clear.cgColor) + context.fill(CGRect(origin: CGPoint(), size: size)) + } else { + context.draw(image.cgImage!, in: imageRect) + } + } + }) + if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image { + self.contentNode.image = contentImage + self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2) + } else { + self.contentNode.image = contentImage + } + + self.overlayHighlightNode.image = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in + context.clear(CGRect(origin: CGPoint(), size: size)) + + let fillColor: UIColor + context.setBlendMode(.normal) + switch content.appearance { + case let .blurred(isFilled): + if isFilled { + fillColor = UIColor(white: 0.0, alpha: 0.1) + } else { + fillColor = UIColor(white: 1.0, alpha: 0.2) + } + case let .color(color): + switch color { + case .red: + fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2) + case .green: + fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2) + } + } + + context.setFillColor(fillColor.cgColor) + context.fillEllipse(in: CGRect(origin: CGPoint(), size: size)) + }) } - self.regularImage = regularImage - self.highlightedImage = highlightedImage - self.filledImage = filledImage + transition.updatePosition(node: self.contentContainer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0)) + transition.updateSublayerTransformScale(node: self.contentContainer, scale: scaleFactor) - self.updateState(highlighted: self.isHighlighted, selected: self.isSelected) - } - - func animateRollTransition() { - self.backgroundNode.layer.animate(from: 0.0 as NSNumber, to: (-CGFloat.pi * 5 / 4) as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3, removeOnCompletion: false) - self.labelNode?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false) - } - - override func layout() { - super.layout() - - let size = self.bounds.size - - self.backgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.width)) - - if let labelNode = self.labelNode { - let labelSize = labelNode.measure(CGSize(width: 200.0, height: 100.0)) - labelNode.frame = CGRect(origin: CGPoint(x: floor((size.width - labelSize.width) / 2.0), y: 81.0), size: labelSize) + if self.currentText != text { + self.textNode.attributedText = NSAttributedString(string: text, font: labelFont, textColor: .white) } + let textSize = self.textNode.updateLayout(CGSize(width: 150.0, height: 100.0)) + let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height + 5.0), size: textSize) + if self.currentText.isEmpty { + self.textNode.frame = textFrame + if transition.isAnimated { + self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15) + } + } else { + transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame) + } + self.currentText = text } } diff --git a/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift b/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift index 7a90fd8b3f..769d077f3c 100644 --- a/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift +++ 
b/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift @@ -22,27 +22,66 @@ enum CallControllerButtonsMode: Equatable { } case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState) - case incoming + case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState) + case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState) +} + +private enum ButtonDescription: Equatable { + enum Key: Hashable { + case accept + case end + case enableCamera + case switchCamera + case soundOutput + case mute + } + + enum SoundOutput { + case builtin + case speaker + case bluetooth + } + + enum EndType { + case outgoing + case decline + case end + } + + case accept + case end(EndType) + case enableCamera(Bool) + case switchCamera + case soundOutput(SoundOutput) + case mute(Bool) + + var key: Key { + switch self { + case .accept: + return .accept + case .end: + return .end + case .enableCamera: + return .enableCamera + case .switchCamera: + return .switchCamera + case .soundOutput: + return .soundOutput + case .mute: + return .mute + } + } } final class CallControllerButtonsNode: ASDisplayNode { - private let acceptButton: CallControllerButtonNode - private let declineButton: CallControllerButtonNode - - private let muteButton: CallControllerButtonNode - private let endButton: CallControllerButtonNode - private let speakerButton: CallControllerButtonNode - private let swichCameraButton: CallControllerButtonNode + private var buttonNodes: [ButtonDescription.Key: CallControllerButtonItemNode] = [:] private var mode: CallControllerButtonsMode? private var validLayout: CGFloat? - var isMuted = false { - didSet { - self.muteButton.isSelected = self.isMuted - } - } + var isMuted = false + var isCameraPaused = false var accept: (() -> Void)? var mute: (() -> Void)? @@ -52,57 +91,30 @@ final class CallControllerButtonsNode: ASDisplayNode { var rotateCamera: (() -> Void)? 
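For the filled blurred appearance, `CallControllerButtonItemNode` above draws a white disc, clips to the icon's alpha mask, then fills `.clear` with the `.copy` blend mode so the glyph becomes a transparent knockout revealing the `UIVisualEffectView` behind it. A self-contained sketch of that drawing, using `UIGraphicsBeginImageContextWithOptions` directly instead of the project's `generateImage` helper; the function name is illustrative:

```swift
import UIKit

// Knockout rendering: the icon punches a transparent hole in a white disc.
func knockoutButtonImage(diameter: CGFloat, icon: UIImage) -> UIImage? {
    let size = CGSize(width: diameter, height: diameter)
    UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
    defer { UIGraphicsEndImageContext() }
    guard let context = UIGraphicsGetCurrentContext(), let cgIcon = icon.cgImage else {
        return nil
    }
    context.setFillColor(UIColor.white.cgColor)
    context.fillEllipse(in: CGRect(origin: .zero, size: size))

    let iconRect = CGRect(x: (diameter - icon.size.width) / 2.0,
                          y: (diameter - icon.size.height) / 2.0,
                          width: icon.size.width, height: icon.size.height)
    // Clip to the icon's alpha mask, then copy .clear through it.
    // (Core Graphics' flipped y-axis is ignored here for brevity.)
    context.clip(to: iconRect, mask: cgIcon)
    context.setBlendMode(.copy)
    context.setFillColor(UIColor.clear.cgColor)
    context.fill(CGRect(origin: .zero, size: size))
    return UIGraphicsGetImageFromCurrentImageContext()
}
```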
init(strings: PresentationStrings) { - self.acceptButton = CallControllerButtonNode(type: .accept, label: strings.Call_Accept) - self.acceptButton.alpha = 0.0 - self.declineButton = CallControllerButtonNode(type: .end, label: strings.Call_Decline) - self.declineButton.alpha = 0.0 - - self.muteButton = CallControllerButtonNode(type: .mute, label: nil) - self.muteButton.alpha = 0.0 - self.endButton = CallControllerButtonNode(type: .end, label: nil) - self.endButton.alpha = 0.0 - self.speakerButton = CallControllerButtonNode(type: .speaker, label: nil) - self.speakerButton.alpha = 0.0 - self.swichCameraButton = CallControllerButtonNode(type: .switchCamera, label: nil) - self.swichCameraButton.alpha = 0.0 - super.init() - - self.addSubnode(self.acceptButton) - self.addSubnode(self.declineButton) - self.addSubnode(self.muteButton) - self.addSubnode(self.endButton) - self.addSubnode(self.speakerButton) - self.addSubnode(self.swichCameraButton) - - self.acceptButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) - self.declineButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) - self.muteButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) - self.endButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) - self.speakerButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) - self.swichCameraButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) } - func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) { - let previousLayout = self.validLayout + func updateLayout(strings: PresentationStrings, constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) { self.validLayout = constrainedWidth - if let mode = self.mode, previousLayout != self.validLayout { - self.updateButtonsLayout(mode: mode, width: constrainedWidth, animated: false) + if let mode = self.mode { + self.updateButtonsLayout(strings: strings, mode: mode, width: constrainedWidth, animated: transition.isAnimated) } } - func updateMode(_ mode: CallControllerButtonsMode) { + func updateMode(strings: PresentationStrings, mode: CallControllerButtonsMode) { if self.mode != mode { let previousMode = self.mode self.mode = mode if let validLayout = self.validLayout { - self.updateButtonsLayout(mode: mode, width: validLayout, animated: previousMode != nil) + self.updateButtonsLayout(strings: strings, mode: mode, width: validLayout, animated: previousMode != nil) } } } - private func updateButtonsLayout(mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) { + private var appliedMode: CallControllerButtonsMode? 
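[Editor's note] `updateButtonsLayout`, defined just below, lays out both button rows with the same rule: spacing is capped at a maximum, compressed to honor a minimum side inset, and the resulting row is centered in the available width. A self-contained sketch of that rule follows; the function name and parameter names are illustrative, not from the diff.

```swift
import CoreGraphics

// Row layout rule mirrored from updateButtonsLayout: cap the spacing, respect
// the side insets, center the row. Names here are illustrative stand-ins.
func buttonFrames(count: Int, buttonSize: CGFloat, width: CGFloat,
                  maxSpacing: CGFloat, minSideInset: CGFloat, y: CGFloat) -> [CGRect] {
    guard count > 0 else { return [] }
    let contentWidth = CGFloat(count) * buttonSize
    let availableSpacing = width - contentWidth - minSideInset * 2.0
    let spacing = count > 1 ? min(maxSpacing, availableSpacing / CGFloat(count - 1)) : 0.0
    let rowWidth = contentWidth + CGFloat(count - 1) * spacing
    var x = ((width - rowWidth) / 2.0).rounded(.down) // matches the diff's floor(...)
    var frames: [CGRect] = []
    for _ in 0 ..< count {
        frames.append(CGRect(x: x, y: y, width: buttonSize, height: buttonSize))
        x += buttonSize + spacing
    }
    return frames
}
```

Worked example: for the three small top buttons at width 375, spacing = min(34, (375 − 180 − 68) / 2) = 34, so the row spans 248 points and starts at x = 63. Note that a single-button row in the diff divides by `count - 1 == 0` and relies on `min` discarding the resulting infinity; the sketch guards that case explicitly instead.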
+ + private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) { let transition: ContainedViewLayoutTransition if animated { transition = .animated(duration: 0.3, curve: .spring) @@ -110,147 +122,279 @@ final class CallControllerButtonsNode: ASDisplayNode { transition = .immediate } - let threeButtonSpacing: CGFloat = 28.0 - let twoButtonSpacing: CGFloat = 105.0 - let buttonSize = CGSize(width: 75.0, height: 75.0) - - let threeButtonsWidth = 3.0 * buttonSize.width + 2.0 * threeButtonSpacing - let twoButtonsWidth = 2.0 * buttonSize.width + 1.0 * twoButtonSpacing + let previousMode = self.appliedMode + self.appliedMode = mode - var origin = CGPoint(x: floor((width - threeButtonsWidth) / 2.0), y: 0.0) + var animatePositionsWithDelay = false + if let previousMode = previousMode { + switch previousMode { + case .incoming, .outgoingRinging: + if case .active = mode { + animatePositionsWithDelay = true + } + default: + break + } + } - for button in [self.muteButton, self.endButton, self.speakerButton] { - transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize)) - if button === self.speakerButton { - transition.updateFrame(node: self.swichCameraButton, frame: CGRect(origin: origin, size: buttonSize)) + let minSmallButtonSideInset: CGFloat = 34.0 + let maxSmallButtonSpacing: CGFloat = 34.0 + let smallButtonSize: CGFloat = 60.0 + let topBottomSpacing: CGFloat = 84.0 + + let maxLargeButtonSpacing: CGFloat = 115.0 + let largeButtonSize: CGFloat = 72.0 + let minLargeButtonSideInset: CGFloat = minSmallButtonSideInset - 6.0 + + struct PlacedButton { + let button: ButtonDescription + let frame: CGRect + } + + var buttons: [PlacedButton] = [] + switch mode { + case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState): + var topButtons: [ButtonDescription] = [] + var bottomButtons: [ButtonDescription] = [] + + let soundOutput: ButtonDescription.SoundOutput + switch speakerMode { + case .none, .builtin: + soundOutput = .builtin + case .speaker: + soundOutput = .speaker + case .headphones: + soundOutput = .bluetooth + case .bluetooth: + soundOutput = .bluetooth } - origin.x += buttonSize.width + threeButtonSpacing + switch videoState { + case .active, .available: + topButtons.append(.enableCamera(!self.isCameraPaused)) + topButtons.append(.mute(self.isMuted)) + topButtons.append(.switchCamera) + case .notAvailable: + topButtons.append(.mute(self.isMuted)) + topButtons.append(.soundOutput(soundOutput)) + } + + let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize + let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0 + let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1)) + let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing + var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0) + for button in topButtons { + buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize)))) + topButtonsLeftOffset += smallButtonSize + topButtonsSpacing + } + + if case .incoming = mode { + bottomButtons.append(.end(.decline)) + bottomButtons.append(.accept) + } else { + bottomButtons.append(.end(.outgoing)) + } + + let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * 
largeButtonSize + let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0 + let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1)) + let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing + var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0) + for button in bottomButtons { + buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize)))) + bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing + } + case let .active(speakerMode, videoState): + var topButtons: [ButtonDescription] = [] + + let soundOutput: ButtonDescription.SoundOutput + switch speakerMode { + case .none, .builtin: + soundOutput = .builtin + case .speaker: + soundOutput = .speaker + case .headphones: + soundOutput = .builtin + case .bluetooth: + soundOutput = .bluetooth + } + + switch videoState { + case .active, .available: + topButtons.append(.enableCamera(!self.isCameraPaused)) + topButtons.append(.mute(isMuted)) + topButtons.append(.switchCamera) + case .notAvailable: + topButtons.append(.mute(isMuted)) + topButtons.append(.soundOutput(soundOutput)) + } + + topButtons.append(.end(.end)) + + let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize + let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0 + let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1)) + let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing + var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0) + for button in topButtons { + buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: smallButtonSize, height: smallButtonSize)))) + topButtonsLeftOffset += smallButtonSize + topButtonsSpacing + } } - origin = CGPoint(x: floor((width - twoButtonsWidth) / 2.0), y: 0.0) - for button in [self.declineButton, self.acceptButton] { - transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize)) - origin.x += buttonSize.width + twoButtonSpacing + let delayIncrement = 0.015 + var validKeys: [ButtonDescription.Key] = [] + for button in buttons { + validKeys.append(button.button.key) + var buttonTransition = transition + var animateButtonIn = false + let buttonNode: CallControllerButtonItemNode + if let current = self.buttonNodes[button.button.key] { + buttonNode = current + } else { + buttonNode = CallControllerButtonItemNode() + self.buttonNodes[button.button.key] = buttonNode + self.addSubnode(buttonNode) + buttonNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside) + buttonTransition = .immediate + animateButtonIn = transition.isAnimated + } + let buttonContent: CallControllerButtonItemNode.Content + let buttonText: String + switch button.button { + case .accept: + buttonContent = CallControllerButtonItemNode.Content( + appearance: .color(.green), + image: .accept + ) + buttonText = strings.Call_Accept + case let .end(type): + buttonContent = CallControllerButtonItemNode.Content( + appearance: .color(.red), + image: .end + ) + switch 
type { + case .outgoing: + buttonText = "" + case .decline: + buttonText = strings.Call_Decline + case .end: + buttonText = strings.Call_End + } + case let .enableCamera(isEnabled): + buttonContent = CallControllerButtonItemNode.Content( + appearance: .blurred(isFilled: isEnabled), + image: .camera + ) + buttonText = strings.Call_Camera + case .switchCamera: + buttonContent = CallControllerButtonItemNode.Content( + appearance: .blurred(isFilled: false), + image: .flipCamera + ) + buttonText = strings.Call_Flip + case let .soundOutput(value): + let image: CallControllerButtonItemNode.Content.Image + var isFilled = false + switch value { + case .builtin: + image = .speaker + case .speaker: + image = .speaker + isFilled = true + case .bluetooth: + image = .bluetooth + } + buttonContent = CallControllerButtonItemNode.Content( + appearance: .blurred(isFilled: isFilled), + image: image + ) + buttonText = strings.Call_Speaker + case let .mute(isMuted): + buttonContent = CallControllerButtonItemNode.Content( + appearance: .blurred(isFilled: isMuted), + image: .mute + ) + buttonText = strings.Call_Mute + } + var buttonDelay = 0.0 + if animatePositionsWithDelay { + switch button.button.key { + case .enableCamera: + buttonDelay = 0.0 + case .mute: + buttonDelay = delayIncrement * 1.0 + case .switchCamera: + buttonDelay = delayIncrement * 2.0 + case .end: + buttonDelay = delayIncrement * 3.0 + default: + break + } + } + buttonTransition.updateFrame(node: buttonNode, frame: button.frame, delay: buttonDelay) + buttonNode.update(size: button.frame.size, content: buttonContent, text: buttonText, transition: buttonTransition) + if animateButtonIn { + buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } } - switch mode { - case .incoming: - for button in [self.declineButton, self.acceptButton] { - button.alpha = 1.0 - } - for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] { - button.alpha = 0.0 - } - case let .active(speakerMode, videoState): - for button in [self.muteButton] { - if animated && button.alpha.isZero { - button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) - } - button.alpha = 1.0 - } - switch videoState { - case .active, .available: - for button in [self.speakerButton] { - if animated && !button.alpha.isZero { - button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3) - } - button.alpha = 0.0 - } - for button in [self.swichCameraButton] { - if animated && button.alpha.isZero { - button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) - } - button.alpha = 1.0 - } - case .notAvailable: - for button in [self.swichCameraButton] { - if animated && !button.alpha.isZero { - button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3) - } - button.alpha = 0.0 - } - for button in [self.speakerButton] { - if animated && button.alpha.isZero { - button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) - } - button.alpha = 1.0 - } - } - var animatingAcceptButton = false - if self.endButton.alpha.isZero { - if animated { - if !self.acceptButton.alpha.isZero { - animatingAcceptButton = true - self.endButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) - self.acceptButton.animateRollTransition() - self.endButton.layer.animate(from: (CGFloat.pi * 5 / 4) as NSNumber, to: 0.0 as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3) - self.acceptButton.layer.animatePosition(from: 
self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak self] _ in - if let strongSelf = self { - strongSelf.acceptButton.alpha = 0.0 - strongSelf.acceptButton.layer.removeAnimation(forKey: "position") - strongSelf.acceptButton.layer.removeAnimation(forKey: "transform.rotation.z") - } + var removedKeys: [ButtonDescription.Key] = [] + for (key, button) in self.buttonNodes { + if !validKeys.contains(key) { + removedKeys.append(key) + if animated { + if case .accept = key { + if let endButton = self.buttonNodes[.end] { + transition.updateFrame(node: button, frame: endButton.frame) + if let content = button.currentContent { + button.update(size: endButton.frame.size, content: content, text: button.currentText, transition: transition) + } + transition.updateTransformScale(node: button, scale: 0.1) + transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in + button?.removeFromSupernode() }) } - self.endButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) + } else { + transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in + button?.removeFromSupernode() + }) } - self.endButton.alpha = 1.0 + } else { + button.removeFromSupernode() } - - if !self.declineButton.alpha.isZero { - if animated { - self.declineButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) - } - self.declineButton.alpha = 0.0 - } - - if self.acceptButton.alpha.isZero && !animatingAcceptButton { - self.acceptButton.alpha = 0.0 - } - - self.speakerButton.isSelected = speakerMode == .speaker - self.speakerButton.isHidden = speakerMode == .none - let speakerButtonType: CallControllerButtonType - switch speakerMode { - case .none, .builtin, .speaker: - speakerButtonType = .speaker - case .headphones: - speakerButtonType = .bluetooth - case .bluetooth: - speakerButtonType = .bluetooth - } - self.speakerButton.updateType(speakerButtonType) + } + } + for key in removedKeys { + self.buttonNodes.removeValue(forKey: key) } } - @objc func buttonPressed(_ button: CallControllerButtonNode) { - if button === self.muteButton { - self.mute?() - } else if button === self.endButton || button === self.declineButton { - self.end?() - } else if button === self.speakerButton { - self.speaker?() - } else if button === self.acceptButton { - self.accept?() - } else if button === self.swichCameraButton { - self.rotateCamera?() + @objc func buttonPressed(_ button: CallControllerButtonItemNode) { + for (key, listButton) in self.buttonNodes { + if button === listButton { + switch key { + case .accept: + self.accept?() + case .end: + self.end?() + case .enableCamera: + self.toggleVideo?() + case .switchCamera: + self.rotateCamera?() + case .soundOutput: + self.speaker?() + case .mute: + self.mute?() + } + break + } } } override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ - let buttons = [ - self.acceptButton, - self.declineButton, - self.muteButton, - self.endButton, - self.speakerButton, - self.swichCameraButton - ] - for button in buttons { - if button.isHidden || button.alpha.isZero { - continue - } + for (_, button) in self.buttonNodes { if let result = button.view.hitTest(self.view.convert(point, to: button.view), with: event) { return result } diff --git a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift index 35a5100543..e204fea8d9 100644 --- a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift +++ b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift @@ -56,34 +56,91 @@ private final class IncomingVideoNode: ASDisplayNode { } private final class OutgoingVideoNode: ASDisplayNode { + private let videoTransformContainer: ASDisplayNode private let videoView: UIView - private let switchCameraButton: HighlightableButtonNode - private let switchCamera: () -> Void + private let buttonNode: HighlightTrackingButtonNode - init(videoView: UIView, switchCamera: @escaping () -> Void) { + private var effectView: UIVisualEffectView? + private var isBlurred: Bool = false + private var isExpanded: Bool = false + + var tapped: (() -> Void)? + + init(videoView: UIView) { + self.videoTransformContainer = ASDisplayNode() + self.videoTransformContainer.clipsToBounds = true self.videoView = videoView - self.switchCameraButton = HighlightableButtonNode() - self.switchCamera = switchCamera + self.videoView.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0) + + self.buttonNode = HighlightTrackingButtonNode() super.init() - self.view.addSubview(self.videoView) - self.addSubnode(self.switchCameraButton) - self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside) + self.videoTransformContainer.view.addSubview(self.videoView) + self.addSubnode(self.videoTransformContainer) + //self.addSubnode(self.buttonNode) + + self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside) } - @objc private func buttonPressed() { - self.switchCamera() + @objc func buttonPressed() { + self.tapped?() } func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) { - transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size)) - transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0) - self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size) + let videoFrame = CGRect(origin: CGPoint(), size: size) + self.buttonNode.frame = videoFrame + self.isExpanded = isExpanded + + let previousVideoFrame = self.videoTransformContainer.frame + self.videoTransformContainer.frame = videoFrame + if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero { + transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY)) + transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height) + } + + self.videoView.frame = videoFrame + + transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: isExpanded ? 0.0 : 16.0) + if let effectView = self.effectView { + transition.updateCornerRadius(layer: effectView.layer, cornerRadius: isExpanded ? 
0.0 : 16.0) + } + } + + func updateIsBlurred(isBlurred: Bool) { + if self.isBlurred == isBlurred { + return + } + self.isBlurred = isBlurred + + if isBlurred { + if self.effectView == nil { + let effectView = UIVisualEffectView() + effectView.clipsToBounds = true + effectView.layer.cornerRadius = self.isExpanded ? 0.0 : 16.0 + self.effectView = effectView + effectView.frame = self.videoView.frame + self.view.addSubview(effectView) + } + UIView.animate(withDuration: 0.3, animations: { + self.effectView?.effect = UIBlurEffect(style: .dark) + }) + } else if let effectView = self.effectView { + UIView.animate(withDuration: 0.3, animations: { + effectView.effect = nil + }) + } } } final class CallControllerNode: ASDisplayNode { + private enum VideoNodeCorner { + case topLeft + case topRight + case bottomLeft + case bottomRight + } + private let sharedContext: SharedAccountContext private let account: Account @@ -104,6 +161,8 @@ final class CallControllerNode: ASDisplayNode { private var incomingVideoViewRequested: Bool = false private var outgoingVideoNode: OutgoingVideoNode? private var outgoingVideoViewRequested: Bool = false + private var outgoingVideoExplicitelyFullscreen: Bool = false + private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight private let backButtonArrowNode: ASImageNode private let backButtonNode: HighlightableButtonNode private let statusNode: CallControllerStatusNode @@ -121,6 +180,9 @@ final class CallControllerNode: ASDisplayNode { var isMuted: Bool = false { didSet { self.buttonsNode.isMuted = self.isMuted + if let (layout, navigationBarHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + } } } @@ -134,12 +196,15 @@ final class CallControllerNode: ASDisplayNode { var beginAudioOuputSelection: (() -> Void)? var acceptCall: (() -> Void)? var endCall: (() -> Void)? - var toggleVideo: (() -> Void)? + var setIsVideoPaused: ((Bool) -> Void)? var back: (() -> Void)? var presentCallRating: ((CallId) -> Void)? var callEnded: ((Bool) -> Void)? var dismissedInteractively: (() -> Void)? 
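[Editor's note] `updateIsBlurred` above creates its `UIVisualEffectView` lazily and animates the `effect` property inside a `UIView.animate` block, which is the supported way to fade a blur in and out. The same pattern in isolation, on a plain `UIView` for brevity (a sketch, not the node-based original):

```swift
import UIKit

// Lazy blur overlay that cross-fades in/out by animating the `effect` property.
final class BlurrableVideoView: UIView {
    private var effectView: UIVisualEffectView?
    private(set) var isBlurred = false

    func setBlurred(_ blurred: Bool) {
        guard blurred != isBlurred else { return }
        isBlurred = blurred
        if blurred, effectView == nil {
            // Created on first use only, mirroring updateIsBlurred above.
            let view = UIVisualEffectView()
            view.clipsToBounds = true
            view.frame = bounds
            addSubview(view)
            effectView = view
        }
        UIView.animate(withDuration: 0.3) {
            self.effectView?.effect = blurred ? UIBlurEffect(style: .dark) : nil
        }
    }
}
```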
+ private var isUIHidden: Bool = false + private var isVideoPaused: Bool = false + init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) { self.sharedContext = sharedContext self.account = account @@ -229,7 +294,17 @@ final class CallControllerNode: ASDisplayNode { } self.buttonsNode.toggleVideo = { [weak self] in - self?.toggleVideo?() + guard let strongSelf = self else { + return + } + strongSelf.isVideoPaused = !strongSelf.isVideoPaused + strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused) + strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused + strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused) + + if let (layout, navigationBarHeight) = strongSelf.validLayout { + strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + } } self.buttonsNode.rotateCamera = { [weak self] in @@ -302,17 +377,21 @@ final class CallControllerNode: ASDisplayNode { return } if let incomingVideoView = incomingVideoView { - strongSelf.setCurrentAudioOutput?(.speaker) let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView) strongSelf.incomingVideoNode = incomingVideoNode strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode) - strongSelf.statusNode.isHidden = true if let (layout, navigationBarHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate) + strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring)) } } }) } + default: + break + } + + switch callState.videoState { + case .active, .activeOutgoing: if !self.outgoingVideoViewRequested { self.outgoingVideoViewRequested = true self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in @@ -322,13 +401,15 @@ final class CallControllerNode: ASDisplayNode { if let outgoingVideoView = outgoingVideoView { outgoingVideoView.backgroundColor = .black outgoingVideoView.clipsToBounds = true - strongSelf.setCurrentAudioOutput?(.speaker) - let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: { - guard let strongSelf = self else { - return + if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput { + switch currentOutput { + case .speaker, .builtin: + break + default: + strongSelf.setCurrentAudioOutput?(.speaker) } - strongSelf.call.switchVideoCamera() - }) + } + let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView) strongSelf.outgoingVideoNode = outgoingVideoNode if let incomingVideoNode = strongSelf.incomingVideoNode { strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode) @@ -336,38 +417,17 @@ final class CallControllerNode: ASDisplayNode { strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode) } if let (layout, navigationBarHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate) + strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring)) } - } - }) - } - case 
.activeOutgoing: - if !self.outgoingVideoViewRequested { - self.outgoingVideoViewRequested = true - self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in - guard let strongSelf = self else { - return - } - if let outgoingVideoView = outgoingVideoView { - outgoingVideoView.backgroundColor = .black - outgoingVideoView.clipsToBounds = true - outgoingVideoView.layer.cornerRadius = 16.0 - strongSelf.setCurrentAudioOutput?(.speaker) - let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: { + /*outgoingVideoNode.tapped = { guard let strongSelf = self else { return } - strongSelf.call.switchVideoCamera() - }) - strongSelf.outgoingVideoNode = outgoingVideoNode - if let incomingVideoNode = strongSelf.incomingVideoNode { - strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode) - } else { - strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode) - } - if let (layout, navigationBarHeight) = strongSelf.validLayout { - strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate) - } + strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen + if let (layout, navigationBarHeight) = strongSelf.validLayout { + strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring)) + } + }*/ } }) } @@ -438,7 +498,7 @@ final class CallControllerNode: ASDisplayNode { if isReconnecting { return strings.Call_StatusConnecting } else { - return strings.Call_StatusOngoing(value).0 + return value } }, timestamp) if self.keyTextData?.0 != keyVisualHash { @@ -501,43 +561,60 @@ final class CallControllerNode: ASDisplayNode { } } + private var buttonsTerminationMode: CallControllerButtonsMode? 
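[Editor's note] `updateButtonsMode`, in the hunk that follows, derives the sound-output button from the audio session state: the current output selects the glyph, but the control collapses to `.none` whenever there is at most one output to choose from. A compact sketch of that mapping, using local stand-in enums rather than the real `AudioSessionOutput`/`CallControllerButtonsSpeakerMode`:

```swift
// Stand-in enums; the real types live in the audio-session and calls UI modules.
enum Output { case builtin, speaker, headphones, port }
enum SpeakerMode { case none, builtin, speaker, headphones, bluetooth }

// Mirrors updateButtonsMode: map the current output, but hide the control
// entirely when there is no alternative output to switch to.
func speakerMode(current: Output?, availableCount: Int) -> SpeakerMode {
    guard let current = current, availableCount > 1 else { return .none }
    switch current {
    case .builtin: return .builtin
    case .speaker: return .speaker
    case .headphones: return .headphones
    case .port: return .bluetooth
    }
}
```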
+ private func updateButtonsMode() { guard let callState = self.callState else { return } + var mode: CallControllerButtonsSpeakerMode = .none + if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput { + switch currentOutput { + case .builtin: + mode = .builtin + case .speaker: + mode = .speaker + case .headphones: + mode = .headphones + case .port: + mode = .bluetooth + } + if availableOutputs.count <= 1 { + mode = .none + } + } + let mappedVideoState: CallControllerButtonsMode.VideoState + switch callState.videoState { + case .notAvailable: + mappedVideoState = .notAvailable + case .available: + mappedVideoState = .available(true) + case .active: + mappedVideoState = .active + case .activeOutgoing: + mappedVideoState = .active + } + switch callState.state { - case .ringing: - self.buttonsNode.updateMode(.incoming) - default: - var mode: CallControllerButtonsSpeakerMode = .none - if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput { - switch currentOutput { - case .builtin: - mode = .builtin - case .speaker: - mode = .speaker - case .headphones: - mode = .headphones - case .port: - mode = .bluetooth - } - if availableOutputs.count <= 1 { - mode = .none - } - } - let mappedVideoState: CallControllerButtonsMode.VideoState - switch callState.videoState { - case .notAvailable: - mappedVideoState = .notAvailable - case .available: - mappedVideoState = .available(true) - case .active: - mappedVideoState = .active - case .activeOutgoing: - mappedVideoState = .active - } - self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState)) + case .ringing: + let buttonsMode: CallControllerButtonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState) + self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode) + self.buttonsTerminationMode = buttonsMode + case .waiting, .requesting: + let buttonsMode: CallControllerButtonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState) + self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode) + self.buttonsTerminationMode = buttonsMode + case .active, .connecting, .reconnecting: + let buttonsMode: CallControllerButtonsMode = .active(speakerMode: mode, videoState: mappedVideoState) + self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode) + self.buttonsTerminationMode = buttonsMode + case .terminating, .terminated: + if let buttonsTerminationMode = self.buttonsTerminationMode { + self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsTerminationMode) + } else { + self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: .active(speakerMode: mode, videoState: mappedVideoState)) + } } } @@ -568,9 +645,69 @@ final class CallControllerNode: ASDisplayNode { } } + private func calculatePreviewVideoRect(layout: ContainerViewLayout, navigationHeight: CGFloat) -> CGRect { + let buttonsHeight: CGFloat = 190.0 + let buttonsOffset: CGFloat + if layout.size.width.isEqual(to: 320.0) { + if layout.size.height.isEqual(to: 480.0) { + buttonsOffset = 60.0 + } else { + buttonsOffset = 73.0 + } + } else { + buttonsOffset = 83.0 + } + + let buttonsOriginY: CGFloat + if self.isUIHidden { + buttonsOriginY = layout.size.height + 40.0 - 80.0 + } else { + buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom + } + + let previewVideoSize = 
layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0)) + let previewVideoY: CGFloat + let previewVideoX: CGFloat + + switch self.outgoingVideoNodeCorner { + case .topLeft: + previewVideoX = 20.0 + if self.isUIHidden { + previewVideoY = layout.insets(options: .statusBar).top + 8.0 + } else { + previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0 + } + case .topRight: + previewVideoX = layout.size.width - previewVideoSize.width - 20.0 + if self.isUIHidden { + previewVideoY = layout.insets(options: .statusBar).top + 8.0 + } else { + previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0 + } + case .bottomLeft: + previewVideoX = 20.0 + if self.isUIHidden { + previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height + } else { + previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height + } + case .bottomRight: + previewVideoX = layout.size.width - previewVideoSize.width - 20.0 + if self.isUIHidden { + previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height + } else { + previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height + } + } + + return CGRect(origin: CGPoint(x: previewVideoX, y: previewVideoY), size: previewVideoSize) + } + func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) { self.validLayout = (layout, navigationBarHeight) + let overlayAlpha: CGFloat = self.isUIHidden ? 0.0 : 1.0 + transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size)) transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size)) @@ -592,6 +729,9 @@ final class CallControllerNode: ASDisplayNode { } transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: 29.0, y: navigationOffset + 11.0), size: backSize)) + transition.updateAlpha(node: self.backButtonArrowNode, alpha: overlayAlpha) + transition.updateAlpha(node: self.backButtonNode, alpha: overlayAlpha) + var statusOffset: CGFloat if layout.metrics.widthClass == .regular && layout.metrics.heightClass == .regular { if layout.size.height.isEqual(to: 1366.0) { @@ -611,7 +751,7 @@ final class CallControllerNode: ASDisplayNode { statusOffset += layout.safeInsets.top - let buttonsHeight: CGFloat = 75.0 + let buttonsHeight: CGFloat = 190.0 let buttonsOffset: CGFloat if layout.size.width.isEqual(to: 320.0) { if layout.size.height.isEqual(to: 480.0) { @@ -625,36 +765,60 @@ final class CallControllerNode: ASDisplayNode { let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition) transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight))) + transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha) let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0)) transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize)) - self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition) - let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom + self.buttonsNode.updateLayout(strings: 
self.presentationData.strings, constrainedWidth: layout.size.width, transition: transition) + let buttonsOriginY: CGFloat + if self.isUIHidden { + buttonsOriginY = layout.size.height + 40.0 - 80.0 + } else { + buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom + } transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight))) + transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha) + + let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size) + + let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight) - var outgoingVideoTransition = transition if let incomingVideoNode = self.incomingVideoNode { - if incomingVideoNode.frame.width.isZero, let outgoingVideoNode = self.outgoingVideoNode, !outgoingVideoNode.frame.width.isZero, !transition.isAnimated { - outgoingVideoTransition = .animated(duration: 0.3, curve: .easeInOut) + var incomingVideoTransition = transition + if incomingVideoNode.frame.isEmpty { + incomingVideoTransition = .immediate } - incomingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size) - incomingVideoNode.updateLayout(size: layout.size) + if self.outgoingVideoExplicitelyFullscreen { + incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame) + } else { + incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame) + } + incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size) } if let outgoingVideoNode = self.outgoingVideoNode { + var outgoingVideoTransition = transition + if outgoingVideoNode.frame.isEmpty { + outgoingVideoTransition = .immediate + } if self.incomingVideoNode == nil { - outgoingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size) - outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: transition) + outgoingVideoNode.frame = fullscreenVideoFrame + outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: outgoingVideoTransition) } else { - let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0)) - let outgoingFrame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize) - outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: outgoingFrame) - outgoingVideoNode.updateLayout(size: outgoingFrame.size, isExpanded: false, transition: outgoingVideoTransition) + if self.minimizedVideoDraggingPosition == nil { + if self.outgoingVideoExplicitelyFullscreen { + outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame) + } else { + outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame) + } + outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, isExpanded: self.outgoingVideoExplicitelyFullscreen, transition: outgoingVideoTransition) + } } } let keyTextSize = self.keyButtonNode.frame.size transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize)) + transition.updateAlpha(node: self.keyButtonNode, alpha: overlayAlpha) if let debugNode = self.debugNode { transition.updateFrame(node: debugNode, frame: CGRect(origin: CGPoint(), size: layout.size)) @@ -700,26 +864,33 @@ final class 
CallControllerNode: ASDisplayNode { if let _ = self.keyPreviewNode { self.backPressed() } else { - let point = recognizer.location(in: recognizer.view) - if self.statusNode.frame.contains(point) { - if self.easyDebugAccess { - self.presentDebugNode() - } else { - let timestamp = CACurrentMediaTime() - if self.debugTapCounter.0 < timestamp - 0.75 { - self.debugTapCounter.0 = timestamp - self.debugTapCounter.1 = 0 - } - - if self.debugTapCounter.0 >= timestamp - 0.75 { - self.debugTapCounter.0 = timestamp - self.debugTapCounter.1 += 1 - } - - if self.debugTapCounter.1 >= 10 { - self.debugTapCounter.1 = 0 - + if self.incomingVideoNode != nil || self.outgoingVideoNode != nil { + self.isUIHidden = !self.isUIHidden + if let (layout, navigationBarHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + } + } else { + let point = recognizer.location(in: recognizer.view) + if self.statusNode.frame.contains(point) { + if self.easyDebugAccess { self.presentDebugNode() + } else { + let timestamp = CACurrentMediaTime() + if self.debugTapCounter.0 < timestamp - 0.75 { + self.debugTapCounter.0 = timestamp + self.debugTapCounter.1 = 0 + } + + if self.debugTapCounter.0 >= timestamp - 0.75 { + self.debugTapCounter.0 = timestamp + self.debugTapCounter.1 += 1 + } + + if self.debugTapCounter.1 >= 10 { + self.debugTapCounter.1 = 0 + + self.presentDebugNode() + } } } } @@ -749,36 +920,170 @@ final class CallControllerNode: ASDisplayNode { } } - @objc func panGesture(_ recognizer: UIPanGestureRecognizer) { - switch recognizer.state { - case .changed: - let offset = recognizer.translation(in: self.view).y - var bounds = self.bounds - bounds.origin.y = -offset - self.bounds = bounds - case .ended: - let velocity = recognizer.velocity(in: self.view).y - if abs(velocity) < 100.0 { - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint() - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) + private var minimizedVideoInitialPosition: CGPoint? + private var minimizedVideoDraggingPosition: CGPoint? + + private func nodeLocationForPosition(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> VideoNodeCorner { + let layoutInsets = UIEdgeInsets() + var result = CGPoint() + if position.x < layout.size.width / 2.0 { + result.x = 0.0 + } else { + result.x = 1.0 + } + if position.y < layoutInsets.top + (layout.size.height - layoutInsets.bottom - layoutInsets.top) / 2.0 { + result.y = 0.0 + } else { + result.y = 1.0 + } + + let currentPosition = result + + let angleEpsilon: CGFloat = 30.0 + var shouldHide = false + + if (velocity.x * velocity.x + velocity.y * velocity.y) >= 500.0 * 500.0 { + let x = velocity.x + let y = velocity.y + + var angle = atan2(y, x) * 180.0 / CGFloat.pi * -1.0 + if angle < 0.0 { + angle += 360.0 + } + + if currentPosition.x.isZero && currentPosition.y.isZero { + if ((angle > 0 && angle < 90 - angleEpsilon) || angle > 360 - angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } else if (angle > 180 + angleEpsilon && angle < 270 + angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } else if (angle > 270 + angleEpsilon && angle < 360 - angleEpsilon) { + result.x = 1.0 + result.y = 1.0 } else { - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? 
-bounds.height: bounds.height) - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in - self?.dismissedInteractively?() - }) + shouldHide = true + } + } else if !currentPosition.x.isZero && currentPosition.y.isZero { + if (angle > 90 + angleEpsilon && angle < 180 + angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (angle > 270 - angleEpsilon && angle < 360 - angleEpsilon) { + result.x = 1.0 + result.y = 1.0 + } + else if (angle > 180 + angleEpsilon && angle < 270 - angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } + else { + shouldHide = true + } + } else if currentPosition.x.isZero && !currentPosition.y.isZero { + if (angle > 90 - angleEpsilon && angle < 180 - angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (angle < angleEpsilon || angle > 270 + angleEpsilon) { + result.x = 1.0 + result.y = 1.0 + } + else if (angle > angleEpsilon && angle < 90 - angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } + else if (!shouldHide) { + shouldHide = true + } + } else if !currentPosition.x.isZero && !currentPosition.y.isZero { + if (angle > angleEpsilon && angle < 90 + angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } + else if (angle > 180 - angleEpsilon && angle < 270 - angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } + else if (angle > 90 + angleEpsilon && angle < 180 - angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (!shouldHide) { + shouldHide = true + } + } + } + + if result.x.isZero { + if result.y.isZero { + return .topLeft + } else { + return .bottomLeft + } + } else { + if result.y.isZero { + return .topRight + } else { + return .bottomRight + } + } + } + + @objc private func panGesture(_ recognizer: UIPanGestureRecognizer) { + switch recognizer.state { + case .began: + let location = recognizer.location(in: self.view) + //let translation = recognizer.translation(in: self.view) + //location.x += translation.x + //location.y += translation.y + if let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) { + self.minimizedVideoInitialPosition = outgoingVideoNode.position + } else { + self.minimizedVideoInitialPosition = nil + } + case .changed: + if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition { + let translation = recognizer.translation(in: self.view) + let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y) + self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition + outgoingVideoNode.position = minimizedVideoDraggingPosition + } else { + let offset = recognizer.translation(in: self.view).y + var bounds = self.bounds + bounds.origin.y = -offset + self.bounds = bounds + } + case .cancelled, .ended: + if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition { + self.minimizedVideoInitialPosition = nil + self.minimizedVideoDraggingPosition = nil + + if let (layout, navigationHeight) = self.validLayout { + self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view)) + + let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: 
navigationHeight) + outgoingVideoNode.frame = videoFrame + outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil) + } + } else { + let velocity = recognizer.velocity(in: self.view).y + if abs(velocity) < 100.0 { + var bounds = self.bounds + let previous = bounds + bounds.origin = CGPoint() + self.bounds = bounds + self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) + } else { + var bounds = self.bounds + let previous = bounds + bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height: bounds.height) + self.bounds = bounds + self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in + self?.dismissedInteractively?() + }) + } } - case .cancelled: - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint() - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) default: break } diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift index 6c4da73a80..53fdce80a0 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift @@ -22,6 +22,7 @@ private final class PresentationCallToneRenderer { private let toneRenderer: MediaPlayerAudioRenderer private var toneRendererAudioSession: MediaPlayerAudioSessionCustomControl? private var toneRendererAudioSessionActivated = false + private let audioLevelPipe = ValuePipe() init(tone: PresentationCallTone) { let queue = Queue.mainQueue() @@ -33,7 +34,7 @@ private final class PresentationCallToneRenderer { self.toneRenderer = MediaPlayerAudioRenderer(audioSession: .custom({ control in return controlImpl?(control) ?? EmptyDisposable - }), playAndRecord: false, forceAudioToSpeaker: false, baseRate: 1.0, updatedRate: {}, audioPaused: {}) + }), playAndRecord: false, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: self.audioLevelPipe, updatedRate: {}, audioPaused: {}) controlImpl = { [weak self] control in queue.async { @@ -190,7 +191,7 @@ public final class PresentationCallImpl: PresentationCall { private var sessionStateDisposable: Disposable? - private let statePromise = ValuePromise(PresentationCallState(state: .waiting, videoState: .notAvailable, remoteVideoState: .inactive), ignoreRepeated: true) + private let statePromise = ValuePromise() public var state: Signal { return self.statePromise.get() } @@ -233,7 +234,9 @@ public final class PresentationCallImpl: PresentationCall { private var droppedCall = false private var dropCallKitCallTimer: SwiftSignalKit.Timer? - init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) 
-> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal) { + private var videoCapturer: OngoingCallVideoCapturer? + + init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal, startWithVideo: Bool) { self.account = account self.audioSession = audioSession self.callSessionManager = callSessionManager @@ -259,6 +262,13 @@ public final class PresentationCallImpl: PresentationCall { self.isOutgoing = isOutgoing self.isVideo = initialState?.type == .video self.peer = peer + self.isVideo = startWithVideo + if self.isVideo { + self.videoCapturer = OngoingCallVideoCapturer() + self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive)) + } else { + self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive)) + } self.serializedData = serializedData self.dataSaving = dataSaving @@ -440,13 +450,17 @@ public final class PresentationCallImpl: PresentationCall { mappedRemoteVideoState = .active } } else { - mappedVideoState = .notAvailable + if self.isVideo { + mappedVideoState = .activeOutgoing + } else { + mappedVideoState = .notAvailable + } mappedRemoteVideoState = .inactive } switch sessionState.state { case .ringing: - presentationState = PresentationCallState(state: .ringing, videoState: .notAvailable, remoteVideoState: .inactive) + presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) if previous == nil || previousControl == nil { if !self.reportedIncomingCall { self.reportedIncomingCall = true @@ -509,7 +523,7 @@ public final class PresentationCallImpl: PresentationCall { presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) } } else { - presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: .notAvailable, remoteVideoState: .inactive) + presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) } } @@ -523,8 +537,9 @@ public final class PresentationCallImpl: PresentationCall { if let _ = audioSessionControl, !wasActive || previousControl == nil { let logName = "\(id.id)_\(id.accessHash)" - let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: 
self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, isVideo: sessionState.type == .video, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName) + let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName) self.ongoingContext = ongoingContext + ongoingContext.setIsMuted(self.isMutedValue) self.debugInfoValue.set(ongoingContext.debugInfo()) @@ -718,8 +733,8 @@ public final class PresentationCallImpl: PresentationCall { self.ongoingContext?.setEnableVideo(value) } - public func switchVideoCamera() { - self.ongoingContext?.switchVideoCamera() + public func setOutgoingVideoIsPaused(_ isPaused: Bool) { + self.videoCapturer?.setIsVideoEnabled(!isPaused) } public func setCurrentAudioOutput(_ output: AudioSessionOutput) { @@ -748,6 +763,10 @@ public final class PresentationCallImpl: PresentationCall { } public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) { - self.ongoingContext?.makeOutgoingVideoView(completion: completion) + self.videoCapturer?.makeOutgoingVideoView(completion: completion) + } + + public func switchVideoCamera() { + self.videoCapturer?.switchCamera() } } diff --git a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift index d0a41bf7ff..c85864c2aa 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift @@ -278,52 +278,6 @@ public final class PresentationCallManagerImpl: PresentationCallManager { self.callSettingsDisposable?.dispose() } - public func injectRingingStateSynchronously(account: Account, ringingState: CallSessionRingingState, callSession: CallSession) { - if self.currentCall != nil { - return - } - - let semaphore = DispatchSemaphore(value: 0) - var data: (PreferencesView, AccountSharedDataView, Peer?)? - let _ = combineLatest( - account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration]) - |> take(1), - accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings]) - |> take(1), - account.postbox.transaction { transaction -> Peer? in - return transaction.getPeer(ringingState.peerId) - } - ).start(next: { preferences, sharedData, peer in - data = (preferences, sharedData, peer) - semaphore.signal() - }) - semaphore.wait() - - if let (preferences, sharedData, maybePeer) = data, let peer = maybePeer { - let configuration = preferences.values[PreferencesKeys.voipConfiguration] as? VoipConfiguration ?? .defaultValue - let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let derivedState = preferences.values[ApplicationSpecificPreferencesKeys.voipDerivedState] as? VoipDerivedState ?? 
.default - let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings - - let enableCallKit = true - - let call = PresentationCallImpl(account: account, audioSession: self.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(self.callKitIntegration, settings: self.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: self.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: self.getDeviceAccessData, initialState: callSession, internalId: ringingState.id, peerId: ringingState.peerId, isOutgoing: false, peer: peer, proxyServer: self.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: .none, updatedNetworkType: account.networkType) - self.updateCurrentCall(call) - self.currentCallPromise.set(.single(call)) - self.hasActiveCallsPromise.set(true) - self.removeCurrentCallDisposable.set((call.canBeRemoved - |> deliverOnMainQueue).start(next: { [weak self, weak call] value in - if value, let strongSelf = self, let call = call { - if strongSelf.currentCall === call { - strongSelf.updateCurrentCall(nil) - strongSelf.currentCallPromise.set(.single(nil)) - strongSelf.hasActiveCallsPromise.set(false) - } - } - })) - } - } - private func ringingStatesUpdated(_ ringingStates: [(Account, Peer, CallSessionRingingState, Bool, NetworkType)], enableCallKit: Bool) { if let firstState = ringingStates.first { if self.currentCall == nil { @@ -338,7 +292,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType) + let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? 
callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo) strongSelf.updateCurrentCall(call) strongSelf.currentCallPromise.set(.single(call)) strongSelf.hasActiveCallsPromise.set(true) @@ -491,7 +445,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType) + let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo) strongSelf.updateCurrentCall(call) strongSelf.currentCallPromise.set(.single(call)) strongSelf.hasActiveCallsPromise.set(true) diff --git a/submodules/TelegramCore/Sources/ApiGroupOrChannel.swift b/submodules/TelegramCore/Sources/ApiGroupOrChannel.swift index f930f5b555..fdc02d6982 100644 --- a/submodules/TelegramCore/Sources/ApiGroupOrChannel.swift +++ b/submodules/TelegramCore/Sources/ApiGroupOrChannel.swift @@ -7,8 +7,7 @@ import SyncCore func imageRepresentationsForApiChatPhoto(_ photo: Api.ChatPhoto) -> [TelegramMediaImageRepresentation] { var representations: [TelegramMediaImageRepresentation] = [] switch photo { - case let .chatPhoto(photoSmall, photoBig, dcId): - + case let .chatPhoto(flags, photoSmall, photoBig, dcId): let smallResource: TelegramMediaResource let fullSizeResource: TelegramMediaResource switch photoSmall 
{ diff --git a/submodules/TelegramCore/Sources/CallSessionManager.swift b/submodules/TelegramCore/Sources/CallSessionManager.swift index dae749103b..55c5657aec 100644 --- a/submodules/TelegramCore/Sources/CallSessionManager.swift +++ b/submodules/TelegramCore/Sources/CallSessionManager.swift @@ -107,9 +107,10 @@ typealias CallSessionStableId = Int64 public struct CallSessionRingingState: Equatable { public let id: CallSessionInternalId public let peerId: PeerId + public let isVideo: Bool public static func ==(lhs: CallSessionRingingState, rhs: CallSessionRingingState) -> Bool { - return lhs.id == rhs.id && lhs.peerId == rhs.peerId + return lhs.id == rhs.id && lhs.peerId == rhs.peerId && lhs.isVideo == rhs.isVideo } } @@ -365,7 +366,7 @@ private final class CallSessionManagerContext { var ringingContexts: [CallSessionRingingState] = [] for (id, context) in self.contexts { if case .ringing = context.state { - ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId)) + ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId, isVideo: context.type == .video)) } } return ringingContexts diff --git a/submodules/TelegramCore/Sources/PeerPhotoUpdater.swift b/submodules/TelegramCore/Sources/PeerPhotoUpdater.swift index 542bb2adef..71e3909e6f 100644 --- a/submodules/TelegramCore/Sources/PeerPhotoUpdater.swift +++ b/submodules/TelegramCore/Sources/PeerPhotoUpdater.swift @@ -96,26 +96,25 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan } return .single((.progress(mappedProgress), photoResult.resource)) case let .inputFile(file): - if peer is TelegramUser { - var videoFile: Api.InputFile? - if let videoResult = videoResult { - switch videoResult.content { - case .error: - return .fail(.generic) - case let .result(resultData): - switch resultData { - case let .progress(progress): - let mappedProgress = 0.2 + progress * 0.8 - return .single((.progress(mappedProgress), photoResult.resource)) - case let .inputFile(file): - videoFile = file - break - default: - return .fail(.generic) - } - } + var videoFile: Api.InputFile? + if let videoResult = videoResult { + switch videoResult.content { + case .error: + return .fail(.generic) + case let .result(resultData): + switch resultData { + case let .progress(progress): + let mappedProgress = 0.2 + progress * 0.8 + return .single((.progress(mappedProgress), photoResult.resource)) + case let .inputFile(file): + videoFile = file + break + default: + return .fail(.generic) + } } - + } + if peer is TelegramUser { var flags: Int32 = (1 << 0) if let _ = videoFile { flags |= (1 << 1) @@ -191,11 +190,19 @@ public func updatePeerPhotoInternal(postbox: Postbox, network: Network, stateMan } |> mapError {_ in return UploadPeerPhotoError.generic} } } else { + var flags: Int32 = (1 << 0) + if let _ = videoFile { + flags |= (1 << 1) + if let _ = videoStartTimestamp { + flags |= (1 << 2) + } + } + let request: Signal if let peer = peer as? TelegramGroup { - request = network.request(Api.functions.messages.editChatPhoto(chatId: peer.id.id, photo: .inputChatUploadedPhoto(file: file))) + request = network.request(Api.functions.messages.editChatPhoto(chatId: peer.id.id, photo: .inputChatUploadedPhoto(flags: flags, file: file, video: videoFile, videoStartTs: videoStartTimestamp))) } else if let peer = peer as? 
TelegramChannel, let inputChannel = apiInputChannel(peer) { - request = network.request(Api.functions.channels.editPhoto(channel: inputChannel, photo: .inputChatUploadedPhoto(file: file))) + request = network.request(Api.functions.channels.editPhoto(channel: inputChannel, photo: .inputChatUploadedPhoto(flags: flags, file: file, video: videoFile, videoStartTs: videoStartTimestamp))) } else { assertionFailure() request = .complete() diff --git a/submodules/TelegramCore/Sources/TelegramUser.swift b/submodules/TelegramCore/Sources/TelegramUser.swift index e2a925cd8e..d81a8a1220 100644 --- a/submodules/TelegramCore/Sources/TelegramUser.swift +++ b/submodules/TelegramCore/Sources/TelegramUser.swift @@ -7,7 +7,7 @@ import SyncCore func parsedTelegramProfilePhoto(_ photo: Api.UserProfilePhoto) -> [TelegramMediaImageRepresentation] { var representations: [TelegramMediaImageRepresentation] = [] switch photo { - case let .userProfilePhoto(_, photoSmall, photoBig, dcId): + case let .userProfilePhoto(flags, _, photoSmall, photoBig, dcId): let smallResource: TelegramMediaResource let fullSizeResource: TelegramMediaResource switch photoSmall { diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json new file mode 100644 index 0000000000..ac8c955846 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_accept.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf new file mode 100644 index 0000000000..b8eb92df16 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json new file mode 100644 index 0000000000..1a290513b6 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_video.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf new file mode 100644 index 0000000000..436c916812 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json new file mode 100644 index 0000000000..ffce8d6fc5 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_decline.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf 
b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf new file mode 100644 index 0000000000..5f765d4e0b Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png deleted file mode 100644 index 07403f47ec..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png deleted file mode 100644 index 62a62518d8..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json index da57014646..1d8c3321c7 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallMuteIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallMuteIcon@3x.png", - "scale" : "3x" + "filename" : "ic_calls_mute.pdf", + "idiom" : "universal" } ], "info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf new file mode 100644 index 0000000000..22a473dbc8 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png deleted file mode 100644 index f3587ea0f3..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png deleted file mode 100644 index 8237f34688..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json deleted file mode 100644 index 8d3bffbcf4..0000000000 --- a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "images" : [ - { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallPhoneIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallPhoneIcon@3x.png", - "scale" : "3x" - } - ], - "info" : { - 
"version" : 1, - "author" : "xcode" - } -} \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png deleted file mode 100644 index 9b5e566eb4..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png deleted file mode 100644 index 0026e6063d..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json index 6995cd3e65..d0d69abee5 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallRouteSpeaker@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallRouteSpeaker@3x.png", - "scale" : "3x" + "filename" : "ic_calls_speaker.pdf", + "idiom" : "universal" } ], "info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf new file mode 100644 index 0000000000..22af6e39c6 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png deleted file mode 100644 index 996959b567..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png deleted file mode 100644 index 345c9a8f3b..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json index 6ae257da6a..d0d69abee5 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallSpeakerIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallSpeakerIcon@3x.png", - "scale" : "3x" + "filename" : "ic_calls_speaker.pdf", + "idiom" : "universal" } ], 
"info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf new file mode 100644 index 0000000000..22af6e39c6 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json index 0dd1dc8086..389dd744c8 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json @@ -1,7 +1,7 @@ { "images" : [ { - "filename" : "Video.pdf", + "filename" : "ic_calls_cameraflip.pdf", "idiom" : "universal" } ], diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf deleted file mode 100644 index 71f35844b3..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf new file mode 100644 index 0000000000..4f0db952ed Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json new file mode 100644 index 0000000000..9796eb0aa2 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_tlogo.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf new file mode 100644 index 0000000000..6da8161ecb Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf differ diff --git a/submodules/TelegramUI/Sources/BlobView.swift b/submodules/TelegramUI/Sources/BlobView.swift index 3f3d6e1613..72bb99564b 100644 --- a/submodules/TelegramUI/Sources/BlobView.swift +++ b/submodules/TelegramUI/Sources/BlobView.swift @@ -5,7 +5,7 @@ import LegacyComponents private enum Constants { - static let maxLevel: CGFloat = 5 + static let maxLevel: CGFloat = 4 } final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration { @@ -25,30 +25,30 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration pointsCount: 8, minRandomness: 1, maxRandomness: 1, - minSpeed: 1, + minSpeed: 1.5, maxSpeed: 7, - minScale: 0.55, - maxScale: 0.9, + minScale: 0.52, + maxScale: 0.87, scaleSpeed: 0.2, - isCircle: true + isCircle: false ) private let bigBlob = BlobView( pointsCount: 8, minRandomness: 1, maxRandomness: 1, - minSpeed: 1, + minSpeed: 1.5, 
maxSpeed: 7, - minScale: 0.55, + minScale: 0.57, maxScale: 1, scaleSpeed: 0.2, - isCircle: true + isCircle: false ) override init(frame: CGRect) { super.init(frame: frame) addSubview(bigBlob) - //addSubview(mediumBlob) + addSubview(mediumBlob) addSubview(smallBlob) } @@ -65,13 +65,19 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration func updateLevel(_ level: CGFloat) { let normalizedLevel = min(1, max(level / Constants.maxLevel, 0)) + smallBlob.updateSpeedLevel(to: normalizedLevel) + mediumBlob.updateSpeedLevel(to: normalizedLevel) + bigBlob.updateSpeedLevel(to: normalizedLevel) + } + + func tick(_ level: CGFloat) { + let normalizedLevel = min(1, max(level / Constants.maxLevel, 0)) + smallBlob.level = normalizedLevel mediumBlob.level = normalizedLevel bigBlob.level = normalizedLevel } - func tick(_ level: CGFloat) { } - override func layoutSubviews() { super.layoutSubviews() @@ -104,22 +110,25 @@ final class BlobView: UIView { let maxScale: CGFloat let scaleSpeed: CGFloat + var scaleLevelsToBalance = [CGFloat]() + // If true ignores randomness and pointsCount let isCircle: Bool var level: CGFloat = 0 { didSet { - speedLevel = max(level, speedLevel) - scaleLevel = max(level, scaleLevel) - - if abs(scaleLevel - lastScaleLevel) > 0.4 { - animateToNewScale() - } + CATransaction.begin() + CATransaction.setDisableActions(true) + let lv = minScale + (maxScale - minScale) * level + shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1) + CATransaction.commit() } } private var speedLevel: CGFloat = 0 private var scaleLevel: CGFloat = 0 + + private var lastSpeedLevel: CGFloat = 0 private var lastScaleLevel: CGFloat = 0 private let shapeLayer: CAShapeLayer = { @@ -190,18 +199,31 @@ final class BlobView: UIView { shapeLayer.fillColor = color.cgColor } + func updateSpeedLevel(to newSpeedLevel: CGFloat) { + speedLevel = max(speedLevel, newSpeedLevel) + + if abs(lastSpeedLevel - newSpeedLevel) > 0.5 { + animateToNewShape() + } + } + func startAnimating() { animateToNewShape() - animateToNewScale() } func animateToNewScale() { - let isDownscale = lastScaleLevel > scaleLevel - lastScaleLevel = scaleLevel + let scaleLevelForAnimation: CGFloat = { + if scaleLevelsToBalance.isEmpty { + return 0 + } + return scaleLevelsToBalance.reduce(0, +) / CGFloat(scaleLevelsToBalance.count) + }() + let isDownscale = lastScaleLevel > scaleLevelForAnimation + lastScaleLevel = scaleLevelForAnimation shapeLayer.pop_removeAnimation(forKey: "scale") - let currentScale = minScale + (maxScale - minScale) * scaleLevel + let currentScale = minScale + (maxScale - minScale) * scaleLevelForAnimation let scaleAnimation = POPBasicAnimation(propertyNamed: kPOPLayerScaleXY)! scaleAnimation.toValue = CGPoint(x: currentScale, y: currentScale) scaleAnimation.duration = isDownscale ? 
0.45 : CFTimeInterval(scaleSpeed) @@ -213,6 +235,7 @@ final class BlobView: UIView { shapeLayer.pop_add(scaleAnimation, forKey: "scale") scaleLevel = 0 + scaleLevelsToBalance.removeAll() } func animateToNewShape() { @@ -221,7 +244,7 @@ final class BlobView: UIView { if pop_animation(forKey: "blob") != nil { fromPoints = currentPoints toPoints = nil - pop_removeAllAnimations() + shapeLayer.pop_removeAnimation(forKey: "blob") } if fromPoints == nil { @@ -257,6 +280,7 @@ final class BlobView: UIView { animation.toValue = 1 pop_add(animation, forKey: "blob") + lastSpeedLevel = speedLevel speedLevel = 0 } diff --git a/submodules/TelegramUI/Sources/ChatController.swift b/submodules/TelegramUI/Sources/ChatController.swift index 871eff3768..58e3384859 100644 --- a/submodules/TelegramUI/Sources/ChatController.swift +++ b/submodules/TelegramUI/Sources/ChatController.swift @@ -321,6 +321,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G private let peekData: ChatPeekTimeout? private let peekTimerDisposable = MetaDisposable() + private var shouldDisplayDownButton = false + private var hasEmbeddedTitleContent = false private var isEmbeddedTitleContentHidden = false @@ -2544,6 +2546,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } else { strongSelf.audioRecorderStatusDisposable = nil } + strongSelf.updateDownButtonVisibility() } } }) @@ -2592,6 +2595,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G videoRecorder.lockVideo() } } + strongSelf.updateDownButtonVisibility() if let previousVideoRecorderValue = previousVideoRecorderValue { previousVideoRecorderValue.dismissVideo() @@ -3091,32 +3095,33 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G } self.chatDisplayNode.historyNode.contentPositionChanged = { [weak self] offset in - if let strongSelf = self { - let offsetAlpha: CGFloat - let plainInputSeparatorAlpha: CGFloat - switch offset { - case let .known(offset): - if offset < 40.0 { - offsetAlpha = 0.0 - } else { - offsetAlpha = 1.0 - } - if offset < 4.0 { - plainInputSeparatorAlpha = 0.0 - } else { - plainInputSeparatorAlpha = 1.0 - } - case .unknown: - offsetAlpha = 1.0 - plainInputSeparatorAlpha = 1.0 - case .none: + guard let strongSelf = self else { return } + + let offsetAlpha: CGFloat + let plainInputSeparatorAlpha: CGFloat + switch offset { + case let .known(offset): + if offset < 40.0 { offsetAlpha = 0.0 + } else { + offsetAlpha = 1.0 + } + if offset < 4.0 { plainInputSeparatorAlpha = 0.0 - } - - strongSelf.chatDisplayNode.navigateButtons.displayDownButton = !offsetAlpha.isZero - strongSelf.chatDisplayNode.updatePlainInputSeparatorAlpha(plainInputSeparatorAlpha, transition: .animated(duration: 0.2, curve: .easeInOut)) + } else { + plainInputSeparatorAlpha = 1.0 + } + case .unknown: + offsetAlpha = 1.0 + plainInputSeparatorAlpha = 1.0 + case .none: + offsetAlpha = 0.0 + plainInputSeparatorAlpha = 0.0 } + + strongSelf.shouldDisplayDownButton = !offsetAlpha.isZero + strongSelf.updateDownButtonVisibility() + strongSelf.chatDisplayNode.updatePlainInputSeparatorAlpha(plainInputSeparatorAlpha, transition: .animated(duration: 0.2, curve: .easeInOut)) } self.chatDisplayNode.historyNode.scrolledToIndex = { [weak self] toIndex in @@ -7636,6 +7641,11 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G self.chatDisplayNode.historyNode.scrollToEndOfHistory() } + func updateDownButtonVisibility() { + let recordingMediaMessage = 
self.audioRecorderValue != nil || self.videoRecorderValue != nil + self.chatDisplayNode.navigateButtons.displayDownButton = self.shouldDisplayDownButton && !recordingMediaMessage + } + func updateTextInputState(_ textInputState: ChatTextInputState) { self.updateChatPresentationInterfaceState(interactive: false, { state in state.updatedInterfaceState({ state in diff --git a/submodules/TelegramUI/Sources/ChatMessageFileBubbleContentNode.swift b/submodules/TelegramUI/Sources/ChatMessageFileBubbleContentNode.swift index df47dae88f..d05a91106a 100644 --- a/submodules/TelegramUI/Sources/ChatMessageFileBubbleContentNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageFileBubbleContentNode.swift @@ -11,6 +11,22 @@ import TelegramUIPreferences class ChatMessageFileBubbleContentNode: ChatMessageBubbleContentNode { private let interactiveFileNode: ChatMessageInteractiveFileNode + override var visibility: ListViewItemNodeVisibility { + didSet { + var wasVisible = false + if case .visible = oldValue { + wasVisible = true + } + var isVisible = false + if case .visible = self.visibility { + isVisible = true + } + if wasVisible != isVisible { + self.interactiveFileNode.visibility = isVisible + } + } + } + required init() { self.interactiveFileNode = ChatMessageInteractiveFileNode() diff --git a/submodules/TelegramUI/Sources/ChatMessageInteractiveFileNode.swift b/submodules/TelegramUI/Sources/ChatMessageInteractiveFileNode.swift index 09f27fc685..41a9428ad2 100644 --- a/submodules/TelegramUI/Sources/ChatMessageInteractiveFileNode.swift +++ b/submodules/TelegramUI/Sources/ChatMessageInteractiveFileNode.swift @@ -33,6 +33,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { private var iconNode: TransformImageNode? private var statusNode: SemanticStatusNode? + private var playbackAudioLevelView: VoiceBlobView? + private var displayLinkAnimator: ConstantDisplayLinkAnimator? private var streamingStatusNode: RadialStatusNode? private var tapRecognizer: UITapGestureRecognizer? @@ -40,6 +42,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { private let playbackStatusDisposable = MetaDisposable() private let playbackStatus = Promise() + private let audioLevelEventsDisposable = MetaDisposable() + private var playerUpdateTimer: SwiftSignalKit.Timer? private var playerStatus: MediaPlayerStatus? { didSet { @@ -54,6 +58,30 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { } } + private var inputAudioLevel: CGFloat = 0.0 + private var currentAudioLevel: CGFloat = 0.0 + + var visibility: Bool = false { + didSet { + if self.visibility != oldValue { + if self.visibility { + if self.displayLinkAnimator == nil { + self.displayLinkAnimator = ConstantDisplayLinkAnimator(update: { [weak self] in + guard let strongSelf = self else { + return + } + strongSelf.currentAudioLevel = strongSelf.currentAudioLevel * 0.9 + strongSelf.inputAudioLevel * 0.1 + strongSelf.playbackAudioLevelView?.tick(strongSelf.currentAudioLevel) + }) + } + self.displayLinkAnimator?.isPaused = false + } else { + self.displayLinkAnimator?.isPaused = true + } + } + } + } + private let fetchControls = Atomic(value: nil) private var resourceStatus: FileMediaResourceStatus? private var actualFetchStatus: MediaResourceStatus? 
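The hunk above feeds live playback audio levels into the voice-message bubble: `inputAudioLevel` holds the most recent raw level from the player, and a `ConstantDisplayLinkAnimator` resamples it into `currentAudioLevel` once per frame with an exponential low-pass (90% running value, 10% new input) before ticking the blob view. A minimal standalone sketch of that smoothing step; the `AudioLevelSmoother` type and `tick` name are illustrative, not part of this change:

import CoreGraphics

// Exponential low-pass as used by the display-link update above: keep
// 90% of the running value and blend in 10% of the latest raw level,
// so the blob eases toward the input instead of jumping.
final class AudioLevelSmoother {
    private(set) var currentLevel: CGFloat = 0.0
    var inputLevel: CGFloat = 0.0

    // Call once per display-link frame; returns the smoothed level.
    func tick() -> CGFloat {
        self.currentLevel = self.currentLevel * 0.9 + self.inputLevel * 0.1
        return self.currentLevel
    }
}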
@@ -120,6 +148,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { self.statusDisposable.dispose() self.playbackStatusDisposable.dispose() self.fetchDisposable.dispose() + self.audioLevelEventsDisposable.dispose() } override func didLoad() { @@ -204,6 +233,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { var updateImageSignal: Signal<(TransformImageArguments) -> DrawingContext?, NoError>? var updatedStatusSignal: Signal<(FileMediaResourceStatus, MediaResourceStatus?), NoError>? + var updatedAudioLevelEventsSignal: Signal? var updatedPlaybackStatusSignal: Signal? var updatedFetchControls: FetchControls? @@ -241,11 +271,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { |> map { resourceStatus, actualFetchStatus -> (FileMediaResourceStatus, MediaResourceStatus?) in return (resourceStatus, actualFetchStatus) } + updatedAudioLevelEventsSignal = messageFileMediaPlaybackAudioLevelEvents(context: context, file: file, message: message, isRecentActions: isRecentActions) } else { updatedStatusSignal = messageFileMediaResourceStatus(context: context, file: file, message: message, isRecentActions: isRecentActions) |> map { resourceStatus -> (FileMediaResourceStatus, MediaResourceStatus?) in return (resourceStatus, nil) } + updatedAudioLevelEventsSignal = messageFileMediaPlaybackAudioLevelEvents(context: context, file: file, message: message, isRecentActions: isRecentActions) } updatedPlaybackStatusSignal = messageFileMediaPlaybackStatus(context: context, file: file, message: message, isRecentActions: isRecentActions) } @@ -622,6 +654,17 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { })) } + if let updatedAudioLevelEventsSignal = updatedAudioLevelEventsSignal { + strongSelf.audioLevelEventsDisposable.set((updatedAudioLevelEventsSignal + |> deliverOnMainQueue).start(next: { value in + guard let strongSelf = self else { + return + } + strongSelf.inputAudioLevel = CGFloat(value) + strongSelf.playbackAudioLevelView?.updateLevel(CGFloat(value)) + })) + } + if let updatedPlaybackStatusSignal = updatedPlaybackStatusSignal { strongSelf.playbackStatus.set(updatedPlaybackStatusSignal) strongSelf.playbackStatusDisposable.set((updatedPlaybackStatusSignal |> deliverOnMainQueue).start(next: { [weak strongSelf] status in @@ -636,6 +679,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { strongSelf.waveformNode.displaysAsynchronously = !presentationData.isPreview strongSelf.statusNode?.displaysAsynchronously = !presentationData.isPreview strongSelf.statusNode?.frame = progressFrame + strongSelf.playbackAudioLevelView?.frame = progressFrame.insetBy(dx: -20.0, dy: -20.0) strongSelf.progressFrame = progressFrame strongSelf.streamingCacheStatusFrame = streamingCacheStatusFrame strongSelf.fileIconImage = fileIconImage @@ -816,6 +860,14 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode { let statusNode = SemanticStatusNode(backgroundNodeColor: backgroundNodeColor, foregroundNodeColor: foregroundNodeColor) self.statusNode = statusNode statusNode.frame = progressFrame + + if self.playbackAudioLevelView == nil, false { + let playbackAudioLevelView = VoiceBlobView(frame: progressFrame.insetBy(dx: -20.0, dy: -20.0)) + playbackAudioLevelView.setColor(presentationData.theme.theme.chat.inputPanel.actionControlFillColor) + self.playbackAudioLevelView = playbackAudioLevelView + self.view.addSubview(playbackAudioLevelView) + } + self.addSubnode(statusNode) } else if let statusNode = self.statusNode { statusNode.backgroundNodeColor = backgroundNodeColor 
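Throughout these hunks the subscription plumbing follows SwiftSignalKit's disposable idiom: a `MetaDisposable` owns the live subscription, `deliverOnMainQueue` marshals values onto the UI thread, and setting a new disposable (or calling `dispose()` in `deinit`) cancels the previous `start`. A hedged sketch of the pattern in isolation; the `LevelObserver` type and `bind` method are illustrative, not from this diff:

import SwiftSignalKit

final class LevelObserver {
    private let disposable = MetaDisposable()

    // Re-binding replaces, and thereby cancels, any earlier subscription.
    func bind(_ levels: Signal<Float, NoError>, onLevel: @escaping (Float) -> Void) {
        self.disposable.set((levels
        |> deliverOnMainQueue).start(next: { value in
            onLevel(value)
        }))
    }

    deinit {
        self.disposable.dispose()
    }
}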
diff --git a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift index c4642c1ef6..1d3cf7f30f 100644 --- a/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift +++ b/submodules/TelegramUI/Sources/ChatTextInputPanelNode.swift @@ -1055,20 +1055,22 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate { audioRecordingDotNode.frame = CGRect(origin: CGPoint(x: leftInset + 2.0 - UIScreenPixel, y: panelHeight - 44 + 1), size: CGSize(width: 40.0, height: 40)) if animateDotAppearing { - audioRecordingDotNode.layer.animateScale(from: 0.3, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false) - audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in - if finished { - let animation = CAKeyframeAnimation(keyPath: "opacity") - animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber] - animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber] - animation.duration = 0.5 - animation.autoreverses = true - animation.repeatCount = Float.infinity - - audioRecordingDotNode?.layer.add(animation, forKey: "recording") - } - }) - + let dotStartScale: CGFloat = (audioRecordingDotNode.layer.presentation()?.value(forKeyPath: "transform.scale.x") as? CGFloat) ?? 1 + audioRecordingDotNode.layer.animateScale(from: dotStartScale, to: 1, duration: 0.15, delay: 0, removeOnCompletion: false) + if audioRecordingDotNode.layer.animation(forKey: "recording") == nil { + audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1), to: 1, duration: 0.15, delay: 0, completion: { [weak audioRecordingDotNode] finished in + if finished { + let animation = CAKeyframeAnimation(keyPath: "opacity") + animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber, 0.0 as NSNumber] + animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber] + animation.duration = 0.5 + animation.autoreverses = true + animation.repeatCount = Float.infinity + + audioRecordingDotNode?.layer.add(animation, forKey: "recording") + } + }) + } self.attachmentButton.layer.animateAlpha(from: CGFloat(self.attachmentButton.layer.presentation()?.opacity ?? 1), to: 0, duration: 0.15, delay: 0, removeOnCompletion: false) self.attachmentButton.layer.animateScale(from: 1, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false) } @@ -1103,8 +1105,8 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate { self?.audioRecordingDotNode = nil - audioRecordingDotNode.layer.animateScale(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ?? 1), to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false) - audioRecordingDotNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, delay: 0, removeOnCompletion: false) { [weak audioRecordingDotNode] _ in + audioRecordingDotNode.layer.animateScale(from: 1, to: 0.3, duration: 0.15, delay: 0, removeOnCompletion: false) + audioRecordingDotNode.layer.animateAlpha(from: CGFloat(audioRecordingDotNode.layer.presentation()?.opacity ??
1), to: 0.0, duration: 0.15, delay: 0, removeOnCompletion: false) { [weak audioRecordingDotNode] _ in audioRecordingDotNode?.removeFromSupernode() } diff --git a/submodules/TelegramUI/Sources/FileMediaResourceStatus.swift b/submodules/TelegramUI/Sources/FileMediaResourceStatus.swift index 03980fc8e1..37d4817207 100644 --- a/submodules/TelegramUI/Sources/FileMediaResourceStatus.swift +++ b/submodules/TelegramUI/Sources/FileMediaResourceStatus.swift @@ -33,6 +33,18 @@ func messageFileMediaPlaybackStatus(context: AccountContext, file: TelegramMedia } } +func messageFileMediaPlaybackAudioLevelEvents(context: AccountContext, file: TelegramMediaFile, message: Message, isRecentActions: Bool) -> Signal { + guard let playerType = peerMessageMediaPlayerType(message) else { + return .never() + } + + if let (playlistId, itemId) = peerMessagesMediaPlaylistAndItemId(message, isRecentActions: isRecentActions) { + return context.sharedContext.mediaManager.filteredPlayerAudioLevelEvents(accountId: context.account.id, playlistId: playlistId, itemId: itemId, type: playerType) + } else { + return .never() + } +} + func messageFileMediaResourceStatus(context: AccountContext, file: TelegramMediaFile, message: Message, isRecentActions: Bool, isSharedMedia: Bool = false) -> Signal { let playbackStatus = internalMessageFileMediaPlaybackStatus(context: context, file: file, message: message, isRecentActions: isRecentActions) |> map { status -> MediaPlayerPlaybackStatus? in return status?.status diff --git a/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift b/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift index aac10f44be..a01bc652a3 100644 --- a/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift +++ b/submodules/TelegramUI/Sources/ManagedAudioRecorder.swift @@ -166,6 +166,7 @@ final class ManagedAudioRecorderContext { private var micLevelPeak: Int16 = 0 private var micLevelPeakCount: Int = 0 + private var audioLevelPeakUpdate: Double = 0.0 fileprivate var isPaused = false @@ -213,7 +214,7 @@ final class ManagedAudioRecorderContext { } return ActionDisposable { } - }), playAndRecord: true, forceAudioToSpeaker: false, baseRate: 1.0, updatedRate: { + }), playAndRecord: true, forceAudioToSpeaker: false, baseRate: 1.0, audioLevelPipe: ValuePipe(), updatedRate: { }, audioPaused: {}) self.toneRenderer = toneRenderer @@ -580,6 +581,12 @@ final class ManagedAudioRecorderContext { if self.micLevelPeakCount >= 1200 { let level = Float(self.micLevelPeak) / 4000.0 + /*let timestamp = CFAbsoluteTimeGetCurrent() + if !self.audioLevelPeakUpdate.isZero { + let delta = timestamp - self.audioLevelPeakUpdate + print("level = \(level), delta = \(delta)") + } + self.audioLevelPeakUpdate = timestamp*/ self.micLevel.set(level) self.micLevelPeak = 0 self.micLevelPeakCount = 0 diff --git a/submodules/TelegramUI/Sources/MediaManager.swift b/submodules/TelegramUI/Sources/MediaManager.swift index 7644847d6c..2132faf34f 100644 --- a/submodules/TelegramUI/Sources/MediaManager.swift +++ b/submodules/TelegramUI/Sources/MediaManager.swift @@ -70,24 +70,34 @@ public final class MediaManagerImpl: NSObject, MediaManager { private var nextPlayerIndex: Int32 = 0 + private let voiceMediaPlayerStateDisposable = MetaDisposable() private var voiceMediaPlayer: SharedMediaPlayer? 
{ didSet { if self.voiceMediaPlayer !== oldValue { if let voiceMediaPlayer = self.voiceMediaPlayer { let account = voiceMediaPlayer.account - self.voiceMediaPlayerStateValue.set(voiceMediaPlayer.playbackState - |> map { state -> (Account, SharedMediaPlayerItemPlaybackStateOrLoading)? in - guard let state = state else { - return nil + self.voiceMediaPlayerStateDisposable.set((voiceMediaPlayer.playbackState + |> deliverOnMainQueue).start(next: { [weak self, weak voiceMediaPlayer] state in + guard let strongSelf = self else { + return + } + guard let state = state, let voiceMediaPlayer = voiceMediaPlayer else { + strongSelf.voiceMediaPlayerStateValue.set(.single(nil)) + return } if case let .item(item) = state { - return (account, .state(item)) + strongSelf.voiceMediaPlayerStateValue.set(.single((account, .state(item)))) + let audioLevelValue: (AccountRecordId, SharedMediaPlaylistId, SharedMediaPlaylistItemId, Signal)? = (account.id, item.playlistId, item.item.id, voiceMediaPlayer.audioLevel) + strongSelf.voiceMediaPlayerAudioLevelEvents.set(.single(audioLevelValue)) } else { - return (account, .loading) + strongSelf.voiceMediaPlayerStateValue.set(.single((account, .loading))) + strongSelf.voiceMediaPlayerAudioLevelEvents.set(.single(nil)) } - } |> deliverOnMainQueue) + })) } else { + self.voiceMediaPlayerStateDisposable.set(nil) self.voiceMediaPlayerStateValue.set(.single(nil)) + self.voiceMediaPlayerAudioLevelEvents.set(.single(nil)) } } } @@ -97,6 +107,8 @@ public final class MediaManagerImpl: NSObject, MediaManager { return self.voiceMediaPlayerStateValue.get() } + private let voiceMediaPlayerAudioLevelEvents = Promise<(AccountRecordId, SharedMediaPlaylistId, SharedMediaPlaylistItemId, Signal)?>(nil) + private var musicMediaPlayer: SharedMediaPlayer? { didSet { if self.musicMediaPlayer !== oldValue { @@ -427,6 +439,7 @@ public final class MediaManagerImpl: NSObject, MediaManager { self.setPlaylistByTypeDisposables.dispose() self.mediaPlaybackStateDisposable.dispose() self.globalAudioSessionForegroundDisposable.dispose() + self.voiceMediaPlayerStateDisposable.dispose() } public func audioRecorder(beginWithTone: Bool, applicationBindings: TelegramApplicationBindings, beganWithTone: @escaping (Bool) -> Void) -> Signal { @@ -569,6 +582,26 @@ public final class MediaManagerImpl: NSObject, MediaManager { }) } + public func filteredPlayerAudioLevelEvents(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal { + switch type { + case .voice: + return self.voiceMediaPlayerAudioLevelEvents.get() + |> mapToSignal { value -> Signal in + guard let value = value else { + return .never() + } + let (accountIdValue, playlistIdValue, itemIdValue, signal) = value + if accountIdValue == accountId && playlistId.isEqual(to: playlistIdValue) && itemId.isEqual(to: itemIdValue) { + return signal + } else { + return .never() + } + } + case .music: + return .never() + } + } + @objc func playCommandEvent(_ command: AnyObject) -> MPRemoteCommandHandlerStatus { self.playlistControl(.playback(.play), type: nil) diff --git a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift index 1d3dbfbe20..1c155117c6 100644 --- a/submodules/TelegramUI/Sources/SharedMediaPlayer.swift +++ b/submodules/TelegramUI/Sources/SharedMediaPlayer.swift @@ -143,7 +143,27 @@ final class SharedMediaPlayer { return self.playbackStateValue.get() } - private var playbackItem: SharedMediaPlaybackItem? 
+ private let audioLevelPipe = ValuePipe() + var audioLevel: Signal { + return self.audioLevelPipe.signal() + } + private let audioLevelDisposable = MetaDisposable() + + private var playbackItem: SharedMediaPlaybackItem? { + didSet { + if playbackItem != oldValue { + switch playbackItem { + case let .audio(player): + let audioLevelPipe = self.audioLevelPipe + self.audioLevelDisposable.set((player.audioLevelEvents.start(next: { [weak audioLevelPipe] value in + audioLevelPipe?.putNext(value) + }))) + default: + self.audioLevelDisposable.set(nil) + } + } + } + } private var currentPlayedToEnd = false private var scheduledPlaybackAction: SharedMediaPlayerPlaybackControlAction? private var scheduledStartTime: Double? diff --git a/submodules/TelegramVoip/Sources/OngoingCallContext.swift b/submodules/TelegramVoip/Sources/OngoingCallContext.swift index 600db2aec9..a0e912e736 100644 --- a/submodules/TelegramVoip/Sources/OngoingCallContext.swift +++ b/submodules/TelegramVoip/Sources/OngoingCallContext.swift @@ -245,7 +245,6 @@ private protocol OngoingCallThreadLocalContextProtocol: class { func nativeSetNetworkType(_ type: NetworkType) func nativeSetIsMuted(_ value: Bool) func nativeSetVideoEnabled(_ value: Bool) - func nativeSwitchVideoCamera() func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void) func nativeDebugInfo() -> String func nativeVersion() -> String @@ -292,6 +291,26 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol { } } +public final class OngoingCallVideoCapturer { + fileprivate let impl: OngoingCallThreadLocalContextVideoCapturer + + public init() { + self.impl = OngoingCallThreadLocalContextVideoCapturer() + } + + public func switchCamera() { + self.impl.switchVideoCamera() + } + + public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) { + self.impl.makeOutgoingVideoView(completion) + } + + public func setIsVideoEnabled(_ value: Bool) { + self.impl.setIsVideoEnabled(value) + } +} + extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol { func nativeSetNetworkType(_ type: NetworkType) { self.setNetworkType(ongoingNetworkTypeForTypeWebrtc(type)) @@ -309,10 +328,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt self.setVideoEnabled(value) } - func nativeSwitchVideoCamera() { - self.switchVideoCamera() - } - func nativeDebugInfo() -> String { return self.debugInfo() ?? 
"" } @@ -463,7 +478,7 @@ public final class OngoingCallContext { return result } - public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, isVideo: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal, logName: String) { + public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal, logName: String) { let _ = setupLogs OngoingCallThreadLocalContext.applyServerConfig(serializedData) //OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData) @@ -542,9 +557,9 @@ public final class OngoingCallContext { )) } } - let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, isVideo: isVideo, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in + let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in callSessionManager?.sendSignalingData(internalId: internalId, data: data) - }) + }, videoCapturer: video?.impl) strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context)) context.stateChanged = { state, videoState, remoteVideoState in @@ -696,12 +711,6 @@ public final class OngoingCallContext { } } - public func switchVideoCamera() { - self.withContext { context in - context.nativeSwitchVideoCamera() - } - } - public func debugInfo() -> Signal<(String, String), NoError> { let poll = Signal<(String, String), NoError> { subscriber in self.withContext { context in @@ -725,14 +734,4 @@ public final class OngoingCallContext { } } } - - public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) { - self.withContext { context in - if let context = context as? 
OngoingCallThreadLocalContextWebrtc { - context.makeOutgoingVideoView(completion) - } else { - completion(nil) - } - } - } } diff --git a/submodules/TgVoipWebrtc/Impl/CodecsApple.h b/submodules/TgVoipWebrtc/Impl/CodecsApple.h index 442df44018..a09d444731 100644 --- a/submodules/TgVoipWebrtc/Impl/CodecsApple.h +++ b/submodules/TgVoipWebrtc/Impl/CodecsApple.h @@ -13,6 +13,8 @@ namespace TGVOIP_NAMESPACE { class VideoCapturerInterface { public: virtual ~VideoCapturerInterface(); + + virtual void setIsEnabled(bool isEnabled) = 0; }; void configurePlatformAudio(); diff --git a/submodules/TgVoipWebrtc/Impl/CodecsApple.mm b/submodules/TgVoipWebrtc/Impl/CodecsApple.mm index 71e4d06a24..e3fad7c4e8 100644 --- a/submodules/TgVoipWebrtc/Impl/CodecsApple.mm +++ b/submodules/TgVoipWebrtc/Impl/CodecsApple.mm @@ -112,6 +112,10 @@ [_videoCapturer stopCapture]; } +- (void)setIsEnabled:(bool)isEnabled { + [_videoCapturer setIsEnabled:isEnabled]; +} + @end @interface VideoCapturerInterfaceImplHolder : NSObject @@ -153,6 +157,16 @@ public: }); } + virtual void setIsEnabled(bool isEnabled) { + VideoCapturerInterfaceImplHolder *implReference = _implReference; + dispatch_async(dispatch_get_main_queue(), ^{ + if (implReference.reference != nil) { + VideoCapturerInterfaceImplReference *reference = (__bridge VideoCapturerInterfaceImplReference *)implReference.reference; + [reference setIsEnabled:isEnabled]; + } + }); + } + private: rtc::scoped_refptr _source; VideoCapturerInterfaceImplHolder *_implReference; diff --git a/submodules/TgVoipWebrtc/Impl/Manager.cpp b/submodules/TgVoipWebrtc/Impl/Manager.cpp index 6ffced3943..1e7cbe0a5f 100644 --- a/submodules/TgVoipWebrtc/Impl/Manager.cpp +++ b/submodules/TgVoipWebrtc/Impl/Manager.cpp @@ -26,8 +26,7 @@ static rtc::Thread *makeMediaThread() { return value.get(); } - -static rtc::Thread *getMediaThread() { +rtc::Thread *Manager::getMediaThread() { static rtc::Thread *value = makeMediaThread(); return value; } @@ -37,7 +36,7 @@ Manager::Manager( TgVoipEncryptionKey encryptionKey, bool enableP2P, std::vector const &rtcServers, - bool isVideo, + std::shared_ptr videoCapture, std::function stateUpdated, std::function videoStateUpdated, std::function remoteVideoIsActiveUpdated, @@ -47,7 +46,7 @@ _thread(thread), _encryptionKey(encryptionKey), _enableP2P(enableP2P), _rtcServers(rtcServers), -_startWithVideo(isVideo), +_videoCapture(videoCapture), _stateUpdated(stateUpdated), _videoStateUpdated(videoStateUpdated), _remoteVideoIsActiveUpdated(remoteVideoIsActiveUpdated), @@ -111,11 +110,11 @@ void Manager::start() { ); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [isOutgoing, thread = _thread, startWithVideo = _startWithVideo, weakThis]() { + _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [isOutgoing, thread = _thread, videoCapture = _videoCapture, weakThis]() { return new MediaManager( getMediaThread(), isOutgoing, - startWithVideo, + videoCapture, [thread, weakThis](const rtc::CopyOnWriteBuffer &packet) { thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() { auto strongThis = weakThis.lock(); @@ -203,12 +202,6 @@ void Manager::setMuteOutgoingAudio(bool mute) { }); } -void Manager::switchVideoCamera() { - _mediaManager->perform([](MediaManager *mediaManager) { - mediaManager->switchVideoCamera(); - }); -} - void Manager::notifyIsLocalVideoActive(bool isActive) { rtc::CopyOnWriteBuffer buffer; uint8_t mode = 4; @@ -228,12 +221,6 @@ void Manager::setIncomingVideoOutput(std::shared_ptr> sink) { - 
_mediaManager->perform([sink](MediaManager *mediaManager) { - mediaManager->setOutgoingVideoOutput(sink); - }); -} - #ifdef TGVOIP_NAMESPACE } #endif diff --git a/submodules/TgVoipWebrtc/Impl/Manager.h b/submodules/TgVoipWebrtc/Impl/Manager.h index 79e8583937..ff113c4175 100644 --- a/submodules/TgVoipWebrtc/Impl/Manager.h +++ b/submodules/TgVoipWebrtc/Impl/Manager.h @@ -12,12 +12,14 @@ namespace TGVOIP_NAMESPACE { class Manager : public std::enable_shared_from_this { public: + static rtc::Thread *getMediaThread(); + Manager( rtc::Thread *thread, TgVoipEncryptionKey encryptionKey, bool enableP2P, std::vector const &rtcServers, - bool isVideo, + std::shared_ptr videoCapture, std::function stateUpdated, std::function videoStateUpdated, std::function remoteVideoIsActiveUpdated, @@ -29,17 +31,15 @@ public: void receiveSignalingData(const std::vector &data); void setSendVideo(bool sendVideo); void setMuteOutgoingAudio(bool mute); - void switchVideoCamera(); void notifyIsLocalVideoActive(bool isActive); void setIncomingVideoOutput(std::shared_ptr> sink); - void setOutgoingVideoOutput(std::shared_ptr> sink); private: rtc::Thread *_thread; TgVoipEncryptionKey _encryptionKey; bool _enableP2P; std::vector _rtcServers; - bool _startWithVideo; + std::shared_ptr _videoCapture; std::function _stateUpdated; std::function _videoStateUpdated; std::function _remoteVideoIsActiveUpdated; diff --git a/submodules/TgVoipWebrtc/Impl/MediaManager.cpp b/submodules/TgVoipWebrtc/Impl/MediaManager.cpp index b1e22eb78f..5c91342c7f 100644 --- a/submodules/TgVoipWebrtc/Impl/MediaManager.cpp +++ b/submodules/TgVoipWebrtc/Impl/MediaManager.cpp @@ -19,6 +19,9 @@ #include "api/video_codecs/builtin_video_encoder_factory.h" +#include "TgVoip.h" +#include "VideoCaptureInterfaceImpl.h" + #if TARGET_OS_IPHONE #include "CodecsApple.h" @@ -164,7 +167,7 @@ static rtc::Thread *makeWorkerThread() { } -static rtc::Thread *getWorkerThread() { +rtc::Thread *MediaManager::getWorkerThread() { static rtc::Thread *value = makeWorkerThread(); return value; } @@ -172,7 +175,7 @@ static rtc::Thread *getWorkerThread() { MediaManager::MediaManager( rtc::Thread *thread, bool isOutgoing, - bool startWithVideo, + std::shared_ptr videoCapture, std::function packetEmitted, std::function localVideoCaptureActiveUpdated ) : @@ -180,7 +183,8 @@ _packetEmitted(packetEmitted), _localVideoCaptureActiveUpdated(localVideoCaptureActiveUpdated), _thread(thread), _eventLog(std::make_unique()), -_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { +_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), +_videoCapture(videoCapture) { _ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.fecIncoming = isOutgoing ? 
ssrcAudioFecIncoming : ssrcAudioFecOutgoing; @@ -199,7 +203,6 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { _videoCodecs = AssignPayloadTypesAndDefaultCodecs(videoEncoderFactory->GetSupportedFormats()); _isSendingVideo = false; - _useFrontCamera = true; _audioNetworkInterface = std::unique_ptr(new MediaManager::NetworkInterfaceImpl(this, false)); _videoNetworkInterface = std::unique_ptr(new MediaManager::NetworkInterfaceImpl(this, true)); @@ -283,9 +286,9 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { _videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig()); - _nativeVideoSource = makeVideoSource(_thread, getWorkerThread()); - - if (startWithVideo) { + if (_videoCapture != nullptr) { + ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated); + setSendVideo(true); } } @@ -372,10 +375,6 @@ void MediaManager::setSendVideo(bool sendVideo) { codec.SetParam(cricket::kCodecParamStartBitrate, 512); codec.SetParam(cricket::kCodecParamMaxBitrate, 2500); - _videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) { - localVideoCaptureActiveUpdated(isActive); - }); - cricket::VideoSendParameters videoSendParameters; videoSendParameters.codecs.push_back(codec); @@ -402,11 +401,15 @@ void MediaManager::setSendVideo(bool sendVideo) { videoSendStreamParams.cname = "cname"; _videoChannel->AddSendStream(videoSendStreamParams); - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get()); + if (_videoCapture != nullptr) { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource.get()); + } _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); } else { _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get()); + if (_videoCapture != nullptr) { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource); + } } cricket::VideoRecvParameters videoRecvParameters; @@ -449,8 +452,6 @@ void MediaManager::setSendVideo(bool sendVideo) { _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr); _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); - _videoCapturer.reset(); - _videoChannel->RemoveRecvStream(_ssrcVideo.incoming); _videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming); _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); @@ -466,25 +467,11 @@ void MediaManager::setMuteOutgoingAudio(bool mute) { _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && !_muteOutgoingAudio, nullptr, &_audioSource); } -void MediaManager::switchVideoCamera() { - if (_isSendingVideo) { - _useFrontCamera = !_useFrontCamera; - _videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) { - localVideoCaptureActiveUpdated(isActive); - }); - } -} - void MediaManager::setIncomingVideoOutput(std::shared_ptr> sink) { _currentIncomingVideoSink = sink; _videoChannel->SetSink(_ssrcVideo.incoming, _currentIncomingVideoSink.get()); } -void MediaManager::setOutgoingVideoOutput(std::shared_ptr> sink) { - 
diff --git a/submodules/TgVoipWebrtc/Impl/MediaManager.h b/submodules/TgVoipWebrtc/Impl/MediaManager.h
index 5e49c94ff4..a283d02283 100644
--- a/submodules/TgVoipWebrtc/Impl/MediaManager.h
+++ b/submodules/TgVoipWebrtc/Impl/MediaManager.h
@@ -7,6 +7,8 @@
 #include "api/transport/field_trial_based_config.h"
 #include "pc/rtp_sender.h"
 
+#include "TgVoip.h"
+
 #include <functional>
 #include <memory>
@@ -54,10 +56,12 @@ private:
     friend class MediaManager::NetworkInterfaceImpl;
 
 public:
+    static rtc::Thread *getWorkerThread();
+
     MediaManager(
         rtc::Thread *thread,
         bool isOutgoing,
-        bool startWithVideo,
+        std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
         std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted,
         std::function<void (bool)> localVideoCaptureActiveUpdated
     );
@@ -68,9 +72,7 @@ public:
     void notifyPacketSent(const rtc::SentPacket &sentPacket);
     void setSendVideo(bool sendVideo);
     void setMuteOutgoingAudio(bool mute);
-    void switchVideoCamera();
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
-    void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
 
 protected:
     std::function<void (const rtc::CopyOnWriteBuffer &)> _packetEmitted;
@@ -90,7 +92,6 @@
     std::vector<cricket::VideoCodec> _videoCodecs;
     bool _isSendingVideo;
-    bool _useFrontCamera;
 
     std::unique_ptr<cricket::MediaEngineInterface> _mediaEngine;
     std::unique_ptr<webrtc::Call> _call;
@@ -99,10 +100,8 @@
     std::unique_ptr<cricket::VoiceMediaChannel> _audioChannel;
     std::unique_ptr<cricket::VideoMediaChannel> _videoChannel;
     std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;
-    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
-    std::unique_ptr<VideoCapturerInterface> _videoCapturer;
+    std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
     std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentIncomingVideoSink;
-    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentOutgoingVideoSink;
 
     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
diff --git a/submodules/TgVoipWebrtc/Impl/TgVoip.h b/submodules/TgVoipWebrtc/Impl/TgVoip.h
index c3be58baf5..ccd7a485d2 100644
--- a/submodules/TgVoipWebrtc/Impl/TgVoip.h
+++ b/submodules/TgVoipWebrtc/Impl/TgVoip.h
@@ -129,6 +129,19 @@ struct TgVoipAudioDataCallbacks {
     std::function<void (int16_t *, size_t)> preprocessed;
 };
 
+class TgVoipVideoCaptureInterface {
+protected:
+    TgVoipVideoCaptureInterface() = default;
+
+public:
+    static std::shared_ptr<TgVoipVideoCaptureInterface> makeInstance();
+
+    virtual ~TgVoipVideoCaptureInterface();
+
+    virtual void switchCamera() = 0;
+    virtual void setIsVideoEnabled(bool isVideoEnabled) = 0;
+    virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
+};
+
 class TgVoip {
 protected:
     TgVoip() = default;
@@ -146,7 +159,7 @@ public:
         std::vector<TgVoipRtcServer> const &rtcServers,
         TgVoipNetworkType initialNetworkType,
         TgVoipEncryptionKey const &encryptionKey,
-        bool isVideo,
+        std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
         std::function<void (TgVoipState)> stateUpdated,
         std::function<void (bool)> videoStateUpdated,
         std::function<void (bool)> remoteVideoIsActiveUpdated,
@@ -161,7 +174,6 @@ public:
     virtual void setEchoCancellationStrength(int strength) = 0;
 
     virtual void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
-    virtual void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
 
     virtual std::string getLastError() = 0;
     virtual std::string getDebugInfo() = 0;
@@ -171,7 +183,6 @@ public:
     virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
     virtual void setSendVideo(bool sendVideo) = 0;
-    virtual void switchVideoCamera() = 0;
 
     virtual TgVoipFinalState stop() = 0;
 };
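Note: TgVoipVideoCaptureInterface is now the public control surface for local video, independent of the TgVoip call object. A hedged consumer sketch, using only the methods declared above (the sink is any WebRTC frame sink, e.g. one obtained from a renderer):

    #include "TgVoip.h"

    // Sketch: capture controls work with or without an active call.
    void demoCaptureControls(std::shared_ptr<TgVoipVideoCaptureInterface> capture,
                             std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> previewSink) {
        capture->setVideoOutput(previewSink); // local preview; replaces TgVoip::setOutgoingVideoOutput
        capture->switchCamera();              // front/back toggle; replaces TgVoip::switchVideoCamera
        capture->setIsVideoEnabled(false);    // pause frame delivery without tearing down the session
    }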
diff --git a/submodules/TgVoipWebrtc/Impl/TgVoip.mm b/submodules/TgVoipWebrtc/Impl/TgVoip.mm
index 905f997955..5cfb5d75a4 100644
--- a/submodules/TgVoipWebrtc/Impl/TgVoip.mm
+++ b/submodules/TgVoipWebrtc/Impl/TgVoip.mm
@@ -5,10 +5,21 @@
 #include "rtc_base/logging.h"
 #include "Manager.h"
+#include "MediaManager.h"
 
 #include
 #include
 
+#include "VideoCaptureInterfaceImpl.h"
+
+#if TARGET_OS_IPHONE
+
+#include "CodecsApple.h"
+
+#else
+#error "Unsupported platform"
+#endif
+
 #import
 
 #include
@@ -142,7 +153,7 @@ public:
         std::vector<TgVoipRtcServer> const &rtcServers,
         TgVoipConfig const &config,
         TgVoipEncryptionKey const &encryptionKey,
-        bool isVideo,
+        std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
         TgVoipNetworkType initialNetworkType,
         std::function<void (TgVoipState)> stateUpdated,
         std::function<void (bool)> videoStateUpdated,
@@ -160,13 +171,13 @@ public:
         bool enableP2P = config.enableP2P;
 
-        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, isVideo, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers](){
+        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
             return new Manager(
                 getManagerThread(),
                 encryptionKey,
                 enableP2P,
                 rtcServers,
-                isVideo,
+                videoCapture,
                 [stateUpdated](const TgVoipState &state) {
                     stateUpdated(state);
                 },
@@ -201,12 +212,6 @@ public:
             manager->setSendVideo(sendVideo);
         });
     };
-
-    void switchVideoCamera() override {
-        _manager->perform([](Manager *manager) {
-            manager->switchVideoCamera();
-        });
-    }
 
     void setNetworkType(TgVoipNetworkType networkType) override {
         /*message::NetworkType mappedType;
@@ -267,12 +272,6 @@ public:
             manager->setIncomingVideoOutput(sink);
         });
     }
-
-    void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
-        _manager->perform([sink](Manager *manager) {
-            manager->setOutgoingVideoOutput(sink);
-        });
-    }
 
     void setAudioOutputGainControlEnabled(bool enabled) override {
     }
@@ -387,7 +386,7 @@ TgVoip *TgVoip::makeInstance(
     std::vector<TgVoipRtcServer> const &rtcServers,
     TgVoipNetworkType initialNetworkType,
     TgVoipEncryptionKey const &encryptionKey,
-    bool isVideo,
+    std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
    std::function<void (TgVoipState)> stateUpdated,
     std::function<void (bool)> videoStateUpdated,
     std::function<void (bool)> remoteVideoIsActiveUpdated,
@@ -400,7 +399,7 @@ TgVoip *TgVoip::makeInstance(
         rtcServers,
         config,
         encryptionKey,
-        isVideo,
+        videoCapture,
         initialNetworkType,
         stateUpdated,
         videoStateUpdated,
@@ -411,6 +410,12 @@
 TgVoip::~TgVoip() = default;
 
+std::shared_ptr<TgVoipVideoCaptureInterface> TgVoipVideoCaptureInterface::makeInstance() {
+    return std::shared_ptr<TgVoipVideoCaptureInterface>(new TgVoipVideoCaptureInterfaceImpl());
+}
+
+TgVoipVideoCaptureInterface::~TgVoipVideoCaptureInterface() = default;
+
 #ifdef TGVOIP_NAMESPACE
 }
 #endif
diff --git a/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h b/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
index fbaee62e2d..ce5007a21c 100644
--- a/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
+++ b/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
@@ -43,6 +43,12 @@ public:
         });
     }
 
+    T *getSyncAssumingSameThread() {
+        assert(_thread->IsCurrent());
+        assert(_valueHolder->_value != nullptr);
+        return _valueHolder->_value.get();
+    }
+
 private:
     rtc::Thread *_thread;
     std::shared_ptr<ValueHolder<T>> _valueHolder;
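Note: getSyncAssumingSameThread is what lets MediaManager reach into the capture object without a perform() hop; both objects happen to live on the media thread. An illustrative sketch of the contract (not from the diff; names hypothetical):

    // Sketch: direct access is valid only on the owning thread, and only
    // after the object has been constructed there; the asserts turn a
    // silent data race into an immediate crash. perform() remains the
    // safe path from any other thread.
    template <typename T>
    T *uncheckedGet(rtc::Thread *owningThread, const std::unique_ptr<T> &value) {
        assert(owningThread->IsCurrent()); // caller is on the owning thread
        assert(value != nullptr);          // construction already happened
        return value.get();
    }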
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
index 5c8c24e9c2..f9f3e63f09 100644
--- a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
+++ b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
@@ -17,6 +17,7 @@
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps;
 - (void)stopCapture;
+- (void)setIsEnabled:(bool)isEnabled;
 
 @end
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
index 723145155f..8149f87c45 100644
--- a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
+++ b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
@@ -39,6 +39,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
     UIDeviceOrientation _orientation;
     void (^_isActiveUpdated)(bool);
+    bool _isActiveValue;
+    bool _inForegroundValue;
+    bool _isPaused;
 }
 
 @end
@@ -49,6 +52,9 @@
     self = [super init];
     if (self != nil) {
         _source = source;
+        _isActiveValue = true;
+        _inForegroundValue = true;
+        _isPaused = false;
         _isActiveUpdated = [isActiveUpdated copy];
 
         if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
@@ -124,6 +130,11 @@
     [self stopCaptureWithCompletionHandler:nil];
 }
 
+- (void)setIsEnabled:(bool)isEnabled {
+    _isPaused = !isEnabled;
+    [self updateIsActiveValue];
+}
+
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps
@@ -253,7 +264,9 @@
     RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                              rotation:_rotation
                                                           timeStampNs:timeStampNs];
-    getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
+    if (!_isPaused) {
+        getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
+    }
 }
 
 - (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -316,15 +329,23 @@
         _hasRetriedOnFatalError = NO;
     }];
 
-    if (_isActiveUpdated) {
-        _isActiveUpdated(true);
-    }
+    _inForegroundValue = true;
+    [self updateIsActiveValue];
 }
 
 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
     RTCLog(@"Capture session stopped.");
-    if (_isActiveUpdated) {
-        _isActiveUpdated(false);
+    _inForegroundValue = false;
+    [self updateIsActiveValue];
+}
+
+- (void)updateIsActiveValue {
+    bool isActive = _inForegroundValue && !_isPaused;
+    if (isActive != _isActiveValue) {
+        _isActiveValue = isActive;
+        if (_isActiveUpdated) {
+            _isActiveUpdated(_isActiveValue);
+        }
     }
 }
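Note: the capturer now derives its reported activity from two independent inputs instead of forwarding session callbacks directly, and it only notifies on real transitions. A compact sketch of the same pattern in C++ (illustrative only; the diff's version is the Objective-C above):

    #include <functional>

    // Sketch: collapse (inForeground, isPaused) into one observable flag,
    // as -updateIsActiveValue does above.
    struct CaptureActivity {
        bool inForeground = true;
        bool isPaused = false;
        bool reported = true;
        std::function<void(bool)> onChange;

        void update() {
            bool isActive = inForeground && !isPaused;
            if (isActive != reported) {   // suppress duplicate notifications
                reported = isActive;
                if (onChange) {
                    onChange(isActive);
                }
            }
        }
    };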
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h
new file mode 100644
index 0000000000..324c61b181
--- /dev/null
+++ b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h
@@ -0,0 +1,53 @@
+#ifndef VIDEO_CAPTURE_INTERFACE_IMPL_H
+#define VIDEO_CAPTURE_INTERFACE_IMPL_H
+
+#include "TgVoip.h"
+
+#include <memory>
+
+#include "ThreadLocalObject.h"
+#include "api/media_stream_interface.h"
+
+#ifdef TGVOIP_NAMESPACE
+namespace TGVOIP_NAMESPACE {
+#endif
+
+class VideoCapturerInterface;
+
+class TgVoipVideoCaptureInterfaceObject {
+public:
+    TgVoipVideoCaptureInterfaceObject();
+    ~TgVoipVideoCaptureInterfaceObject();
+
+    void switchCamera();
+    void setIsVideoEnabled(bool isVideoEnabled);
+    void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
+    void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);
+
+public:
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _videoSource;
+    std::unique_ptr<VideoCapturerInterface> _videoCapturer;
+
+private:
+    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
+    std::function<void (bool)> _isActiveUpdated;
+    bool _useFrontCamera;
+    bool _isVideoEnabled;
+};
+
+class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
+public:
+    TgVoipVideoCaptureInterfaceImpl();
+    virtual ~TgVoipVideoCaptureInterfaceImpl();
+
+    virtual void switchCamera();
+    virtual void setIsVideoEnabled(bool isVideoEnabled);
+    virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
+
+public:
+    std::unique_ptr<ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>> _impl;
+};
+
+#ifdef TGVOIP_NAMESPACE
+}
+#endif
+
+#endif
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm
new file mode 100644
index 0000000000..e66e8c4a7a
--- /dev/null
+++ b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm
@@ -0,0 +1,90 @@
+#include "VideoCaptureInterfaceImpl.h"
+
+#include "CodecsApple.h"
+#include "Manager.h"
+#include "MediaManager.h"
+
+#ifdef TGVOIP_NAMESPACE
+namespace TGVOIP_NAMESPACE {
+#endif
+
+TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
+    _useFrontCamera = true;
+    _isVideoEnabled = true;
+    _videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
+    //this should outlive the capturer
+    _videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
+        if (this->_isActiveUpdated) {
+            this->_isActiveUpdated(isActive);
+        }
+    });
+}
+
+TgVoipVideoCaptureInterfaceObject::~TgVoipVideoCaptureInterfaceObject() {
+    if (_currentSink != nullptr) {
+        _videoSource->RemoveSink(_currentSink.get());
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::switchCamera() {
+    _useFrontCamera = !_useFrontCamera;
+    _videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
+        if (this->_isActiveUpdated) {
+            this->_isActiveUpdated(isActive);
+        }
+    });
+}
+
+void TgVoipVideoCaptureInterfaceObject::setIsVideoEnabled(bool isVideoEnabled) {
+    if (_isVideoEnabled != isVideoEnabled) {
+        _isVideoEnabled = isVideoEnabled;
+        _videoCapturer->setIsEnabled(isVideoEnabled);
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
+    if (_currentSink != nullptr) {
+        _videoSource->RemoveSink(_currentSink.get());
+    }
+    _currentSink = sink;
+    if (_currentSink != nullptr) {
+        _videoSource->AddOrUpdateSink(_currentSink.get(), rtc::VideoSinkWants());
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::setIsActiveUpdated(std::function<void (bool)> isActiveUpdated) {
+    _isActiveUpdated = isActiveUpdated;
+}
+
+TgVoipVideoCaptureInterfaceImpl::TgVoipVideoCaptureInterfaceImpl() {
+    _impl.reset(new ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>(
+        Manager::getMediaThread(),
+        []() {
+            return new TgVoipVideoCaptureInterfaceObject();
+        }
+    ));
+}
+
+TgVoipVideoCaptureInterfaceImpl::~TgVoipVideoCaptureInterfaceImpl() {
+
+}
+
+void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
+    _impl->perform([](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->switchCamera();
+    });
+}
+
+void TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(bool isVideoEnabled) {
+    _impl->perform([isVideoEnabled](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->setIsVideoEnabled(isVideoEnabled);
+    });
+}
+
+void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
+    _impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->setVideoOutput(sink);
+    });
+}
+
+}
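Note: in the new implementation, switchCamera replaces only `_videoCapturer` while `_videoSource` persists, so the call's send stream and any preview sink never have to re-attach. A sketch of that invariant, assuming the makeVideoCapturer factory used above (callback elided):

    // Sketch: consumers hold the long-lived source, not the capturer, so
    // swapping the capturer behind it is invisible downstream.
    struct CapturePipeline {
        rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source; // long-lived
        std::unique_ptr<VideoCapturerInterface> capturer;             // replaceable
        bool useFrontCamera = true;

        void switchCamera() {
            useFrontCamera = !useFrontCamera;
            // The assignment destroys the old capturer; sinks attached to
            // `source` never notice the swap.
            capturer = makeVideoCapturer(source, useFrontCamera, [](bool) {});
        }
    };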
diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
index 1582e4351a..b15f82773f 100644
--- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
+++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
@@ -78,6 +78,17 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 
 @end
 
+@interface OngoingCallThreadLocalContextVideoCapturer : NSObject
+
+- (instancetype _Nonnull)init;
+
+- (void)switchVideoCamera;
+- (void)setIsVideoEnabled:(bool)isVideoEnabled;
+
+- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc : NSObject
 
 + (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
@@ -88,7 +99,7 @@
 @property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
 @property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
 
-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData;
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
 
 - (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
 - (bool)needRate;
@@ -99,10 +110,8 @@
 - (void)setIsMuted:(bool)isMuted;
 - (void)setVideoEnabled:(bool)videoEnabled;
-- (void)switchVideoCamera;
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
 - (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
-- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
 - (void)addSignalingData:(NSData * _Nonnull)data;
 
 @end
diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
index a10e9950be..833505792b 100644
--- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
+++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
@@ -21,6 +21,49 @@ using namespace TGVOIP_NAMESPACE;
 
 @end
 
+@interface OngoingCallThreadLocalContextVideoCapturer () {
+    std::shared_ptr<TgVoipVideoCaptureInterface> _interface;
+}
+
+@end
+
+@implementation OngoingCallThreadLocalContextVideoCapturer
+
+- (instancetype _Nonnull)init {
+    self = [super init];
+    if (self != nil) {
+        _interface = TgVoipVideoCaptureInterface::makeInstance();
+    }
+    return self;
+}
+
+- (void)switchVideoCamera {
+    _interface->switchCamera();
+}
+
+- (void)setIsVideoEnabled:(bool)isVideoEnabled {
+    _interface->setIsVideoEnabled(isVideoEnabled);
+}
+
+- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
+    return _interface;
+}
+
+- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
+    std::shared_ptr<TgVoipVideoCaptureInterface> interface = _interface;
+    dispatch_async(dispatch_get_main_queue(), ^{
+        VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
+        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
+
+        std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
+        interface->setVideoOutput(sink);
+
+        completion(remoteRenderer);
+    });
+}
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc () {
     id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
     int32_t _contextId;
@@ -36,6 +79,7 @@
     OngoingCallStateWebrtc _state;
     OngoingCallVideoStateWebrtc _videoState;
     OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
+    OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;
 
     int32_t _signalBars;
     NSData *_lastDerivedState;
@@ -134,7 +178,7 @@
     return @"2.7.7";
 }
 
-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData; {
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -146,7 +190,8 @@
         _callPacketTimeout = 10.0;
         _networkType = networkType;
         _sendSignalingData = [sendSignalingData copy];
-        if (isVideo) {
+        _videoCapturer = videoCapturer;
+        if (videoCapturer != nil) {
             _videoState = OngoingCallVideoStateActiveOutgoing;
             _remoteVideoState = OngoingCallRemoteVideoStateActive;
         } else {
@@ -236,7 +281,7 @@
             parsedRtcServers,
             callControllerNetworkTypeForType(networkType),
             encryptionKey,
-            isVideo,
+            [_videoCapturer getInterface],
             [weakSelf, queue](TgVoipState state) {
                 [queue dispatch:^{
                     __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
@@ -424,12 +469,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (void)switchVideoCamera {
-    if (_tgVoip) {
-        _tgVoip->switchVideoCamera();
-    }
-}
-
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
     if (_networkType != networkType) {
         _networkType = networkType;
@@ -457,23 +496,5 @@
     }
 }
 
-- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
-    if (_tgVoip) {
-        __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
-        dispatch_async(dispatch_get_main_queue(), ^{
-            VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
-            remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
-
-            std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
-            __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
-            if (strongSelf) {
-                strongSelf->_tgVoip->setOutgoingVideoOutput(sink);
-            }
-
-            completion(remoteRenderer);
-        });
-    }
-}
-
 @end
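Note: the net effect of the two removals above is that the outgoing preview no longer requires a live call. A hedged sketch of the new flow on the C++ side (names from this diff; renderer wiring elided):

    #include "TgVoip.h"

    // Sketch: a local preview can exist before (or without) any call, since
    // the sink attaches to the capture interface rather than to TgVoip.
    void attachPreview(std::shared_ptr<TgVoipVideoCaptureInterface> capture,
                       std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
        capture->setVideoOutput(sink); // previously: _tgVoip->setOutgoingVideoOutput(sink)
    }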