mirror of https://github.com/Swiftgram/Telegram-IOS.git (synced 2025-06-16 05:55:20 +00:00)

[WIP] Video chats v2

parent 1567e6719c
commit 67ded11399
@@ -103,6 +103,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 case enableQuickReactionSwitch(Bool)
 case disableReloginTokens(Bool)
 case callV2(Bool)
+case experimentalCallMute(Bool)
 case liveStreamV2(Bool)
 case preferredVideoCodec(Int, String, String?, Bool)
 case disableVideoAspectScaling(Bool)
@@ -128,7 +129,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 return DebugControllerSection.web.rawValue
 case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
 return DebugControllerSection.experiments.rawValue
-case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .callV2, .liveStreamV2:
+case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .callV2, .experimentalCallMute, .liveStreamV2:
 return DebugControllerSection.experiments.rawValue
 case .logTranslationRecognition, .resetTranslationStates:
 return DebugControllerSection.translation.rawValue
@@ -243,10 +244,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 return 50
 case .callV2:
 return 51
-case .liveStreamV2:
+case .experimentalCallMute:
 return 52
+case .liveStreamV2:
+return 53
 case let .preferredVideoCodec(index, _, _, _):
-return 53 + index
+return 54 + index
 case .disableVideoAspectScaling:
 return 100
 case .enableNetworkFramework:
@@ -1325,6 +1328,16 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 })
 }).start()
 })
+case let .experimentalCallMute(value):
+return ItemListSwitchItem(presentationData: presentationData, title: "[WIP] OS mic mute", value: value, sectionId: self.section, style: .blocks, updated: { value in
+let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
+settings.experimentalCallMute = value
+return PreferencesEntry(settings)
+})
+}).start()
+})
 case let .liveStreamV2(value):
 return ItemListSwitchItem(presentationData: presentationData, title: "Live Stream V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
 let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
@@ -1490,6 +1503,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
 entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
 entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
 entries.append(.callV2(experimentalSettings.callV2))
+entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
 entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))
 }
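For reference, the new "[WIP] OS mic mute" switch persists its value the same way as the other experimental toggles in this file: the flag is written into ExperimentalUISettings through an account-manager transaction and later read back via immediateExperimentalUISettings (see the call-service hunks below). The pattern in isolation, reindented as a sketch; every identifier is taken from the hunk above and `value` is the new switch state:

// Illustrative sketch of the toggle pattern above, not an additional change.
let _ = arguments.sharedContext.accountManager.transaction({ transaction in
    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
        var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
        settings.experimentalCallMute = value // flag introduced by this commit
        return PreferencesEntry(settings)
    })
}).start()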
@@ -114,6 +114,9 @@ swift_library(
 "//submodules/TelegramUI/Components/PlainButtonComponent",
 "//submodules/TelegramUI/Components/LottieComponent",
 "//submodules/TelegramUI/Components/Stories/PeerListItemComponent",
+"//submodules/TelegramUI/Components/BackButtonComponent",
+"//submodules/DirectMediaImageCache",
+"//submodules/FastBlur",
 ],
 visibility = [
 "//visibility:public",
@@ -295,7 +295,7 @@ public final class PresentationCallImpl: PresentationCall {
 if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
 self.sharedAudioDevice = nil
 } else {
-self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
+self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: context.sharedContext.immediateExperimentalUISettings.experimentalCallMute)
 }

 self.audioSessionActiveDisposable = (self.audioSessionActive.get()
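In the hunk above the experimental flag reaches one-to-one calls: the app-configuration killswitch "ios_killswitch_disable_call_device" still disables the shared audio device entirely, and otherwise the device is now created with system-mute support tied to the setting. The same decision restated as a sketch, with the flag pulled into a local (identifiers from the diff):

// Illustrative restatement of the audio-device selection above, not an additional change.
if let data = context.currentAppConfiguration.with({ $0 }).data, data["ios_killswitch_disable_call_device"] != nil {
    self.sharedAudioDevice = nil // killswitch active: no shared audio device
} else {
    let enableSystemMute = context.sharedContext.immediateExperimentalUISettings.experimentalCallMute
    self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: enableSystemMute)
}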
@@ -1687,7 +1687,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 strongSelf.requestCall(movingFromBroadcastToRtc: false)
 }
 }
-}, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account), onMutedSpeechActivityDetected: { [weak self] value in
+}, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, enableSystemMute: self.accountContext.sharedContext.immediateExperimentalUISettings.experimentalCallMute, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account), onMutedSpeechActivityDetected: { [weak self] value in
 Queue.mainQueue().async {
 guard let strongSelf = self else {
 return
@@ -2997,7 +2997,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

 self.hasScreencast = true

-let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in })
+let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in })
 self.screencastCallContext = screencastCallContext

 self.screencastJoinDisposable.set((screencastCallContext.joinPayload
@@ -98,9 +98,9 @@ final class VideoChatActionButtonComponent: Component {
 case .connecting:
 backgroundColor = UIColor(white: 1.0, alpha: 0.1)
 case .muted:
-backgroundColor = isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF)
+backgroundColor = !isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF)
 case .unmuted:
-backgroundColor = isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659)
+backgroundColor = !isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659)
 }
 iconDiameter = 60.0
 case .leave:
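The two changed lines above flip the isActive check for the muted and unmuted states, so the dimmed color is now used when isActive is false and the bright one when it is true; the connecting state keeps its translucent white. The resulting mapping, written with the ternary un-negated (a sketch only; colors and names come from the hunk):

// Illustrative restatement of the background selection after this hunk.
switch content {
case .connecting:
    backgroundColor = UIColor(white: 1.0, alpha: 0.1)
case .muted:
    backgroundColor = isActive ? UIColor(rgb: 0x027FFF) : UIColor(rgb: 0x002E5D)
case .unmuted:
    backgroundColor = isActive ? UIColor(rgb: 0x34C659) : UIColor(rgb: 0x124B21)
}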
@@ -0,0 +1,100 @@
|
||||
import Foundation
|
||||
import UIKit
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import MultilineTextComponent
|
||||
import TelegramPresentationData
|
||||
import AppBundle
|
||||
import BackButtonComponent
|
||||
|
||||
final class VideoChatExpandedControlsComponent: Component {
|
||||
let theme: PresentationTheme
|
||||
let strings: PresentationStrings
|
||||
let backAction: () -> Void
|
||||
|
||||
init(
|
||||
theme: PresentationTheme,
|
||||
strings: PresentationStrings,
|
||||
backAction: @escaping () -> Void
|
||||
) {
|
||||
self.theme = theme
|
||||
self.strings = strings
|
||||
self.backAction = backAction
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatExpandedControlsComponent, rhs: VideoChatExpandedControlsComponent) -> Bool {
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.strings !== rhs.strings {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
final class View: UIView {
|
||||
private let backButton = ComponentView<Empty>()
|
||||
|
||||
private var component: VideoChatExpandedControlsComponent?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private var ignoreScrolling: Bool = false
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
|
||||
if let backButtonView = self.backButton.view, let result = backButtonView.hitTest(self.convert(point, to: backButtonView), with: event) {
|
||||
return result
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func update(component: VideoChatExpandedControlsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
|
||||
self.isUpdating = true
|
||||
defer {
|
||||
self.isUpdating = false
|
||||
}
|
||||
|
||||
self.component = component
|
||||
|
||||
let backButtonSize = self.backButton.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(BackButtonComponent(
|
||||
title: component.strings.Common_Back,
|
||||
color: .white,
|
||||
action: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.backAction()
|
||||
}
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width * 0.5, height: 100.0)
|
||||
)
|
||||
let backButtonFrame = CGRect(origin: CGPoint(x: 12.0, y: 12.0), size: backButtonSize)
|
||||
if let backButtonView = self.backButton.view {
|
||||
if backButtonView.superview == nil {
|
||||
self.addSubview(backButtonView)
|
||||
}
|
||||
transition.setFrame(view: backButtonView, frame: backButtonFrame)
|
||||
}
|
||||
|
||||
return availableSize
|
||||
}
|
||||
}
|
||||
|
||||
func makeView() -> View {
|
||||
return View()
|
||||
}
|
||||
|
||||
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
|
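A note on the new VideoChatExpandedControlsComponent above: the overlay spans the whole expanded video area, but only its back button is interactive, because hitTest returns either the back button's hit result or nil, letting every other touch fall through to the video underneath. The same pattern in a self-contained sketch; the class and property names here are illustrative and not part of the diff:

import UIKit

// Illustrative sketch: an overlay that is interactive only on one child view.
final class PassthroughOverlayView: UIView {
    let backButton = UIButton(type: .system)

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.addSubview(self.backButton)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        // Hits on the button are handled here; anything else passes through to views below.
        if let result = self.backButton.hitTest(self.convert(point, to: self.backButton), with: event) {
            return result
        }
        return nil
    }
}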
@@ -0,0 +1,671 @@
|
||||
import Foundation
|
||||
import UIKit
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import MultilineTextComponent
|
||||
import TelegramPresentationData
|
||||
import AppBundle
|
||||
import TelegramCore
|
||||
import AccountContext
|
||||
import SwiftSignalKit
|
||||
import MetalEngine
|
||||
import CallScreen
|
||||
import AvatarNode
|
||||
|
||||
final class VideoChatParticipantThumbnailComponent: Component {
|
||||
let call: PresentationGroupCall
|
||||
let theme: PresentationTheme
|
||||
let participant: GroupCallParticipantsContext.Participant
|
||||
let isPresentation: Bool
|
||||
let isSelected: Bool
|
||||
let action: (() -> Void)?
|
||||
|
||||
init(
|
||||
call: PresentationGroupCall,
|
||||
theme: PresentationTheme,
|
||||
participant: GroupCallParticipantsContext.Participant,
|
||||
isPresentation: Bool,
|
||||
isSelected: Bool,
|
||||
action: (() -> Void)?
|
||||
) {
|
||||
self.call = call
|
||||
self.theme = theme
|
||||
self.participant = participant
|
||||
self.isPresentation = isPresentation
|
||||
self.isSelected = isSelected
|
||||
self.action = action
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatParticipantThumbnailComponent, rhs: VideoChatParticipantThumbnailComponent) -> Bool {
|
||||
if lhs.call !== rhs.call {
|
||||
return false
|
||||
}
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.participant != rhs.participant {
|
||||
return false
|
||||
}
|
||||
if lhs.isPresentation != rhs.isPresentation {
|
||||
return false
|
||||
}
|
||||
if lhs.isSelected != rhs.isSelected {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
private struct VideoSpec: Equatable {
|
||||
var resolution: CGSize
|
||||
var rotationAngle: Float
|
||||
|
||||
init(resolution: CGSize, rotationAngle: Float) {
|
||||
self.resolution = resolution
|
||||
self.rotationAngle = rotationAngle
|
||||
}
|
||||
}
|
||||
|
||||
final class View: HighlightTrackingButton {
|
||||
private static let selectedBorderImage: UIImage? = {
|
||||
return generateStretchableFilledCircleImage(diameter: 20.0, color: nil, strokeColor: UIColor.white, strokeWidth: 2.0)?.withRenderingMode(.alwaysTemplate)
|
||||
}()
|
||||
|
||||
private var component: VideoChatParticipantThumbnailComponent?
|
||||
private weak var componentState: EmptyComponentState?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private var avatarNode: AvatarNode?
|
||||
private let title = ComponentView<Empty>()
|
||||
private let muteStatus = ComponentView<Empty>()
|
||||
|
||||
private var selectedBorderView: UIImageView?
|
||||
|
||||
private var videoSource: AdaptedCallVideoSource?
|
||||
private var videoDisposable: Disposable?
|
||||
private var videoBackgroundLayer: SimpleLayer?
|
||||
private var videoLayer: PrivateCallVideoLayer?
|
||||
private var videoSpec: VideoSpec?
|
||||
|
||||
override init(frame: CGRect) {
|
||||
super.init(frame: frame)
|
||||
|
||||
//TODO:release optimize
|
||||
self.clipsToBounds = true
|
||||
self.layer.cornerRadius = 10.0
|
||||
|
||||
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.videoDisposable?.dispose()
|
||||
}
|
||||
|
||||
@objc private func pressed() {
|
||||
guard let component = self.component, let action = component.action else {
|
||||
return
|
||||
}
|
||||
action()
|
||||
}
|
||||
|
||||
func update(component: VideoChatParticipantThumbnailComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
|
||||
self.isUpdating = true
|
||||
defer {
|
||||
self.isUpdating = false
|
||||
}
|
||||
|
||||
if self.component == nil {
|
||||
self.backgroundColor = UIColor(rgb: 0x1C1C1E)
|
||||
}
|
||||
|
||||
self.component = component
|
||||
self.componentState = state
|
||||
|
||||
let avatarNode: AvatarNode
|
||||
if let current = self.avatarNode {
|
||||
avatarNode = current
|
||||
} else {
|
||||
avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 17.0))
|
||||
avatarNode.isUserInteractionEnabled = false
|
||||
self.avatarNode = avatarNode
|
||||
self.addSubview(avatarNode.view)
|
||||
}
|
||||
|
||||
let avatarSize = CGSize(width: 50.0, height: 50.0)
|
||||
let avatarFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - avatarSize.width) * 0.5), y: 7.0), size: avatarSize)
|
||||
transition.setFrame(view: avatarNode.view, frame: avatarFrame)
|
||||
avatarNode.updateSize(size: avatarSize)
|
||||
if component.participant.peer.smallProfileImage != nil {
|
||||
avatarNode.setPeerV2(context: component.call.accountContext, theme: component.theme, peer: EnginePeer(component.participant.peer), displayDimensions: avatarSize)
|
||||
} else {
|
||||
avatarNode.setPeer(context: component.call.accountContext, theme: component.theme, peer: EnginePeer(component.participant.peer), displayDimensions: avatarSize)
|
||||
}
|
||||
|
||||
let muteStatusSize = self.muteStatus.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(VideoChatMuteIconComponent(
|
||||
color: .white,
|
||||
isMuted: component.participant.muteState != nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 36.0, height: 36.0)
|
||||
)
|
||||
let muteStatusFrame = CGRect(origin: CGPoint(x: availableSize.width + 5.0 - muteStatusSize.width, y: availableSize.height + 5.0 - muteStatusSize.height), size: muteStatusSize)
|
||||
if let muteStatusView = self.muteStatus.view {
|
||||
if muteStatusView.superview == nil {
|
||||
self.addSubview(muteStatusView)
|
||||
}
|
||||
transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
|
||||
transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
|
||||
transition.setScale(view: muteStatusView, scale: 0.65)
|
||||
}
|
||||
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(MultilineTextComponent(
|
||||
text: .plain(NSAttributedString(string: EnginePeer(component.participant.peer).compactDisplayTitle, font: Font.semibold(13.0), textColor: .white))
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width - 6.0 * 2.0 - 8.0, height: 100.0)
|
||||
)
|
||||
let titleFrame = CGRect(origin: CGPoint(x: 6.0, y: availableSize.height - 6.0 - titleSize.height), size: titleSize)
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.layer.anchorPoint = CGPoint()
|
||||
titleView.isUserInteractionEnabled = false
|
||||
self.addSubview(titleView)
|
||||
}
|
||||
transition.setPosition(view: titleView, position: titleFrame.origin)
|
||||
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
|
||||
}
|
||||
|
||||
if let videoDescription = component.isPresentation ? component.participant.presentationDescription : component.participant.videoDescription {
|
||||
let videoBackgroundLayer: SimpleLayer
|
||||
if let current = self.videoBackgroundLayer {
|
||||
videoBackgroundLayer = current
|
||||
} else {
|
||||
videoBackgroundLayer = SimpleLayer()
|
||||
videoBackgroundLayer.backgroundColor = UIColor(white: 0.1, alpha: 1.0).cgColor
|
||||
self.videoBackgroundLayer = videoBackgroundLayer
|
||||
self.layer.insertSublayer(videoBackgroundLayer, above: avatarNode.layer)
|
||||
videoBackgroundLayer.isHidden = true
|
||||
}
|
||||
|
||||
let videoLayer: PrivateCallVideoLayer
|
||||
if let current = self.videoLayer {
|
||||
videoLayer = current
|
||||
} else {
|
||||
videoLayer = PrivateCallVideoLayer()
|
||||
self.videoLayer = videoLayer
|
||||
self.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
|
||||
self.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
|
||||
|
||||
videoLayer.blurredLayer.opacity = 0.25
|
||||
|
||||
if let input = (component.call as! PresentationGroupCallImpl).video(endpointId: videoDescription.endpointId) {
|
||||
let videoSource = AdaptedCallVideoSource(videoStreamSignal: input)
|
||||
self.videoSource = videoSource
|
||||
|
||||
self.videoDisposable?.dispose()
|
||||
self.videoDisposable = videoSource.addOnUpdated { [weak self] in
|
||||
guard let self, let videoSource = self.videoSource, let videoLayer = self.videoLayer else {
|
||||
return
|
||||
}
|
||||
|
||||
let videoOutput = videoSource.currentOutput
|
||||
videoLayer.video = videoOutput
|
||||
|
||||
if let videoOutput {
|
||||
let videoSpec = VideoSpec(resolution: videoOutput.resolution, rotationAngle: videoOutput.rotationAngle)
|
||||
if self.videoSpec != videoSpec {
|
||||
self.videoSpec = videoSpec
|
||||
if !self.isUpdating {
|
||||
self.componentState?.updated(transition: .immediate, isLocal: true)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if self.videoSpec != nil {
|
||||
self.videoSpec = nil
|
||||
if !self.isUpdating {
|
||||
self.componentState?.updated(transition: .immediate, isLocal: true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*var notifyOrientationUpdated = false
|
||||
var notifyIsMirroredUpdated = false
|
||||
|
||||
if !self.didReportFirstFrame {
|
||||
notifyOrientationUpdated = true
|
||||
notifyIsMirroredUpdated = true
|
||||
}
|
||||
|
||||
if let currentOutput = videoOutput {
|
||||
let currentAspect: CGFloat
|
||||
if currentOutput.resolution.height > 0.0 {
|
||||
currentAspect = currentOutput.resolution.width / currentOutput.resolution.height
|
||||
} else {
|
||||
currentAspect = 1.0
|
||||
}
|
||||
if self.currentAspect != currentAspect {
|
||||
self.currentAspect = currentAspect
|
||||
notifyOrientationUpdated = true
|
||||
}
|
||||
|
||||
let currentOrientation: PresentationCallVideoView.Orientation
|
||||
if currentOutput.followsDeviceOrientation {
|
||||
currentOrientation = .rotation0
|
||||
} else {
|
||||
if abs(currentOutput.rotationAngle - 0.0) < .ulpOfOne {
|
||||
currentOrientation = .rotation0
|
||||
} else if abs(currentOutput.rotationAngle - Float.pi * 0.5) < .ulpOfOne {
|
||||
currentOrientation = .rotation90
|
||||
} else if abs(currentOutput.rotationAngle - Float.pi) < .ulpOfOne {
|
||||
currentOrientation = .rotation180
|
||||
} else if abs(currentOutput.rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
|
||||
currentOrientation = .rotation270
|
||||
} else {
|
||||
currentOrientation = .rotation0
|
||||
}
|
||||
}
|
||||
if self.currentOrientation != currentOrientation {
|
||||
self.currentOrientation = currentOrientation
|
||||
notifyOrientationUpdated = true
|
||||
}
|
||||
|
||||
let currentIsMirrored = !currentOutput.mirrorDirection.isEmpty
|
||||
if self.currentIsMirrored != currentIsMirrored {
|
||||
self.currentIsMirrored = currentIsMirrored
|
||||
notifyIsMirroredUpdated = true
|
||||
}
|
||||
}
|
||||
|
||||
if !self.didReportFirstFrame {
|
||||
self.didReportFirstFrame = true
|
||||
self.onFirstFrameReceived?(Float(self.currentAspect))
|
||||
}
|
||||
|
||||
if notifyOrientationUpdated {
|
||||
self.onOrientationUpdated?(self.currentOrientation, self.currentAspect)
|
||||
}
|
||||
|
||||
if notifyIsMirroredUpdated {
|
||||
self.onIsMirroredUpdated?(self.currentIsMirrored)
|
||||
}*/
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
transition.setFrame(layer: videoBackgroundLayer, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
|
||||
if let videoSpec = self.videoSpec {
|
||||
videoBackgroundLayer.isHidden = component.isSelected
|
||||
videoLayer.blurredLayer.isHidden = component.isSelected
|
||||
videoLayer.isHidden = component.isSelected
|
||||
|
||||
let rotatedResolution = videoSpec.resolution
|
||||
let videoSize = rotatedResolution.aspectFilled(availableSize)
|
||||
let videoFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - videoSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
|
||||
let blurredVideoSize = rotatedResolution.aspectFilled(availableSize)
|
||||
let blurredVideoFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - blurredVideoSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - blurredVideoSize.height) * 0.5)), size: blurredVideoSize)
|
||||
|
||||
let videoResolution = rotatedResolution.aspectFitted(CGSize(width: availableSize.width * 3.0, height: availableSize.height * 3.0))
|
||||
let rotatedVideoResolution = videoResolution
|
||||
|
||||
transition.setPosition(layer: videoLayer, position: videoFrame.center)
|
||||
transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: videoFrame.size))
|
||||
videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
|
||||
|
||||
transition.setPosition(layer: videoLayer.blurredLayer, position: blurredVideoFrame.center)
|
||||
transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: blurredVideoFrame.size))
|
||||
}
|
||||
} else {
|
||||
if let videoBackgroundLayer = self.videoBackgroundLayer {
|
||||
self.videoBackgroundLayer = nil
|
||||
videoBackgroundLayer.removeFromSuperlayer()
|
||||
}
|
||||
if let videoLayer = self.videoLayer {
|
||||
self.videoLayer = nil
|
||||
videoLayer.blurredLayer.removeFromSuperlayer()
|
||||
videoLayer.removeFromSuperlayer()
|
||||
}
|
||||
self.videoDisposable?.dispose()
|
||||
self.videoDisposable = nil
|
||||
self.videoSource = nil
|
||||
self.videoSpec = nil
|
||||
}
|
||||
|
||||
if component.isSelected {
|
||||
let selectedBorderView: UIImageView
|
||||
if let current = self.selectedBorderView {
|
||||
selectedBorderView = current
|
||||
} else {
|
||||
selectedBorderView = UIImageView()
|
||||
self.selectedBorderView = selectedBorderView
|
||||
self.addSubview(selectedBorderView)
|
||||
selectedBorderView.image = View.selectedBorderImage
|
||||
}
|
||||
selectedBorderView.tintColor = component.theme.list.itemAccentColor
|
||||
selectedBorderView.frame = CGRect(origin: CGPoint(), size: availableSize)
|
||||
} else {
|
||||
if let selectedBorderView = self.selectedBorderView {
|
||||
self.selectedBorderView = nil
|
||||
selectedBorderView.removeFromSuperview()
|
||||
}
|
||||
}
|
||||
|
||||
return availableSize
|
||||
}
|
||||
}
|
||||
|
||||
func makeView() -> View {
|
||||
return View(frame: CGRect())
|
||||
}
|
||||
|
||||
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
|
||||
|
||||
final class VideoChatExpandedParticipantThumbnailsComponent: Component {
|
||||
final class Participant: Equatable {
|
||||
struct Key: Hashable {
|
||||
var id: EnginePeer.Id
|
||||
var isPresentation: Bool
|
||||
|
||||
init(id: EnginePeer.Id, isPresentation: Bool) {
|
||||
self.id = id
|
||||
self.isPresentation = isPresentation
|
||||
}
|
||||
}
|
||||
|
||||
let participant: GroupCallParticipantsContext.Participant
|
||||
let isPresentation: Bool
|
||||
|
||||
var key: Key {
|
||||
return Key(id: self.participant.peer.id, isPresentation: self.isPresentation)
|
||||
}
|
||||
|
||||
init(
|
||||
participant: GroupCallParticipantsContext.Participant,
|
||||
isPresentation: Bool
|
||||
) {
|
||||
self.participant = participant
|
||||
self.isPresentation = isPresentation
|
||||
}
|
||||
|
||||
static func ==(lhs: Participant, rhs: Participant) -> Bool {
|
||||
if lhs === rhs {
|
||||
return true
|
||||
}
|
||||
if lhs.participant != rhs.participant {
|
||||
return false
|
||||
}
|
||||
if lhs.isPresentation != rhs.isPresentation {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
let call: PresentationGroupCall
|
||||
let theme: PresentationTheme
|
||||
let participants: [Participant]
|
||||
let selectedParticipant: Participant.Key?
|
||||
let updateSelectedParticipant: (Participant.Key) -> Void
|
||||
|
||||
init(
|
||||
call: PresentationGroupCall,
|
||||
theme: PresentationTheme,
|
||||
participants: [Participant],
|
||||
selectedParticipant: Participant.Key?,
|
||||
updateSelectedParticipant: @escaping (Participant.Key) -> Void
|
||||
) {
|
||||
self.call = call
|
||||
self.theme = theme
|
||||
self.participants = participants
|
||||
self.selectedParticipant = selectedParticipant
|
||||
self.updateSelectedParticipant = updateSelectedParticipant
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatExpandedParticipantThumbnailsComponent, rhs: VideoChatExpandedParticipantThumbnailsComponent) -> Bool {
|
||||
if lhs.call !== rhs.call {
|
||||
return false
|
||||
}
|
||||
if lhs.theme !== rhs.theme {
|
||||
return false
|
||||
}
|
||||
if lhs.participants != rhs.participants {
|
||||
return false
|
||||
}
|
||||
if lhs.selectedParticipant != rhs.selectedParticipant {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
private final class ScrollView: UIScrollView {
|
||||
override func touchesShouldCancel(in view: UIView) -> Bool {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
private struct ItemLayout {
|
||||
let containerSize: CGSize
|
||||
let containerInsets: UIEdgeInsets
|
||||
let itemCount: Int
|
||||
let itemSize: CGSize
|
||||
let itemSpacing: CGFloat
|
||||
|
||||
let contentSize: CGSize
|
||||
|
||||
init(containerSize: CGSize, containerInsets: UIEdgeInsets, itemCount: Int) {
|
||||
self.containerSize = containerSize
|
||||
self.containerInsets = containerInsets
|
||||
self.itemCount = itemCount
|
||||
self.itemSize = CGSize(width: 84.0, height: 84.0)
|
||||
self.itemSpacing = 6.0
|
||||
|
||||
let itemsWidth: CGFloat = CGFloat(itemCount) * self.itemSize.width + CGFloat(max(itemCount - 1, 0)) * self.itemSpacing
|
||||
self.contentSize = CGSize(width: self.containerInsets.left + self.containerInsets.right + itemsWidth, height: self.containerInsets.top + self.containerInsets.bottom + self.itemSize.height)
|
||||
}
|
||||
|
||||
func frame(at index: Int) -> CGRect {
|
||||
let frame = CGRect(origin: CGPoint(x: self.containerInsets.left + CGFloat(index) * (self.itemSize.width + self.itemSpacing), y: self.containerInsets.top), size: self.itemSize)
|
||||
return frame
|
||||
}
|
||||
|
||||
func visibleItemRange(for rect: CGRect) -> (minIndex: Int, maxIndex: Int) {
|
||||
if self.itemCount == 0 {
|
||||
return (0, -1)
|
||||
}
|
||||
let offsetRect = rect.offsetBy(dx: -self.containerInsets.left, dy: 0.0)
|
||||
var minVisibleRow = Int(floor((offsetRect.minY) / (self.itemSize.width)))
|
||||
minVisibleRow = max(0, minVisibleRow)
|
||||
let maxVisibleRow = Int(ceil((offsetRect.maxY) / (self.itemSize.width)))
|
||||
|
||||
let minVisibleIndex = minVisibleRow
|
||||
let maxVisibleIndex = min(self.itemCount - 1, (maxVisibleRow + 1) - 1)
|
||||
|
||||
return (minVisibleIndex, maxVisibleIndex)
|
||||
}
|
||||
}
|
||||
|
||||
private final class VisibleItem {
|
||||
let view = ComponentView<Empty>()
|
||||
|
||||
init() {
|
||||
}
|
||||
}
|
||||
|
||||
final class View: UIView, UIScrollViewDelegate {
|
||||
private let scrollView: ScrollView
|
||||
|
||||
private var component: VideoChatExpandedParticipantThumbnailsComponent?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private var ignoreScrolling: Bool = false
|
||||
|
||||
private var itemLayout: ItemLayout?
|
||||
private var visibleItems: [Participant.Key: VisibleItem] = [:]
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.scrollView = ScrollView()
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.scrollView.delaysContentTouches = false
|
||||
self.scrollView.canCancelContentTouches = true
|
||||
self.scrollView.clipsToBounds = false
|
||||
self.scrollView.contentInsetAdjustmentBehavior = .never
|
||||
if #available(iOS 13.0, *) {
|
||||
self.scrollView.automaticallyAdjustsScrollIndicatorInsets = false
|
||||
}
|
||||
self.scrollView.showsVerticalScrollIndicator = false
|
||||
self.scrollView.showsHorizontalScrollIndicator = false
|
||||
self.scrollView.alwaysBounceHorizontal = false
|
||||
self.scrollView.alwaysBounceVertical = false
|
||||
self.scrollView.scrollsToTop = false
|
||||
self.scrollView.delegate = self
|
||||
self.scrollView.clipsToBounds = true
|
||||
|
||||
self.addSubview(self.scrollView)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
func scrollViewDidScroll(_ scrollView: UIScrollView) {
|
||||
if !self.ignoreScrolling {
|
||||
self.updateScrolling(transition: .immediate)
|
||||
}
|
||||
}
|
||||
|
||||
private func updateScrolling(transition: ComponentTransition) {
|
||||
guard let component = self.component, let itemLayout = self.itemLayout else {
|
||||
return
|
||||
}
|
||||
|
||||
var validListItemIds: [Participant.Key] = []
|
||||
let visibleListItemRange = itemLayout.visibleItemRange(for: self.scrollView.bounds)
|
||||
if visibleListItemRange.maxIndex >= visibleListItemRange.minIndex {
|
||||
for i in visibleListItemRange.minIndex ... visibleListItemRange.maxIndex {
|
||||
let participant = component.participants[i]
|
||||
validListItemIds.append(participant.key)
|
||||
|
||||
var itemTransition = transition
|
||||
let itemView: VisibleItem
|
||||
if let current = self.visibleItems[participant.key] {
|
||||
itemView = current
|
||||
} else {
|
||||
itemTransition = itemTransition.withAnimation(.none)
|
||||
itemView = VisibleItem()
|
||||
self.visibleItems[participant.key] = itemView
|
||||
}
|
||||
|
||||
let itemFrame = itemLayout.frame(at: i)
|
||||
|
||||
let participantKey = participant.key
|
||||
let _ = itemView.view.update(
|
||||
transition: itemTransition,
|
||||
component: AnyComponent(VideoChatParticipantThumbnailComponent(
|
||||
call: component.call,
|
||||
theme: component.theme,
|
||||
participant: participant.participant,
|
||||
isPresentation: participant.isPresentation,
|
||||
isSelected: component.selectedParticipant == participant.key,
|
||||
action: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
component.updateSelectedParticipant(participantKey)
|
||||
}
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: itemFrame.size
|
||||
)
|
||||
if let itemComponentView = itemView.view.view {
|
||||
if itemComponentView.superview == nil {
|
||||
itemComponentView.clipsToBounds = true
|
||||
|
||||
self.scrollView.addSubview(itemComponentView)
|
||||
|
||||
if !transition.animation.isImmediate {
|
||||
itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
|
||||
transition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
|
||||
}
|
||||
}
|
||||
transition.setFrame(view: itemComponentView, frame: itemFrame)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var removedListItemIds: [Participant.Key] = []
|
||||
for (itemId, itemView) in self.visibleItems {
|
||||
if !validListItemIds.contains(itemId) {
|
||||
removedListItemIds.append(itemId)
|
||||
|
||||
if let itemComponentView = itemView.view.view {
|
||||
if !transition.animation.isImmediate {
|
||||
transition.setScale(view: itemComponentView, scale: 0.001)
|
||||
itemComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemComponentView] _ in
|
||||
itemComponentView?.removeFromSuperview()
|
||||
})
|
||||
} else {
|
||||
itemComponentView.removeFromSuperview()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for itemId in removedListItemIds {
|
||||
self.visibleItems.removeValue(forKey: itemId)
|
||||
}
|
||||
}
|
||||
|
||||
func update(component: VideoChatExpandedParticipantThumbnailsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
|
||||
self.isUpdating = true
|
||||
defer {
|
||||
self.isUpdating = false
|
||||
}
|
||||
|
||||
self.component = component
|
||||
|
||||
let itemLayout = ItemLayout(
|
||||
containerSize: availableSize,
|
||||
containerInsets: UIEdgeInsets(top: 10.0, left: 10.0, bottom: 10.0, right: 10.0),
|
||||
itemCount: component.participants.count
|
||||
)
|
||||
self.itemLayout = itemLayout
|
||||
|
||||
let size = CGSize(width: availableSize.width, height: itemLayout.contentSize.height)
|
||||
|
||||
self.ignoreScrolling = true
|
||||
if self.scrollView.bounds.size != size {
|
||||
transition.setFrame(view: self.scrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: size))
|
||||
}
|
||||
let contentSize = CGSize(width: itemLayout.contentSize.width, height: size.height)
|
||||
if self.scrollView.contentSize != contentSize {
|
||||
self.scrollView.contentSize = contentSize
|
||||
}
|
||||
self.ignoreScrolling = false
|
||||
|
||||
self.updateScrolling(transition: transition)
|
||||
|
||||
return size
|
||||
}
|
||||
}
|
||||
|
||||
func makeView() -> View {
|
||||
return View()
|
||||
}
|
||||
|
||||
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
|
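For orientation, the ItemLayout of the thumbnail strip above is a plain horizontal row: fixed 84x84 items, 6 pt spacing, and 10 pt container insets on every side, so the content size and each item frame are simple linear functions of the index. A small worked example with the numbers from the initializer; the constants are restated here only for illustration:

import UIKit

// Worked example of ItemLayout's arithmetic for a strip of five thumbnails.
let itemWidth: CGFloat = 84.0
let itemSpacing: CGFloat = 6.0
let inset: CGFloat = 10.0
let itemCount = 5

let itemsWidth = CGFloat(itemCount) * itemWidth + CGFloat(max(itemCount - 1, 0)) * itemSpacing // 444
let contentWidth = inset + itemsWidth + inset                                                  // 464
let contentHeight = inset + 84.0 + inset                                                       // 104
let thirdItemX = inset + CGFloat(2) * (itemWidth + itemSpacing)                                // frame(at: 2).minX == 190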
@@ -5,6 +5,7 @@ import ComponentFlow
|
||||
import MultilineTextComponent
|
||||
import TelegramPresentationData
|
||||
import LottieComponent
|
||||
import VoiceChatActionButton
|
||||
|
||||
final class VideoChatMicButtonComponent: Component {
|
||||
enum Content {
|
||||
@ -15,13 +16,16 @@ final class VideoChatMicButtonComponent: Component {
|
||||
|
||||
let content: Content
|
||||
let isCollapsed: Bool
|
||||
let updateUnmutedStateIsPushToTalk: (Bool?) -> Void
|
||||
|
||||
init(
|
||||
content: Content,
|
||||
isCollapsed: Bool
|
||||
isCollapsed: Bool,
|
||||
updateUnmutedStateIsPushToTalk: @escaping (Bool?) -> Void
|
||||
) {
|
||||
self.content = content
|
||||
self.isCollapsed = isCollapsed
|
||||
self.updateUnmutedStateIsPushToTalk = updateUnmutedStateIsPushToTalk
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatMicButtonComponent, rhs: VideoChatMicButtonComponent) -> Bool {
|
||||
@ -36,16 +40,67 @@ final class VideoChatMicButtonComponent: Component {
|
||||
|
||||
final class View: HighlightTrackingButton {
|
||||
private let background = ComponentView<Empty>()
|
||||
private let icon = ComponentView<Empty>()
|
||||
private let title = ComponentView<Empty>()
|
||||
private let icon: VoiceChatActionButtonIconNode
|
||||
|
||||
private var component: VideoChatMicButtonComponent?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private var beginTrackingTimestamp: Double = 0.0
|
||||
private var beginTrackingWasPushToTalk: Bool = false
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.icon = VoiceChatActionButtonIconNode(isColored: false)
|
||||
|
||||
super.init(frame: frame)
|
||||
}
|
||||
|
||||
override func beginTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
|
||||
self.beginTrackingTimestamp = CFAbsoluteTimeGetCurrent()
|
||||
if let component = self.component {
|
||||
switch component.content {
|
||||
case .connecting:
|
||||
self.beginTrackingWasPushToTalk = false
|
||||
case .muted:
|
||||
self.beginTrackingWasPushToTalk = true
|
||||
component.updateUnmutedStateIsPushToTalk(true)
|
||||
case .unmuted:
|
||||
self.beginTrackingWasPushToTalk = false
|
||||
}
|
||||
}
|
||||
|
||||
return super.beginTracking(touch, with: event)
|
||||
}
|
||||
|
||||
override func endTracking(_ touch: UITouch?, with event: UIEvent?) {
|
||||
if let component = self.component {
|
||||
let timestamp = CFAbsoluteTimeGetCurrent()
|
||||
|
||||
switch component.content {
|
||||
case .connecting:
|
||||
break
|
||||
case .muted:
|
||||
component.updateUnmutedStateIsPushToTalk(false)
|
||||
case .unmuted:
|
||||
if self.beginTrackingWasPushToTalk {
|
||||
if timestamp < self.beginTrackingTimestamp + 0.15 {
|
||||
component.updateUnmutedStateIsPushToTalk(false)
|
||||
} else {
|
||||
component.updateUnmutedStateIsPushToTalk(nil)
|
||||
}
|
||||
} else {
|
||||
component.updateUnmutedStateIsPushToTalk(nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return super.endTracking(touch, with: event)
|
||||
}
|
||||
|
||||
override func cancelTracking(with event: UIEvent?) {
|
||||
return super.cancelTracking(with: event)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
@ -97,6 +152,7 @@ final class VideoChatMicButtonComponent: Component {
|
||||
)
|
||||
if let backgroundView = self.background.view {
|
||||
if backgroundView.superview == nil {
|
||||
backgroundView.isUserInteractionEnabled = false
|
||||
self.addSubview(backgroundView)
|
||||
}
|
||||
transition.setFrame(view: backgroundView, frame: CGRect(origin: CGPoint(), size: size))
|
||||
@ -105,6 +161,7 @@ final class VideoChatMicButtonComponent: Component {
|
||||
let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: size.height + 16.0), size: titleSize)
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.isUserInteractionEnabled = false
|
||||
self.addSubview(titleView)
|
||||
}
|
||||
transition.setPosition(view: titleView, position: titleFrame.center)
|
||||
@ -112,25 +169,24 @@ final class VideoChatMicButtonComponent: Component {
|
||||
alphaTransition.setAlpha(view: titleView, alpha: component.isCollapsed ? 0.0 : 1.0)
|
||||
}
|
||||
|
||||
let iconSize = self.icon.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(LottieComponent(
|
||||
content: LottieComponent.AppBundleContent(
|
||||
name: "VoiceUnmute"
|
||||
),
|
||||
color: .white
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 100.0, height: 100.0)
|
||||
)
|
||||
if self.icon.view.superview == nil {
|
||||
self.icon.view.isUserInteractionEnabled = false
|
||||
self.addSubview(self.icon.view)
|
||||
}
|
||||
let iconSize = CGSize(width: 100.0, height: 100.0)
|
||||
let iconFrame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) * 0.5), y: floor((size.height - iconSize.height) * 0.5)), size: iconSize)
|
||||
if let iconView = self.icon.view {
|
||||
if iconView.superview == nil {
|
||||
self.addSubview(iconView)
|
||||
}
|
||||
transition.setPosition(view: iconView, position: iconFrame.center)
|
||||
transition.setBounds(view: iconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
|
||||
transition.setScale(view: iconView, scale: component.isCollapsed ? ((iconSize.width - 24.0) / iconSize.width) : 1.0)
|
||||
|
||||
transition.setPosition(view: self.icon.view, position: iconFrame.center)
|
||||
transition.setBounds(view: self.icon.view, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
|
||||
transition.setScale(view: self.icon.view, scale: component.isCollapsed ? ((iconSize.width - 24.0) / iconSize.width) : 1.0)
|
||||
|
||||
switch component.content {
|
||||
case .connecting:
|
||||
self.icon.enqueueState(.mute)
|
||||
case .muted:
|
||||
self.icon.enqueueState(.mute)
|
||||
case .unmuted:
|
||||
self.icon.enqueueState(.unmute)
|
||||
}
|
||||
|
||||
return size
|
||||
|
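The tracking overrides added to VideoChatMicButtonComponent above implement a combined tap and hold gesture on the mic button: pressing while muted immediately requests unmute via updateUnmutedStateIsPushToTalk(true); on release, a press shorter than 0.15 s is reported as false (a plain tap that appears to latch the unmuted state), while a longer hold is reported as nil (push-to-talk that ends with the release). The release-time decision in isolation, as a sketch; the threshold and names come from the diff and the meaning of the Bool? values is defined by the component's caller:

// Illustrative sketch of the endTracking decision for a press that started while muted.
let pressDuration = CFAbsoluteTimeGetCurrent() - self.beginTrackingTimestamp
if self.beginTrackingWasPushToTalk {
    if pressDuration < 0.15 {
        component.updateUnmutedStateIsPushToTalk(false) // short press: treat as a tap
    } else {
        component.updateUnmutedStateIsPushToTalk(nil)   // long press: push-to-talk ended
    }
} else {
    component.updateUnmutedStateIsPushToTalk(nil)
}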
@@ -0,0 +1,78 @@
|
||||
import Foundation
|
||||
import UIKit
|
||||
import Display
|
||||
import ComponentFlow
|
||||
import MultilineTextComponent
|
||||
import TelegramPresentationData
|
||||
import AppBundle
|
||||
import LottieComponent
|
||||
|
||||
final class VideoChatMuteIconComponent: Component {
|
||||
let color: UIColor
|
||||
let isMuted: Bool
|
||||
|
||||
init(
|
||||
color: UIColor,
|
||||
isMuted: Bool
|
||||
) {
|
||||
self.color = color
|
||||
self.isMuted = isMuted
|
||||
}
|
||||
|
||||
static func ==(lhs: VideoChatMuteIconComponent, rhs: VideoChatMuteIconComponent) -> Bool {
|
||||
if lhs.color != rhs.color {
|
||||
return false
|
||||
}
|
||||
if lhs.isMuted != rhs.isMuted {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
final class View: HighlightTrackingButton {
|
||||
private let icon: VoiceChatMicrophoneNode
|
||||
|
||||
private var component: VideoChatMuteIconComponent?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private var contentImage: UIImage?
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.icon = VoiceChatMicrophoneNode()
|
||||
|
||||
super.init(frame: frame)
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
fatalError("init(coder:) has not been implemented")
|
||||
}
|
||||
|
||||
func update(component: VideoChatMuteIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
|
||||
self.isUpdating = true
|
||||
defer {
|
||||
self.isUpdating = false
|
||||
}
|
||||
|
||||
self.component = component
|
||||
|
||||
let animationSize = availableSize
|
||||
|
||||
let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize))
|
||||
if self.icon.view.superview == nil {
|
||||
self.addSubview(self.icon.view)
|
||||
}
|
||||
transition.setFrame(view: self.icon.view, frame: animationFrame)
|
||||
self.icon.update(state: VoiceChatMicrophoneNode.State(muted: component.isMuted, filled: true, color: component.color), animated: !transition.animation.isImmediate)
|
||||
|
||||
return availableSize
|
||||
}
|
||||
}
|
||||
|
||||
func makeView() -> View {
|
||||
return View()
|
||||
}
|
||||
|
||||
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
|
||||
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
|
||||
}
|
||||
}
|
@@ -10,33 +10,47 @@ import CallScreen
|
||||
import TelegramCore
|
||||
import AccountContext
|
||||
import SwiftSignalKit
|
||||
import DirectMediaImageCache
|
||||
import FastBlur
|
||||
|
||||
private func blurredAvatarImage(_ dataImage: UIImage) -> UIImage? {
|
||||
let imageContextSize = CGSize(width: 64.0, height: 64.0)
|
||||
if let imageContext = DrawingContext(size: imageContextSize, scale: 1.0, clear: true) {
|
||||
imageContext.withFlippedContext { c in
|
||||
if let cgImage = dataImage.cgImage {
|
||||
c.draw(cgImage, in: CGRect(origin: CGPoint(), size: imageContextSize))
|
||||
}
|
||||
}
|
||||
|
||||
telegramFastBlurMore(Int32(imageContext.size.width * imageContext.scale), Int32(imageContext.size.height * imageContext.scale), Int32(imageContext.bytesPerRow), imageContext.bytes)
|
||||
|
||||
return imageContext.generateImage()
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
final class VideoChatParticipantVideoComponent: Component {
|
||||
struct ExpandedState: Equatable {
|
||||
var isPinned: Bool
|
||||
|
||||
init(isPinned: Bool) {
|
||||
self.isPinned = isPinned
|
||||
}
|
||||
}
|
||||
|
||||
let call: PresentationGroupCall
|
||||
let participant: GroupCallParticipantsContext.Participant
|
||||
let isPresentation: Bool
|
||||
let expandedState: ExpandedState?
|
||||
let isExpanded: Bool
|
||||
let bottomInset: CGFloat
|
||||
let action: (() -> Void)?
|
||||
|
||||
init(
|
||||
call: PresentationGroupCall,
|
||||
participant: GroupCallParticipantsContext.Participant,
|
||||
isPresentation: Bool,
|
||||
expandedState: ExpandedState?,
|
||||
isExpanded: Bool,
|
||||
bottomInset: CGFloat,
|
||||
action: (() -> Void)?
|
||||
) {
|
||||
self.call = call
|
||||
self.participant = participant
|
||||
self.isPresentation = isPresentation
|
||||
self.expandedState = expandedState
|
||||
self.isExpanded = isExpanded
|
||||
self.bottomInset = bottomInset
|
||||
self.action = action
|
||||
}
|
||||
|
||||
@ -47,7 +61,10 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
if lhs.isPresentation != rhs.isPresentation {
|
||||
return false
|
||||
}
|
||||
if lhs.expandedState != rhs.expandedState {
|
||||
if lhs.isExpanded != rhs.isExpanded {
|
||||
return false
|
||||
}
|
||||
if lhs.bottomInset != rhs.bottomInset {
|
||||
return false
|
||||
}
|
||||
if (lhs.action == nil) != (rhs.action == nil) {
|
||||
@ -71,8 +88,12 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
private weak var componentState: EmptyComponentState?
|
||||
private var isUpdating: Bool = false
|
||||
|
||||
private let muteStatus = ComponentView<Empty>()
|
||||
private let title = ComponentView<Empty>()
|
||||
|
||||
private var blurredAvatarDisposable: Disposable?
|
||||
private var blurredAvatarView: UIImageView?
|
||||
|
||||
private var videoSource: AdaptedCallVideoSource?
|
||||
private var videoDisposable: Disposable?
|
||||
private var videoBackgroundLayer: SimpleLayer?
|
||||
@ -95,6 +116,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
|
||||
deinit {
|
||||
self.videoDisposable?.dispose()
|
||||
self.blurredAvatarDisposable?.dispose()
|
||||
}
|
||||
|
||||
@objc private func pressed() {
|
||||
@ -115,17 +137,95 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
|
||||
let nameColor = component.participant.peer.nameColor ?? .blue
|
||||
let nameColors = component.call.accountContext.peerNameColors.get(nameColor, dark: true)
|
||||
self.backgroundColor = nameColors.main
|
||||
self.backgroundColor = nameColors.main.withMultiplied(hue: 1.0, saturation: 1.0, brightness: 0.4)
|
||||
|
||||
if let smallProfileImage = component.participant.peer.smallProfileImage {
|
||||
let blurredAvatarView: UIImageView
|
||||
if let current = self.blurredAvatarView {
|
||||
blurredAvatarView = current
|
||||
|
||||
transition.setFrame(view: blurredAvatarView, frame: CGRect(origin: CGPoint(), size: availableSize))
|
||||
} else {
|
||||
blurredAvatarView = UIImageView()
|
||||
blurredAvatarView.contentMode = .scaleAspectFill
|
||||
self.blurredAvatarView = blurredAvatarView
|
||||
self.insertSubview(blurredAvatarView, at: 0)
|
||||
|
||||
blurredAvatarView.frame = CGRect(origin: CGPoint(), size: availableSize)
|
||||
}
|
||||
|
||||
if self.blurredAvatarDisposable == nil {
|
||||
//TODO:release synchronous
|
||||
if let imageCache = component.call.accountContext.imageCache as? DirectMediaImageCache, let peerReference = PeerReference(component.participant.peer) {
|
||||
if let result = imageCache.getAvatarImage(peer: peerReference, resource: MediaResourceReference.avatar(peer: peerReference, resource: smallProfileImage.resource), immediateThumbnail: component.participant.peer.profileImageRepresentations.first?.immediateThumbnailData, size: 64, synchronous: false) {
|
||||
if let image = result.image {
|
||||
blurredAvatarView.image = blurredAvatarImage(image)
|
||||
}
|
||||
if let loadSignal = result.loadSignal {
|
||||
self.blurredAvatarDisposable = (loadSignal
|
||||
|> deliverOnMainQueue).startStrict(next: { [weak self] image in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if let image {
|
||||
self.blurredAvatarView?.image = blurredAvatarImage(image)
|
||||
} else {
|
||||
self.blurredAvatarView?.image = nil
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let blurredAvatarView = self.blurredAvatarView {
|
||||
self.blurredAvatarView = nil
|
||||
blurredAvatarView.removeFromSuperview()
|
||||
}
|
||||
if let blurredAvatarDisposable = self.blurredAvatarDisposable {
|
||||
self.blurredAvatarDisposable = nil
|
||||
blurredAvatarDisposable.dispose()
|
||||
}
|
||||
}
|
||||
|
||||
let muteStatusSize = self.muteStatus.update(
|
||||
transition: transition,
|
||||
component: AnyComponent(VideoChatMuteIconComponent(
|
||||
color: .white,
|
||||
isMuted: component.participant.muteState != nil
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: 36.0, height: 36.0)
|
||||
)
|
||||
let muteStatusFrame: CGRect
|
||||
if component.isExpanded {
|
||||
muteStatusFrame = CGRect(origin: CGPoint(x: 5.0, y: availableSize.height - component.bottomInset + 1.0 - muteStatusSize.height), size: muteStatusSize)
|
||||
} else {
|
||||
muteStatusFrame = CGRect(origin: CGPoint(x: 1.0, y: availableSize.height - component.bottomInset + 3.0 - muteStatusSize.height), size: muteStatusSize)
|
||||
}
|
||||
if let muteStatusView = self.muteStatus.view {
|
||||
if muteStatusView.superview == nil {
|
||||
self.addSubview(muteStatusView)
|
||||
}
|
||||
transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
|
||||
transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
|
||||
transition.setScale(view: muteStatusView, scale: component.isExpanded ? 1.0 : 0.7)
|
||||
}
|
||||
|
||||
let titleSize = self.title.update(
|
||||
transition: .immediate,
|
||||
component: AnyComponent(MultilineTextComponent(
|
||||
text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.regular(14.0), textColor: .white))
|
||||
text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white))
|
||||
)),
|
||||
environment: {},
|
||||
containerSize: CGSize(width: availableSize.width - 8.0 * 2.0, height: 100.0)
|
||||
)
|
||||
let titleFrame = CGRect(origin: CGPoint(x: 8.0, y: availableSize.height - 8.0 - titleSize.height), size: titleSize)
|
||||
let titleFrame: CGRect
|
||||
if component.isExpanded {
|
||||
titleFrame = CGRect(origin: CGPoint(x: 36.0, y: availableSize.height - component.bottomInset - 8.0 - titleSize.height), size: titleSize)
|
||||
} else {
|
||||
titleFrame = CGRect(origin: CGPoint(x: 29.0, y: availableSize.height - component.bottomInset - 4.0 - titleSize.height), size: titleSize)
|
||||
}
|
||||
if let titleView = self.title.view {
|
||||
if titleView.superview == nil {
|
||||
titleView.layer.anchorPoint = CGPoint()
|
||||
@ -133,6 +233,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
}
|
||||
transition.setPosition(view: titleView, position: titleFrame.origin)
|
||||
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
|
||||
transition.setScale(view: titleView, scale: component.isExpanded ? 1.0 : 0.825)
|
||||
}
|
||||
|
||||
if let videoDescription = component.isPresentation ? component.participant.presentationDescription : component.participant.videoDescription {
|
||||
|
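While a participant tile has no video, the component above falls back to the participant's name color darkened (brightness multiplied by 0.4) and, when an avatar exists, a heavily blurred copy of that avatar: the image comes from DirectMediaImageCache, is drawn into a 64x64 DrawingContext, and is blurred with telegramFastBlurMore by the new blurredAvatarImage helper at the top of this file. Condensed into one sketch; identifiers are from the diff, and error handling plus the asynchronous load path are omitted:

// Illustrative sketch: produce a blurred backdrop from a peer's small avatar.
if let imageCache = call.accountContext.imageCache as? DirectMediaImageCache,
   let peerReference = PeerReference(participant.peer),
   let smallProfileImage = participant.peer.smallProfileImage,
   let result = imageCache.getAvatarImage(
       peer: peerReference,
       resource: MediaResourceReference.avatar(peer: peerReference, resource: smallProfileImage.resource),
       immediateThumbnail: participant.peer.profileImageRepresentations.first?.immediateThumbnailData,
       size: 64,
       synchronous: false
   ),
   let image = result.image {
    blurredAvatarView.image = blurredAvatarImage(image) // 64x64 draw + telegramFastBlurMore
}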
@@ -237,6 +237,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
}
|
||||
|
||||
let containerSize: CGSize
|
||||
let collapsedContainerInsets: UIEdgeInsets
|
||||
let sideInset: CGFloat
|
||||
let grid: Grid
|
||||
let expandedGrid: ExpandedGrid
|
||||
@ -247,6 +248,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
|
||||
init(containerSize: CGSize, sideInset: CGFloat, collapsedContainerInsets: UIEdgeInsets, expandedContainerInsets: UIEdgeInsets, gridItemCount: Int, listItemCount: Int, listItemHeight: CGFloat, listTrailingItemHeight: CGFloat) {
|
||||
self.containerSize = containerSize
|
||||
self.collapsedContainerInsets = collapsedContainerInsets
|
||||
self.sideInset = sideInset
|
||||
|
||||
self.grid = Grid(containerSize: CGSize(width: containerSize.width - sideInset * 2.0, height: containerSize.height), sideInset: 0.0, itemCount: gridItemCount)
|
||||
@ -265,12 +267,13 @@ final class VideoChatParticipantsComponent: Component {
|
||||
}
|
||||
|
||||
func contentHeight() -> CGFloat {
|
||||
var result: CGFloat = 0.0
|
||||
var result: CGFloat = self.gridOffsetY
|
||||
if self.grid.itemCount != 0 {
|
||||
result += self.grid.contentHeight()
|
||||
result += self.spacing
|
||||
}
|
||||
result += self.list.contentHeight()
|
||||
result += self.collapsedContainerInsets.bottom
|
||||
return result
|
||||
}
|
||||
|
||||
@ -336,6 +339,14 @@ final class VideoChatParticipantsComponent: Component {
|
||||
self.key = key
|
||||
}
|
||||
}
|
||||
|
||||
private final class ListItem {
|
||||
let view = ComponentView<Empty>()
|
||||
let separatorLayer = SimpleLayer()
|
||||
|
||||
init() {
|
||||
}
|
||||
}
|
||||
|
||||
final class View: UIView, UIScrollViewDelegate {
|
||||
private let scollViewClippingContainer: UIView
|
||||
@ -356,11 +367,13 @@ final class VideoChatParticipantsComponent: Component {
|
||||
private let gridItemViewContainer: UIView
|
||||
|
||||
private let expandedGridItemContainer: UIView
|
||||
private var expandedGridItemView: GridItem?
|
||||
private var expandedControlsView: ComponentView<Empty>?
|
||||
private var expandedThumbnailsView: ComponentView<Empty>?
|
||||
|
||||
private var listItemViews: [EnginePeer.Id: ComponentView<Empty>] = [:]
|
||||
private var listItemViews: [EnginePeer.Id: ListItem] = [:]
|
||||
private let listItemViewContainer: UIView
|
||||
private let listItemsBackround = ComponentView<Empty>()
|
||||
private let listItemViewSeparatorContainer: SimpleLayer
|
||||
private let listItemsBackground = ComponentView<Empty>()
|
||||
|
||||
private var itemLayout: ItemLayout?
|
||||
|
||||
@ -377,6 +390,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
|
||||
self.listItemViewContainer = UIView()
|
||||
self.listItemViewContainer.clipsToBounds = true
|
||||
self.listItemViewSeparatorContainer = SimpleLayer()
|
||||
|
||||
self.expandedGridItemContainer = UIView()
|
||||
self.expandedGridItemContainer.clipsToBounds = true
|
||||
@ -445,6 +459,15 @@ final class VideoChatParticipantsComponent: Component {
let gridIsEmpty = self.gridParticipants.isEmpty
self.appliedGridIsEmpty = gridIsEmpty

var previousExpandedItemId: VideoParticipantKey?
for (key, item) in self.gridItemViews {
if item.view.view?.superview == self.expandedGridItemContainer {
previousExpandedItemId = key
break
}
}

let previousExpandedGridItemContainerFrame = self.expandedGridItemContainer.frame
var expandedGridItemContainerFrame: CGRect
if component.expandedVideoState != nil {
expandedGridItemContainerFrame = itemLayout.expandedGrid.itemContainerFrame()
@ -498,13 +521,29 @@ final class VideoChatParticipantsComponent: Component {
self.gridItemViews[videoParticipantKey] = itemView
}

var expandedItemState: VideoChatParticipantVideoComponent.ExpandedState?
var isItemExpanded = false
if let expandedVideoState = component.expandedVideoState, expandedVideoState.mainParticipant == videoParticipantKey {
expandedItemState = VideoChatParticipantVideoComponent.ExpandedState(isPinned: expandedVideoState.isMainParticipantPinned)
isItemExpanded = true
}

var suppressItemExpansionCollapseAnimation = false
if isItemExpanded {
if let previousExpandedItemId, previousExpandedItemId != videoParticipantKey {
suppressItemExpansionCollapseAnimation = true
}
} else if component.expandedVideoState != nil {
if let previousExpandedItemId, previousExpandedItemId == videoParticipantKey {
suppressItemExpansionCollapseAnimation = true
}
}
var resultingItemTransition = commonGridItemTransition
if suppressItemExpansionCollapseAnimation {
itemTransition = itemTransition.withAnimation(.none)
resultingItemTransition = commonGridItemTransition.withAnimation(.none)
}

let itemFrame: CGRect
if expandedItemState != nil {
if isItemExpanded {
itemFrame = CGRect(origin: CGPoint(), size: itemLayout.expandedGrid.itemContainerFrame().size)
} else {
itemFrame = itemLayout.gridItemFrame(at: index)
@ -516,7 +555,8 @@ final class VideoChatParticipantsComponent: Component {
call: component.call,
participant: videoParticipant.participant,
isPresentation: videoParticipant.isPresentation,
expandedState: expandedItemState,
isExpanded: isItemExpanded,
bottomInset: isItemExpanded ? 96.0 : 0.0,
action: { [weak self] in
guard let self, let component = self.component else {
return
@ -533,26 +573,36 @@ final class VideoChatParticipantsComponent: Component {
)
if let itemComponentView = itemView.view.view {
if itemComponentView.superview == nil {
if expandedItemState != nil {
self.expandedGridItemContainer.addSubview(itemComponentView)
if isItemExpanded {
if let expandedThumbnailsView = self.expandedThumbnailsView?.view {
self.expandedGridItemContainer.insertSubview(itemComponentView, belowSubview: expandedThumbnailsView)
} else {
self.expandedGridItemContainer.addSubview(itemComponentView)
}
} else {
self.gridItemViewContainer.addSubview(itemComponentView)
}

itemComponentView.frame = itemFrame

if !commonGridItemTransition.animation.isImmediate {
commonGridItemTransition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
if !resultingItemTransition.animation.isImmediate {
resultingItemTransition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
}
if !transition.animation.isImmediate {
if !resultingItemTransition.animation.isImmediate {
itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
}
} else if expandedItemState != nil && itemComponentView.superview != self.expandedGridItemContainer {
} else if isItemExpanded && itemComponentView.superview != self.expandedGridItemContainer {
let fromFrame = itemComponentView.convert(itemComponentView.bounds, to: self.expandedGridItemContainer)
itemComponentView.center = fromFrame.center
self.expandedGridItemContainer.addSubview(itemComponentView)
} else if expandedItemState == nil && itemComponentView.superview != self.gridItemViewContainer {
if !itemView.isCollapsing {
if let expandedThumbnailsView = self.expandedThumbnailsView?.view {
self.expandedGridItemContainer.insertSubview(itemComponentView, belowSubview: expandedThumbnailsView)
} else {
self.expandedGridItemContainer.addSubview(itemComponentView)
}
} else if !isItemExpanded && itemComponentView.superview != self.gridItemViewContainer {
if suppressItemExpansionCollapseAnimation {
self.gridItemViewContainer.addSubview(itemComponentView)
} else if !itemView.isCollapsing {
itemView.isCollapsing = true
let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
@ -571,8 +621,8 @@ final class VideoChatParticipantsComponent: Component {
}
}
if !itemView.isCollapsing {
commonGridItemTransition.setPosition(view: itemComponentView, position: itemFrame.center)
commonGridItemTransition.setBounds(view: itemComponentView, bounds: CGRect(origin: CGPoint(), size: itemFrame.size))
resultingItemTransition.setPosition(view: itemComponentView, position: itemFrame.center)
resultingItemTransition.setBounds(view: itemComponentView, bounds: CGRect(origin: CGPoint(), size: itemFrame.size))
}
}
}
@ -608,12 +658,12 @@ final class VideoChatParticipantsComponent: Component {
validListItemIds.append(participant.peer.id)

var itemTransition = transition
let itemView: ComponentView<Empty>
let itemView: ListItem
if let current = self.listItemViews[participant.peer.id] {
itemView = current
} else {
itemTransition = itemTransition.withAnimation(.none)
itemView = ComponentView()
itemView = ListItem()
self.listItemViews[participant.peer.id] = itemView
}

@ -623,10 +673,10 @@ final class VideoChatParticipantsComponent: Component {
if participant.peer.id == component.call.accountContext.account.peerId {
subtitle = PeerListItemComponent.Subtitle(text: "this is you", color: .accent)
} else {
subtitle = PeerListItemComponent.Subtitle(text: "listening", color: .neutral)
subtitle = PeerListItemComponent.Subtitle(text: participant.about ?? "listening", color: .neutral)
}

let _ = itemView.update(
let _ = itemView.view.update(
transition: itemTransition,
component: AnyComponent(PeerListItemComponent(
context: component.call.accountContext,
@ -640,7 +690,7 @@ final class VideoChatParticipantsComponent: Component {
subtitleAccessory: .none,
presence: nil,
selectionState: .none,
hasNext: true,
hasNext: false,
action: { [weak self] peer, _, _ in
guard let self else {
return
@ -652,18 +702,27 @@ final class VideoChatParticipantsComponent: Component {
environment: {},
containerSize: itemFrame.size
)
if let itemComponentView = itemView.view {
let itemSeparatorFrame = CGRect(origin: CGPoint(x: itemFrame.minX + 63.0, y: itemFrame.maxY - UIScreenPixel), size: CGSize(width: itemFrame.width - 63.0, height: UIScreenPixel))
if let itemComponentView = itemView.view.view {
if itemComponentView.superview == nil {
itemComponentView.clipsToBounds = true

itemView.separatorLayer.backgroundColor = component.theme.list.itemBlocksSeparatorColor.blitOver(UIColor(white: 0.1, alpha: 1.0), alpha: 1.0).cgColor

self.listItemViewContainer.addSubview(itemComponentView)
self.listItemViewSeparatorContainer.addSublayer(itemView.separatorLayer)

if !transition.animation.isImmediate {
itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
itemComponentView.frame = CGRect(origin: itemFrame.origin, size: CGSize(width: itemFrame.width, height: 0.0))

var startingItemSeparatorFrame = itemSeparatorFrame
startingItemSeparatorFrame.origin.y = itemFrame.minY - UIScreenPixel
itemView.separatorLayer.frame = startingItemSeparatorFrame
}
}
transition.setFrame(view: itemComponentView, frame: itemFrame)
transition.setFrame(layer: itemView.separatorLayer, frame: itemSeparatorFrame)
}
}
}
@ -673,13 +732,24 @@ final class VideoChatParticipantsComponent: Component {
if !validListItemIds.contains(itemId) {
removedListItemIds.append(itemId)

if let itemComponentView = itemView.view {
if let itemComponentView = itemView.view.view {
let itemSeparatorLayer = itemView.separatorLayer

if !transition.animation.isImmediate {
var itemFrame = itemComponentView.frame
itemFrame.size.height = 0.0
transition.setFrame(view: itemComponentView, frame: itemFrame)
var itemSeparatorFrame = itemSeparatorLayer.frame
itemSeparatorFrame.origin.y = itemFrame.minY - UIScreenPixel
transition.setFrame(layer: itemSeparatorLayer, frame: itemSeparatorFrame, completion: { [weak itemSeparatorLayer] _ in
itemSeparatorLayer?.removeFromSuperlayer()
})
itemComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemComponentView] _ in
itemComponentView?.removeFromSuperview()
})
} else {
itemComponentView.removeFromSuperview()
itemSeparatorLayer.removeFromSuperlayer()
}
}
}
@ -707,8 +777,161 @@ final class VideoChatParticipantsComponent: Component {
transition.setPosition(view: self.gridItemViewContainer, position: CGPoint(x: itemLayout.gridItemContainerFrame().midX, y: itemLayout.gridItemContainerFrame().minY))
transition.setBounds(view: self.gridItemViewContainer, bounds: CGRect(origin: CGPoint(), size: itemLayout.gridItemContainerFrame().size))
transition.setFrame(view: self.listItemViewContainer, frame: itemLayout.listItemContainerFrame())
transition.setFrame(layer: self.listItemViewSeparatorContainer, frame: CGRect(origin: CGPoint(), size: itemLayout.listItemContainerFrame().size))

transition.setFrame(view: self.expandedGridItemContainer, frame: expandedGridItemContainerFrame)

if let expandedVideoState = component.expandedVideoState {
var thumbnailParticipants: [VideoChatExpandedParticipantThumbnailsComponent.Participant] = []
for participant in self.gridParticipants {
thumbnailParticipants.append(VideoChatExpandedParticipantThumbnailsComponent.Participant(
participant: participant.participant,
isPresentation: participant.isPresentation
))
}
/*for participant in self.listParticipants {
thumbnailParticipants.append(VideoChatExpandedParticipantThumbnailsComponent.Participant(
participant: participant,
isPresentation: false
))
}*/

var expandedThumbnailsTransition = transition
let expandedThumbnailsView: ComponentView<Empty>
if let current = self.expandedThumbnailsView {
expandedThumbnailsView = current
} else {
expandedThumbnailsTransition = expandedThumbnailsTransition.withAnimation(.none)
expandedThumbnailsView = ComponentView()
self.expandedThumbnailsView = expandedThumbnailsView
}
let expandedThumbnailsSize = expandedThumbnailsView.update(
transition: expandedThumbnailsTransition,
component: AnyComponent(VideoChatExpandedParticipantThumbnailsComponent(
call: component.call,
theme: component.theme,
participants: thumbnailParticipants,
selectedParticipant: component.expandedVideoState.flatMap { expandedVideoState in
return VideoChatExpandedParticipantThumbnailsComponent.Participant.Key(id: expandedVideoState.mainParticipant.id, isPresentation: expandedVideoState.mainParticipant.isPresentation)
},
updateSelectedParticipant: { [weak self] key in
guard let self, let component = self.component else {
return
}
component.updateMainParticipant(VideoParticipantKey(id: key.id, isPresentation: key.isPresentation))
}
)),
environment: {},
containerSize: itemLayout.expandedGrid.itemContainerFrame().size
)
let expandedThumbnailsFrame = CGRect(origin: CGPoint(x: 0.0, y: expandedGridItemContainerFrame.height - expandedThumbnailsSize.height), size: expandedThumbnailsSize)
if let expandedThumbnailsComponentView = expandedThumbnailsView.view {
if expandedThumbnailsComponentView.superview == nil {
self.expandedGridItemContainer.addSubview(expandedThumbnailsComponentView)

let fromReferenceFrame: CGRect
if let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == expandedVideoState.mainParticipant.id && $0.isPresentation == expandedVideoState.mainParticipant.isPresentation }) {
fromReferenceFrame = self.gridItemViewContainer.convert(itemLayout.gridItemFrame(at: index), to: self.expandedGridItemContainer)
} else {
fromReferenceFrame = previousExpandedGridItemContainerFrame
}

expandedThumbnailsComponentView.frame = CGRect(origin: CGPoint(x: fromReferenceFrame.minX - previousExpandedGridItemContainerFrame.minX, y: fromReferenceFrame.height - expandedThumbnailsSize.height), size: expandedThumbnailsFrame.size)

if !transition.animation.isImmediate {
expandedThumbnailsComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
}
transition.setFrame(view: expandedThumbnailsComponentView, frame: expandedThumbnailsFrame)
}

var expandedControlsTransition = transition
let expandedControlsView: ComponentView<Empty>
if let current = self.expandedControlsView {
expandedControlsView = current
} else {
expandedControlsTransition = expandedControlsTransition.withAnimation(.none)
expandedControlsView = ComponentView()
self.expandedControlsView = expandedControlsView
}
let expandedControlsSize = expandedControlsView.update(
transition: expandedControlsTransition,
component: AnyComponent(VideoChatExpandedControlsComponent(
theme: component.theme,
strings: component.strings,
backAction: { [weak self] in
guard let self, let component = self.component else {
return
}
component.updateMainParticipant(nil)
}
)),
environment: {},
containerSize: itemLayout.expandedGrid.itemContainerFrame().size
)
let expandedControlsFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: expandedControlsSize)
if let expandedControlsComponentView = expandedControlsView.view {
if expandedControlsComponentView.superview == nil {
self.expandedGridItemContainer.addSubview(expandedControlsComponentView)

let fromReferenceFrame: CGRect
if let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == expandedVideoState.mainParticipant.id && $0.isPresentation == expandedVideoState.mainParticipant.isPresentation }) {
fromReferenceFrame = self.gridItemViewContainer.convert(itemLayout.gridItemFrame(at: index), to: self.expandedGridItemContainer)
} else {
fromReferenceFrame = previousExpandedGridItemContainerFrame
}

expandedControlsComponentView.frame = CGRect(origin: CGPoint(x: fromReferenceFrame.minX - previousExpandedGridItemContainerFrame.minX, y: fromReferenceFrame.minY - previousExpandedGridItemContainerFrame.minY), size: expandedControlsFrame.size)

if !transition.animation.isImmediate {
expandedControlsComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
}
transition.setFrame(view: expandedControlsComponentView, frame: expandedControlsFrame)
}
} else {
if let expandedThumbnailsView = self.expandedThumbnailsView {
self.expandedThumbnailsView = nil

if transition.containedViewLayoutTransition.isAnimated, let expandedThumbnailsComponentView = expandedThumbnailsView.view {
if let collapsingItemView = self.gridItemViews.values.first(where: { $0.isCollapsing }), let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == collapsingItemView.key.id && $0.isPresentation == collapsingItemView.key.isPresentation }) {
let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
targetItemFrame.origin.y -= expandedGridItemContainerFrame.minY
targetItemFrame.origin.x -= expandedGridItemContainerFrame.minX

let targetThumbnailsFrame = CGRect(origin: CGPoint(x: targetItemFrame.minX, y: targetItemFrame.maxY - expandedThumbnailsComponentView.bounds.height), size: expandedThumbnailsComponentView.bounds.size)
transition.setFrame(view: expandedThumbnailsComponentView, frame: targetThumbnailsFrame)
}
expandedThumbnailsComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.12, removeOnCompletion: false, completion: { [weak expandedThumbnailsComponentView] _ in
expandedThumbnailsComponentView?.removeFromSuperview()
})
} else {
expandedThumbnailsView.view?.removeFromSuperview()
}
}

if let expandedControlsView = self.expandedControlsView {
self.expandedControlsView = nil

if transition.containedViewLayoutTransition.isAnimated, let expandedControlsComponentView = expandedControlsView.view {
if let collapsingItemView = self.gridItemViews.values.first(where: { $0.isCollapsing }), let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == collapsingItemView.key.id && $0.isPresentation == collapsingItemView.key.isPresentation }) {
let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
targetItemFrame.origin.y -= expandedGridItemContainerFrame.minY
targetItemFrame.origin.x -= expandedGridItemContainerFrame.minX

let targetThumbnailsFrame = CGRect(origin: CGPoint(x: targetItemFrame.minX, y: targetItemFrame.minY), size: expandedControlsComponentView.bounds.size)
transition.setFrame(view: expandedControlsComponentView, frame: targetThumbnailsFrame)
}
expandedControlsComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.12, removeOnCompletion: false, completion: { [weak expandedControlsComponentView] _ in
expandedControlsComponentView?.removeFromSuperview()
})
} else {
expandedControlsView.view?.removeFromSuperview()
}
}
}
}

func update(component: VideoChatParticipantsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
@ -798,21 +1021,22 @@ final class VideoChatParticipantsComponent: Component {
)
self.itemLayout = itemLayout

let listItemsBackroundSize = self.listItemsBackround.update(
let listItemsBackgroundSize = self.listItemsBackground.update(
transition: transition,
component: AnyComponent(RoundedRectangle(
color: UIColor(white: 1.0, alpha: 0.1),
color: UIColor(white: 0.1, alpha: 1.0),
cornerRadius: 10.0
)),
environment: {},
containerSize: CGSize(width: availableSize.width - itemLayout.sideInset * 2.0, height: itemLayout.list.contentHeight())
)
let listItemsBackroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: listItemsBackroundSize)
if let listItemsBackroundView = self.listItemsBackround.view {
if listItemsBackroundView.superview == nil {
self.listItemViewContainer.addSubview(listItemsBackroundView)
let listItemsBackgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: listItemsBackgroundSize)
if let listItemsBackgroundView = self.listItemsBackground.view {
if listItemsBackgroundView.superview == nil {
self.listItemViewContainer.addSubview(listItemsBackgroundView)
self.listItemViewContainer.layer.addSublayer(self.listItemViewSeparatorContainer)
}
transition.setFrame(view: listItemsBackroundView, frame: listItemsBackroundFrame)
transition.setFrame(view: listItemsBackgroundView, frame: listItemsBackgroundFrame)
}

var requestedVideo: [PresentationGroupCallRequestedVideo] = []
@ -70,6 +70,8 @@ private final class VideoChatScreenComponent: Component {
private var callState: PresentationGroupCallState?
private var stateDisposable: Disposable?

private var isPushToTalkActive: Bool = false

private var members: PresentationGroupCallMembers?
private var membersDisposable: Disposable?

@ -137,6 +139,9 @@ private final class VideoChatScreenComponent: Component {
if abs(panGestureState.offsetFraction) > 0.6 || abs(velocity.y) >= 100.0 {
self.panGestureState = PanGestureState(offsetFraction: panGestureState.offsetFraction < 0.0 ? -1.0 : 1.0)
self.notifyDismissedInteractivelyOnPanGestureApply = true
if let controller = self.environment?.controller() as? VideoChatScreenV2Impl {
controller.notifyDismissed()
}
}

self.state?.updated(transition: .spring(duration: 0.4))
@ -277,6 +282,27 @@ private final class VideoChatScreenComponent: Component {
if self.members != members {
self.members = members

if let expandedParticipantsVideoState = self.expandedParticipantsVideoState {
if let _ = members?.participants.first(where: { participant in
if participant.peer.id == expandedParticipantsVideoState.mainParticipant.id {
if expandedParticipantsVideoState.mainParticipant.isPresentation {
if participant.presentationDescription == nil {
return false
}
} else {
if participant.videoDescription == nil {
return false
}
}
return true
}
return false
}) {
} else {
self.expandedParticipantsVideoState = nil
}
}

if !self.isUpdating {
self.state?.updated(transition: .spring(duration: 0.4))
}
@ -337,7 +363,13 @@ private final class VideoChatScreenComponent: Component {
self.notifyDismissedInteractivelyOnPanGestureApply = false

if let controller = self.environment?.controller() as? VideoChatScreenV2Impl {
controller.superDismiss()
if self.isUpdating {
DispatchQueue.main.async { [weak controller] in
controller?.superDismiss()
}
} else {
controller.superDismiss()
}
}
}
if let completionOnPanGestureApply = self.completionOnPanGestureApply {
@ -420,11 +452,17 @@ private final class VideoChatScreenComponent: Component {
transition.setFrame(view: navigationRightButtonView, frame: navigationRightButtonFrame)
}

let idleTitleStatusText: String
if let callState = self.callState, callState.networkState == .connected, let members = self.members {
idleTitleStatusText = environment.strings.VoiceChat_Panel_Members(Int32(max(1, members.totalCount)))
} else {
idleTitleStatusText = "connecting..."
}
let titleSize = self.title.update(
transition: transition,
component: AnyComponent(VideoChatTitleComponent(
title: self.peer?.debugDisplayTitle ?? " ",
status: .idle(count: self.members?.totalCount ?? 1),
status: idleTitleStatusText,
strings: environment.strings
)),
environment: {},
@ -518,8 +556,13 @@ private final class VideoChatScreenComponent: Component {
actionButtonMicrophoneState = .connecting
case .connected:
if let _ = callState.muteState {
micButtonContent = .muted
actionButtonMicrophoneState = .muted
if self.isPushToTalkActive {
micButtonContent = .unmuted
actionButtonMicrophoneState = .unmuted
} else {
micButtonContent = .muted
actionButtonMicrophoneState = .muted
}
} else {
micButtonContent = .unmuted
actionButtonMicrophoneState = .unmuted
@ -532,29 +575,43 @@ private final class VideoChatScreenComponent: Component {

let _ = self.microphoneButton.update(
transition: transition,
component: AnyComponent(PlainButtonComponent(
content: AnyComponent(VideoChatMicButtonComponent(
content: micButtonContent,
isCollapsed: self.expandedParticipantsVideoState != nil
)),
effectAlignment: .center,
action: { [weak self] in
component: AnyComponent(VideoChatMicButtonComponent(
content: micButtonContent,
isCollapsed: self.expandedParticipantsVideoState != nil,
updateUnmutedStateIsPushToTalk: { [weak self] unmutedStateIsPushToTalk in
guard let self, let component = self.component else {
return
}
guard let callState = self.callState else {
return
}
if let muteState = callState.muteState {
if muteState.canUnmute {
component.call.setIsMuted(action: .unmuted)

if let unmutedStateIsPushToTalk {
if unmutedStateIsPushToTalk {
if let muteState = callState.muteState {
if muteState.canUnmute {
self.isPushToTalkActive = true
component.call.setIsMuted(action: .muted(isPushToTalkActive: true))
} else {
self.isPushToTalkActive = false
}
} else {
self.isPushToTalkActive = true
component.call.setIsMuted(action: .muted(isPushToTalkActive: true))
}
} else {
if let muteState = callState.muteState {
if muteState.canUnmute {
component.call.setIsMuted(action: .unmuted)
}
}
self.isPushToTalkActive = false
}
self.state?.updated(transition: .spring(duration: 0.5))
} else {
component.call.setIsMuted(action: .muted(isPushToTalkActive: false))
}
},
animateAlpha: false,
animateScale: false
}
)),
environment: {},
containerSize: CGSize(width: microphoneButtonDiameter, height: microphoneButtonDiameter)
@ -737,11 +794,16 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo
self.idleTimerExtensionDisposable = nil

self.didAppearOnce = false
self.notifyDismissed()
}

func notifyDismissed() {
if !self.isDismissed {
self.isDismissed = true
DispatchQueue.main.async {
self.onViewDidDisappear?()
}
}

self.onViewDidDisappear?()
}

public func dismiss(closing: Bool, manual: Bool) {
@ -750,6 +812,8 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo

override public func dismiss(completion: (() -> Void)? = nil) {
if !self.isAnimatingDismiss {
self.notifyDismissed()

if let componentView = self.node.hostView.componentView as? VideoChatScreenComponent.View {
self.isAnimatingDismiss = true
componentView.animateOut(completion: { [weak self] in
@ -6,32 +6,13 @@ import MultilineTextComponent
import TelegramPresentationData

final class VideoChatTitleComponent: Component {
enum Status: Equatable {
enum Key {
case idle
case speaking
}

case idle(count: Int)
case speaking(titles: [String])

var key: Key {
switch self {
case .idle:
return .idle
case .speaking:
return .speaking
}
}
}

let title: String
let status: Status
let status: String
let strings: PresentationStrings

init(
title: String,
status: Status,
status: String,
strings: PresentationStrings
) {
self.title = title
@ -73,7 +54,6 @@ final class VideoChatTitleComponent: Component {
self.isUpdating = false
}

let previousComponent = self.component
self.component = component

let spacing: CGFloat = 1.0
@ -87,18 +67,6 @@ final class VideoChatTitleComponent: Component {
containerSize: CGSize(width: availableSize.width, height: 100.0)
)

if previousComponent?.status.key != component.status.key {
if let status = self.status {
self.status = nil
if let statusView = status.view {
transition.setAlpha(view: statusView, alpha: 0.0, completion: { [weak statusView] _ in
statusView?.removeFromSuperview()
})
transition.setPosition(view: statusView, position: statusView.center.offsetBy(dx: 0.0, dy: -10.0))
}
}
}

let status: ComponentView<Empty>
if let current = self.status {
status = current
@ -107,16 +75,9 @@ final class VideoChatTitleComponent: Component {
self.status = status
}
let statusComponent: AnyComponent<Empty>
switch component.status {
case let .idle(count):
statusComponent = AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: component.strings.VoiceChat_Panel_Members(Int32(count)), font: Font.regular(13.0), textColor: UIColor(white: 1.0, alpha: 0.5)))
))
case let .speaking(titles):
statusComponent = AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: titles.joined(separator: ", "), font: Font.regular(13.0), textColor: UIColor(rgb: 0x34c759)))
))
}
statusComponent = AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: component.status, font: Font.regular(13.0), textColor: UIColor(white: 1.0, alpha: 0.5)))
))

let statusSize = status.update(
transition: .immediate,
@ -11,7 +11,7 @@ swift_library(
],
deps = [
"//submodules/ComponentFlow",
"//submodules/Components/BundleIconComponent",
"//submodules/Components/MultilineTextComponent",
"//submodules/Display",
],
visibility = [
@ -1,8 +1,8 @@
import Foundation
import UIKit
import ComponentFlow
import BundleIconComponent
import Display
import MultilineTextComponent

public final class BackButtonComponent: Component {
public let title: String
@ -30,22 +30,31 @@ public final class BackButtonComponent: Component {
}

public final class View: HighlightTrackingButton {
private let arrow = ComponentView<Empty>()
private let arrowView: UIImageView
private let title = ComponentView<Empty>()

private var component: BackButtonComponent?

public override init(frame: CGRect) {
self.arrowView = UIImageView()

super.init(frame: frame)

self.addSubview(self.arrowView)

self.highligthedChanged = { [weak self] highlighted in
if let self {
let transition: ComponentTransition = highlighted ? .immediate : .easeInOut(duration: 0.2)
if highlighted {
self.layer.removeAnimation(forKey: "opacity")
self.alpha = 0.65
transition.setAlpha(view: self.arrowView, alpha: 0.65)
if let titleView = self.title.view {
transition.setAlpha(view: titleView, alpha: 0.65)
}
} else {
self.alpha = 1.0
self.layer.animateAlpha(from: 0.65, to: 1.0, duration: 0.2)
transition.setAlpha(view: self.arrowView, alpha: 1.0)
if let titleView = self.title.view {
transition.setAlpha(view: titleView, alpha: 1.0)
}
}
}
}
@ -64,22 +73,44 @@ public final class BackButtonComponent: Component {
}

override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
return super.hitTest(point, with: event)
if self.isHidden || self.alpha.isZero || self.isUserInteractionEnabled == false {
return nil
}

if self.bounds.insetBy(dx: -8.0, dy: -8.0).contains(point) {
return self
}

return nil
}

func update(component: BackButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
let sideInset: CGFloat = 4.0
self.component = component

if self.arrowView.image == nil {
self.arrowView.image = NavigationBar.backArrowImage(color: .white)?.withRenderingMode(.alwaysTemplate)
}
self.arrowView.tintColor = component.color

let titleSize = self.title.update(
transition: .immediate,
component: AnyComponent(Text(text: component.title, font: Font.regular(17.0), color: component.color)),
component: AnyComponent(MultilineTextComponent(
text: .plain(NSAttributedString(string: component.title, font: Font.regular(17.0), textColor: component.color))
)),
environment: {},
containerSize: CGSize(width: availableSize.width - 4.0, height: availableSize.height)
)

let arrowInset: CGFloat = 15.0

let size = CGSize(width: sideInset * 2.0 + titleSize.width, height: availableSize.height)
let size = CGSize(width: arrowInset + titleSize.width, height: titleSize.height)

if let arrowImage = self.arrowView.image {
let arrowFrame = CGRect(origin: CGPoint(x: -4.0, y: floor((size.height - arrowImage.size.height) * 0.5)), size: arrowImage.size)
transition.setFrame(view: self.arrowView, frame: arrowFrame)
}

let titleFrame = titleSize.centered(in: CGRect(origin: CGPoint(), size: size))
let titleFrame = CGRect(origin: CGPoint(x: arrowInset, y: floor((size.height - titleSize.height) * 0.5)), size: titleSize)
if let titleView = self.title.view {
if titleView.superview == nil {
titleView.layer.anchorPoint = CGPoint()
@ -906,13 +906,14 @@ public final class SharedAccountContextImpl: SharedAccountContext {
strongSelf.groupCallController = groupCallController
navigationController.pushViewController(groupCallController)
} else {
strongSelf.hasGroupCallOnScreenPromise.set(true)

let _ = (makeVoiceChatControllerInitialData(sharedContext: strongSelf, accountContext: call.accountContext, call: call)
|> deliverOnMainQueue).start(next: { [weak strongSelf, weak navigationController] initialData in
guard let strongSelf, let navigationController else {
return
}

strongSelf.hasGroupCallOnScreenPromise.set(true)
let groupCallController = makeVoiceChatController(sharedContext: strongSelf, accountContext: call.accountContext, call: call, initialData: initialData)
groupCallController.onViewDidAppear = { [weak strongSelf] in
if let strongSelf {
@ -55,6 +55,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
public var crashOnMemoryPressure: Bool
public var dustEffect: Bool
public var callV2: Bool
public var experimentalCallMute: Bool
public var allowWebViewInspection: Bool
public var disableReloginTokens: Bool
public var liveStreamV2: Bool
@ -91,6 +92,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
crashOnMemoryPressure: false,
dustEffect: false,
callV2: false,
experimentalCallMute: false,
allowWebViewInspection: false,
disableReloginTokens: false,
liveStreamV2: false
@ -128,6 +130,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
crashOnMemoryPressure: Bool,
dustEffect: Bool,
callV2: Bool,
experimentalCallMute: Bool,
allowWebViewInspection: Bool,
disableReloginTokens: Bool,
liveStreamV2: Bool
@ -162,6 +165,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.crashOnMemoryPressure = crashOnMemoryPressure
self.dustEffect = dustEffect
self.callV2 = callV2
self.experimentalCallMute = experimentalCallMute
self.allowWebViewInspection = allowWebViewInspection
self.disableReloginTokens = disableReloginTokens
self.liveStreamV2 = liveStreamV2
@ -200,6 +204,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.crashOnMemoryPressure = try container.decodeIfPresent(Bool.self, forKey: "crashOnMemoryPressure") ?? false
self.dustEffect = try container.decodeIfPresent(Bool.self, forKey: "dustEffect") ?? false
self.callV2 = try container.decodeIfPresent(Bool.self, forKey: "callV2") ?? false
self.experimentalCallMute = try container.decodeIfPresent(Bool.self, forKey: "experimentalCallMute") ?? false
self.allowWebViewInspection = try container.decodeIfPresent(Bool.self, forKey: "allowWebViewInspection") ?? false
self.disableReloginTokens = try container.decodeIfPresent(Bool.self, forKey: "disableReloginTokens") ?? false
self.liveStreamV2 = try container.decodeIfPresent(Bool.self, forKey: "liveStreamV2") ?? false
@ -238,6 +243,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
try container.encode(self.crashOnMemoryPressure, forKey: "crashOnMemoryPressure")
try container.encode(self.dustEffect, forKey: "dustEffect")
try container.encode(self.callV2, forKey: "callV2")
try container.encode(self.experimentalCallMute, forKey: "experimentalCallMute")
try container.encode(self.allowWebViewInspection, forKey: "allowWebViewInspection")
try container.encode(self.disableReloginTokens, forKey: "disableReloginTokens")
try container.encode(self.liveStreamV2, forKey: "liveStreamV2")
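An aside, not part of this commit: once decoded as above, the flag is just a Bool on ExperimentalUISettings, so a caller that already holds the stored preferences entry could resolve it along these lines; `entry` and its surrounding context are assumptions for illustration only.

// Hypothetical read-out of the experimental flag; `entry` is assumed to be an optional PreferencesEntry.
let enableSystemMute = entry?.get(ExperimentalUISettings.self)?.experimentalCallMute ?? false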
@ -463,7 +463,7 @@ public final class OngoingGroupCallContext {

private let audioSessionActiveDisposable = MetaDisposable()

init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
self.queue = queue

#if os(iOS)
@ -574,6 +574,7 @@ public final class OngoingGroupCallContext {
videoContentType: _videoContentType,
enableNoiseSuppression: enableNoiseSuppression,
disableAudioInput: disableAudioInput,
enableSystemMute: enableSystemMute,
preferX264: preferX264,
logPath: logPath,
onMutedSpeechActivityDetected: { value in
@ -1112,10 +1113,10 @@ public final class OngoingGroupCallContext {
}
}

public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected)
})
}

@ -742,8 +742,8 @@ public final class OngoingCallContext {
public final class AudioDevice {
let impl: SharedCallAudioDevice

public static func create() -> AudioDevice? {
return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
public static func create(enableSystemMute: Bool) -> AudioDevice? {
return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false, enableSystemMute: enableSystemMute))
}

private init(impl: SharedCallAudioDevice) {
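For context, an assumption about call sites not shown in this diff: existing users of the previously parameterless factory would now thread the experimental flag through, roughly as follows, where `experimentalCallMute` stands for the Bool resolved from ExperimentalUISettings.

// Hypothetical call site for the updated factory method.
let audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: experimentalCallMute)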
@ -23,7 +23,7 @@

@interface SharedCallAudioDevice : NSObject

- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording enableSystemMute:(bool)enableSystemMute;

+ (void)setupAudioSession;

@ -412,6 +412,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
videoContentType:(OngoingGroupCallVideoContentType)videoContentType
enableNoiseSuppression:(bool)enableNoiseSuppression
disableAudioInput:(bool)disableAudioInput
enableSystemMute:(bool)enableSystemMute
preferX264:(bool)preferX264
logPath:(NSString * _Nonnull)logPath
onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
@ -79,9 +79,9 @@ public:

class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
public:
SharedAudioDeviceModuleImpl(bool disableAudioInput) {
SharedAudioDeviceModuleImpl(bool disableAudioInput, bool enableSystemMute) {
RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
_audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, enableSystemMute, disableAudioInput ? 2 : 1);
}

virtual ~SharedAudioDeviceModuleImpl() override {
@ -129,11 +129,11 @@ private:
std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
}

- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording enableSystemMute:(bool)enableSystemMute {
self = [super init];
if (self != nil) {
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
return std::static_pointer_cast<tgcalls::SharedAudioDeviceModule>(std::make_shared<SharedAudioDeviceModuleImpl>(disableRecording));
_audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording, enableSystemMute]() mutable {
return std::static_pointer_cast<tgcalls::SharedAudioDeviceModule>(std::make_shared<SharedAudioDeviceModuleImpl>(disableRecording, enableSystemMute));
}));
}
return self;
@ -1278,7 +1278,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, false, 1);
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
@ -1691,6 +1691,7 @@ private:
videoContentType:(OngoingGroupCallVideoContentType)videoContentType
enableNoiseSuppression:(bool)enableNoiseSuppression
disableAudioInput:(bool)disableAudioInput
enableSystemMute:(bool)enableSystemMute
preferX264:(bool)preferX264
logPath:(NSString * _Nonnull)logPath
onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
@ -1886,6 +1887,7 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
.outgoingAudioBitrateKbit = outgoingAudioBitrateKbit,
.disableOutgoingAudioProcessing = disableOutgoingAudioProcessing,
.disableAudioInput = disableAudioInput,
.ios_enableSystemMute = enableSystemMute,
.videoContentType = _videoContentType,
.videoCodecPreferences = videoCodecPreferences,
.initialEnableNoiseSuppression = enableNoiseSuppression,
@ -1922,12 +1924,12 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
},
.minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
.createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule, onMutedSpeechActivityDetected = _onMutedSpeechActivityDetected](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
.createAudioDeviceModule = [weakSelf, queue, disableAudioInput, enableSystemMute, audioDeviceModule, onMutedSpeechActivityDetected = _onMutedSpeechActivityDetected](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
if (audioDeviceModule) {
return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
} else {
rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, enableSystemMute, disableAudioInput ? 2 : 1);
if (resultModule) {
resultModule->mutedSpeechDetectionChanged = ^(bool value) {
if (onMutedSpeechActivityDetected) {