mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

[WIP] Video chats v2

parent 1567e6719c
commit 67ded11399
@@ -103,6 +103,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 case enableQuickReactionSwitch(Bool)
 case disableReloginTokens(Bool)
 case callV2(Bool)
+case experimentalCallMute(Bool)
 case liveStreamV2(Bool)
 case preferredVideoCodec(Int, String, String?, Bool)
 case disableVideoAspectScaling(Bool)
@@ -128,7 +129,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 return DebugControllerSection.web.rawValue
 case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
 return DebugControllerSection.experiments.rawValue
-case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .callV2, .liveStreamV2:
+case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .callV2, .experimentalCallMute, .liveStreamV2:
 return DebugControllerSection.experiments.rawValue
 case .logTranslationRecognition, .resetTranslationStates:
 return DebugControllerSection.translation.rawValue
@@ -243,10 +244,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 return 50
 case .callV2:
 return 51
-case .liveStreamV2:
+case .experimentalCallMute:
 return 52
+case .liveStreamV2:
+return 53
 case let .preferredVideoCodec(index, _, _, _):
-return 53 + index
+return 54 + index
 case .disableVideoAspectScaling:
 return 100
 case .enableNetworkFramework:
@@ -1325,6 +1328,16 @@ private enum DebugControllerEntry: ItemListNodeEntry {
 })
 }).start()
 })
+case let .experimentalCallMute(value):
+return ItemListSwitchItem(presentationData: presentationData, title: "[WIP] OS mic mute", value: value, sectionId: self.section, style: .blocks, updated: { value in
+let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
+settings.experimentalCallMute = value
+return PreferencesEntry(settings)
+})
+}).start()
+})
 case let .liveStreamV2(value):
 return ItemListSwitchItem(presentationData: presentationData, title: "Live Stream V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
 let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
@@ -1490,6 +1503,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
 entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
 entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
 entries.append(.callV2(experimentalSettings.callV2))
+entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
 entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))
 }
 
@@ -114,6 +114,9 @@ swift_library(
 "//submodules/TelegramUI/Components/PlainButtonComponent",
 "//submodules/TelegramUI/Components/LottieComponent",
 "//submodules/TelegramUI/Components/Stories/PeerListItemComponent",
+"//submodules/TelegramUI/Components/BackButtonComponent",
+"//submodules/DirectMediaImageCache",
+"//submodules/FastBlur",
 ],
 visibility = [
 "//visibility:public",
@@ -295,7 +295,7 @@ public final class PresentationCallImpl: PresentationCall {
 if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_call_device"] {
 self.sharedAudioDevice = nil
 } else {
-self.sharedAudioDevice = OngoingCallContext.AudioDevice.create()
+self.sharedAudioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: context.sharedContext.immediateExperimentalUISettings.experimentalCallMute)
 }
 
 self.audioSessionActiveDisposable = (self.audioSessionActive.get()
@@ -1687,7 +1687,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 strongSelf.requestCall(movingFromBroadcastToRtc: false)
 }
 }
-}, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account), onMutedSpeechActivityDetected: { [weak self] value in
+}, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: false, disableAudioInput: self.isStream, enableSystemMute: self.accountContext.sharedContext.immediateExperimentalUISettings.experimentalCallMute, preferX264: self.accountContext.sharedContext.immediateExperimentalUISettings.preferredVideoCodec == "H264", logPath: allocateCallLogPath(account: self.account), onMutedSpeechActivityDetected: { [weak self] value in
 Queue.mainQueue().async {
 guard let strongSelf = self else {
 return
@@ -2997,7 +2997,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 
 self.hasScreencast = true
 
-let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in })
+let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in })
 self.screencastCallContext = screencastCallContext
 
 self.screencastJoinDisposable.set((screencastCallContext.joinPayload
@@ -98,9 +98,9 @@ final class VideoChatActionButtonComponent: Component {
 case .connecting:
 backgroundColor = UIColor(white: 1.0, alpha: 0.1)
 case .muted:
-backgroundColor = isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF)
+backgroundColor = !isActive ? UIColor(rgb: 0x002E5D) : UIColor(rgb: 0x027FFF)
 case .unmuted:
-backgroundColor = isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659)
+backgroundColor = !isActive ? UIColor(rgb: 0x124B21) : UIColor(rgb: 0x34C659)
 }
 iconDiameter = 60.0
 case .leave:
@@ -0,0 +1,100 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+import MultilineTextComponent
+import TelegramPresentationData
+import AppBundle
+import BackButtonComponent
+
+final class VideoChatExpandedControlsComponent: Component {
+let theme: PresentationTheme
+let strings: PresentationStrings
+let backAction: () -> Void
+
+init(
+theme: PresentationTheme,
+strings: PresentationStrings,
+backAction: @escaping () -> Void
+) {
+self.theme = theme
+self.strings = strings
+self.backAction = backAction
+}
+
+static func ==(lhs: VideoChatExpandedControlsComponent, rhs: VideoChatExpandedControlsComponent) -> Bool {
+if lhs.theme !== rhs.theme {
+return false
+}
+if lhs.strings !== rhs.strings {
+return false
+}
+return true
+}
+
+final class View: UIView {
+private let backButton = ComponentView<Empty>()
+
+private var component: VideoChatExpandedControlsComponent?
+private var isUpdating: Bool = false
+
+private var ignoreScrolling: Bool = false
+
+override init(frame: CGRect) {
+super.init(frame: frame)
+}
+
+required init?(coder: NSCoder) {
+fatalError("init(coder:) has not been implemented")
+}
+
+override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
+if let backButtonView = self.backButton.view, let result = backButtonView.hitTest(self.convert(point, to: backButtonView), with: event) {
+return result
+}
+return nil
+}
+
+func update(component: VideoChatExpandedControlsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+self.isUpdating = true
+defer {
+self.isUpdating = false
+}
+
+self.component = component
+
+let backButtonSize = self.backButton.update(
+transition: transition,
+component: AnyComponent(BackButtonComponent(
+title: component.strings.Common_Back,
+color: .white,
+action: { [weak self] in
+guard let self, let component = self.component else {
+return
+}
+component.backAction()
+}
+)),
+environment: {},
+containerSize: CGSize(width: availableSize.width * 0.5, height: 100.0)
+)
+let backButtonFrame = CGRect(origin: CGPoint(x: 12.0, y: 12.0), size: backButtonSize)
+if let backButtonView = self.backButton.view {
+if backButtonView.superview == nil {
+self.addSubview(backButtonView)
+}
+transition.setFrame(view: backButtonView, frame: backButtonFrame)
+}
+
+return availableSize
+}
+}
+
+func makeView() -> View {
+return View()
+}
+
+func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+}
+}
@@ -0,0 +1,671 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+import MultilineTextComponent
+import TelegramPresentationData
+import AppBundle
+import TelegramCore
+import AccountContext
+import SwiftSignalKit
+import MetalEngine
+import CallScreen
+import AvatarNode
+
+final class VideoChatParticipantThumbnailComponent: Component {
+let call: PresentationGroupCall
+let theme: PresentationTheme
+let participant: GroupCallParticipantsContext.Participant
+let isPresentation: Bool
+let isSelected: Bool
+let action: (() -> Void)?
+
+init(
+call: PresentationGroupCall,
+theme: PresentationTheme,
+participant: GroupCallParticipantsContext.Participant,
+isPresentation: Bool,
+isSelected: Bool,
+action: (() -> Void)?
+) {
+self.call = call
+self.theme = theme
+self.participant = participant
+self.isPresentation = isPresentation
+self.isSelected = isSelected
+self.action = action
+}
+
+static func ==(lhs: VideoChatParticipantThumbnailComponent, rhs: VideoChatParticipantThumbnailComponent) -> Bool {
+if lhs.call !== rhs.call {
+return false
+}
+if lhs.theme !== rhs.theme {
+return false
+}
+if lhs.participant != rhs.participant {
+return false
+}
+if lhs.isPresentation != rhs.isPresentation {
+return false
+}
+if lhs.isSelected != rhs.isSelected {
+return false
+}
+return true
+}
+
+private struct VideoSpec: Equatable {
+var resolution: CGSize
+var rotationAngle: Float
+
+init(resolution: CGSize, rotationAngle: Float) {
+self.resolution = resolution
+self.rotationAngle = rotationAngle
+}
+}
+
+final class View: HighlightTrackingButton {
+private static let selectedBorderImage: UIImage? = {
+return generateStretchableFilledCircleImage(diameter: 20.0, color: nil, strokeColor: UIColor.white, strokeWidth: 2.0)?.withRenderingMode(.alwaysTemplate)
+}()
+
+private var component: VideoChatParticipantThumbnailComponent?
+private weak var componentState: EmptyComponentState?
+private var isUpdating: Bool = false
+
+private var avatarNode: AvatarNode?
+private let title = ComponentView<Empty>()
+private let muteStatus = ComponentView<Empty>()
+
+private var selectedBorderView: UIImageView?
+
+private var videoSource: AdaptedCallVideoSource?
+private var videoDisposable: Disposable?
+private var videoBackgroundLayer: SimpleLayer?
+private var videoLayer: PrivateCallVideoLayer?
+private var videoSpec: VideoSpec?
+
+override init(frame: CGRect) {
+super.init(frame: frame)
+
+//TODO:release optimize
+self.clipsToBounds = true
+self.layer.cornerRadius = 10.0
+
+self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
+}
+
+required init?(coder: NSCoder) {
+fatalError("init(coder:) has not been implemented")
+}
+
+deinit {
+self.videoDisposable?.dispose()
+}
+
+@objc private func pressed() {
+guard let component = self.component, let action = component.action else {
+return
+}
+action()
+}
+
+func update(component: VideoChatParticipantThumbnailComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+self.isUpdating = true
+defer {
+self.isUpdating = false
+}
+
+if self.component == nil {
+self.backgroundColor = UIColor(rgb: 0x1C1C1E)
+}
+
+self.component = component
+self.componentState = state
+
+let avatarNode: AvatarNode
+if let current = self.avatarNode {
+avatarNode = current
+} else {
+avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 17.0))
+avatarNode.isUserInteractionEnabled = false
+self.avatarNode = avatarNode
+self.addSubview(avatarNode.view)
+}
+
+let avatarSize = CGSize(width: 50.0, height: 50.0)
+let avatarFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - avatarSize.width) * 0.5), y: 7.0), size: avatarSize)
+transition.setFrame(view: avatarNode.view, frame: avatarFrame)
+avatarNode.updateSize(size: avatarSize)
+if component.participant.peer.smallProfileImage != nil {
+avatarNode.setPeerV2(context: component.call.accountContext, theme: component.theme, peer: EnginePeer(component.participant.peer), displayDimensions: avatarSize)
+} else {
+avatarNode.setPeer(context: component.call.accountContext, theme: component.theme, peer: EnginePeer(component.participant.peer), displayDimensions: avatarSize)
+}
+
+let muteStatusSize = self.muteStatus.update(
+transition: transition,
+component: AnyComponent(VideoChatMuteIconComponent(
+color: .white,
+isMuted: component.participant.muteState != nil
+)),
+environment: {},
+containerSize: CGSize(width: 36.0, height: 36.0)
+)
+let muteStatusFrame = CGRect(origin: CGPoint(x: availableSize.width + 5.0 - muteStatusSize.width, y: availableSize.height + 5.0 - muteStatusSize.height), size: muteStatusSize)
+if let muteStatusView = self.muteStatus.view {
+if muteStatusView.superview == nil {
+self.addSubview(muteStatusView)
+}
+transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
+transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
+transition.setScale(view: muteStatusView, scale: 0.65)
+}
+
+let titleSize = self.title.update(
+transition: .immediate,
+component: AnyComponent(MultilineTextComponent(
+text: .plain(NSAttributedString(string: EnginePeer(component.participant.peer).compactDisplayTitle, font: Font.semibold(13.0), textColor: .white))
+)),
+environment: {},
+containerSize: CGSize(width: availableSize.width - 6.0 * 2.0 - 8.0, height: 100.0)
+)
+let titleFrame = CGRect(origin: CGPoint(x: 6.0, y: availableSize.height - 6.0 - titleSize.height), size: titleSize)
+if let titleView = self.title.view {
+if titleView.superview == nil {
+titleView.layer.anchorPoint = CGPoint()
+titleView.isUserInteractionEnabled = false
+self.addSubview(titleView)
+}
+transition.setPosition(view: titleView, position: titleFrame.origin)
+titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
+}
+
+if let videoDescription = component.isPresentation ? component.participant.presentationDescription : component.participant.videoDescription {
+let videoBackgroundLayer: SimpleLayer
+if let current = self.videoBackgroundLayer {
+videoBackgroundLayer = current
+} else {
+videoBackgroundLayer = SimpleLayer()
+videoBackgroundLayer.backgroundColor = UIColor(white: 0.1, alpha: 1.0).cgColor
+self.videoBackgroundLayer = videoBackgroundLayer
+self.layer.insertSublayer(videoBackgroundLayer, above: avatarNode.layer)
+videoBackgroundLayer.isHidden = true
+}
+
+let videoLayer: PrivateCallVideoLayer
+if let current = self.videoLayer {
+videoLayer = current
+} else {
+videoLayer = PrivateCallVideoLayer()
+self.videoLayer = videoLayer
+self.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
+self.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
+
+videoLayer.blurredLayer.opacity = 0.25
+
+if let input = (component.call as! PresentationGroupCallImpl).video(endpointId: videoDescription.endpointId) {
+let videoSource = AdaptedCallVideoSource(videoStreamSignal: input)
+self.videoSource = videoSource
+
+self.videoDisposable?.dispose()
+self.videoDisposable = videoSource.addOnUpdated { [weak self] in
+guard let self, let videoSource = self.videoSource, let videoLayer = self.videoLayer else {
+return
+}
+
+let videoOutput = videoSource.currentOutput
+videoLayer.video = videoOutput
+
+if let videoOutput {
+let videoSpec = VideoSpec(resolution: videoOutput.resolution, rotationAngle: videoOutput.rotationAngle)
+if self.videoSpec != videoSpec {
+self.videoSpec = videoSpec
+if !self.isUpdating {
+self.componentState?.updated(transition: .immediate, isLocal: true)
+}
+}
+} else {
+if self.videoSpec != nil {
+self.videoSpec = nil
+if !self.isUpdating {
+self.componentState?.updated(transition: .immediate, isLocal: true)
+}
+}
+}
+
+/*var notifyOrientationUpdated = false
+var notifyIsMirroredUpdated = false
+
+if !self.didReportFirstFrame {
+notifyOrientationUpdated = true
+notifyIsMirroredUpdated = true
+}
+
+if let currentOutput = videoOutput {
+let currentAspect: CGFloat
+if currentOutput.resolution.height > 0.0 {
+currentAspect = currentOutput.resolution.width / currentOutput.resolution.height
+} else {
+currentAspect = 1.0
+}
+if self.currentAspect != currentAspect {
+self.currentAspect = currentAspect
+notifyOrientationUpdated = true
+}
+
+let currentOrientation: PresentationCallVideoView.Orientation
+if currentOutput.followsDeviceOrientation {
+currentOrientation = .rotation0
+} else {
+if abs(currentOutput.rotationAngle - 0.0) < .ulpOfOne {
+currentOrientation = .rotation0
+} else if abs(currentOutput.rotationAngle - Float.pi * 0.5) < .ulpOfOne {
+currentOrientation = .rotation90
+} else if abs(currentOutput.rotationAngle - Float.pi) < .ulpOfOne {
+currentOrientation = .rotation180
+} else if abs(currentOutput.rotationAngle - Float.pi * 3.0 / 2.0) < .ulpOfOne {
+currentOrientation = .rotation270
+} else {
+currentOrientation = .rotation0
+}
+}
+if self.currentOrientation != currentOrientation {
+self.currentOrientation = currentOrientation
+notifyOrientationUpdated = true
+}
+
+let currentIsMirrored = !currentOutput.mirrorDirection.isEmpty
+if self.currentIsMirrored != currentIsMirrored {
+self.currentIsMirrored = currentIsMirrored
+notifyIsMirroredUpdated = true
+}
+}
+
+if !self.didReportFirstFrame {
+self.didReportFirstFrame = true
+self.onFirstFrameReceived?(Float(self.currentAspect))
+}
+
+if notifyOrientationUpdated {
+self.onOrientationUpdated?(self.currentOrientation, self.currentAspect)
+}
+
+if notifyIsMirroredUpdated {
+self.onIsMirroredUpdated?(self.currentIsMirrored)
+}*/
+
+}
+}
+}
+
+transition.setFrame(layer: videoBackgroundLayer, frame: CGRect(origin: CGPoint(), size: availableSize))
+
+if let videoSpec = self.videoSpec {
+videoBackgroundLayer.isHidden = component.isSelected
+videoLayer.blurredLayer.isHidden = component.isSelected
+videoLayer.isHidden = component.isSelected
+
+let rotatedResolution = videoSpec.resolution
+let videoSize = rotatedResolution.aspectFilled(availableSize)
+let videoFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - videoSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
+let blurredVideoSize = rotatedResolution.aspectFilled(availableSize)
+let blurredVideoFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - blurredVideoSize.width) * 0.5), y: floorToScreenPixels((availableSize.height - blurredVideoSize.height) * 0.5)), size: blurredVideoSize)
+
+let videoResolution = rotatedResolution.aspectFitted(CGSize(width: availableSize.width * 3.0, height: availableSize.height * 3.0))
+let rotatedVideoResolution = videoResolution
+
+transition.setPosition(layer: videoLayer, position: videoFrame.center)
+transition.setBounds(layer: videoLayer, bounds: CGRect(origin: CGPoint(), size: videoFrame.size))
+videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
+
+transition.setPosition(layer: videoLayer.blurredLayer, position: blurredVideoFrame.center)
+transition.setBounds(layer: videoLayer.blurredLayer, bounds: CGRect(origin: CGPoint(), size: blurredVideoFrame.size))
+}
+} else {
+if let videoBackgroundLayer = self.videoBackgroundLayer {
+self.videoBackgroundLayer = nil
+videoBackgroundLayer.removeFromSuperlayer()
+}
+if let videoLayer = self.videoLayer {
+self.videoLayer = nil
+videoLayer.blurredLayer.removeFromSuperlayer()
+videoLayer.removeFromSuperlayer()
+}
+self.videoDisposable?.dispose()
+self.videoDisposable = nil
+self.videoSource = nil
+self.videoSpec = nil
+}
+
+if component.isSelected {
+let selectedBorderView: UIImageView
+if let current = self.selectedBorderView {
+selectedBorderView = current
+} else {
+selectedBorderView = UIImageView()
+self.selectedBorderView = selectedBorderView
+self.addSubview(selectedBorderView)
+selectedBorderView.image = View.selectedBorderImage
+}
+selectedBorderView.tintColor = component.theme.list.itemAccentColor
+selectedBorderView.frame = CGRect(origin: CGPoint(), size: availableSize)
+} else {
+if let selectedBorderView = self.selectedBorderView {
+self.selectedBorderView = nil
+selectedBorderView.removeFromSuperview()
+}
+}
+
+return availableSize
+}
+}
+
+func makeView() -> View {
+return View(frame: CGRect())
+}
+
+func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+}
+}
+
+final class VideoChatExpandedParticipantThumbnailsComponent: Component {
+final class Participant: Equatable {
+struct Key: Hashable {
+var id: EnginePeer.Id
+var isPresentation: Bool
+
+init(id: EnginePeer.Id, isPresentation: Bool) {
+self.id = id
+self.isPresentation = isPresentation
+}
+}
+
+let participant: GroupCallParticipantsContext.Participant
+let isPresentation: Bool
+
+var key: Key {
+return Key(id: self.participant.peer.id, isPresentation: self.isPresentation)
+}
+
+init(
+participant: GroupCallParticipantsContext.Participant,
+isPresentation: Bool
+) {
+self.participant = participant
+self.isPresentation = isPresentation
+}
+
+static func ==(lhs: Participant, rhs: Participant) -> Bool {
+if lhs === rhs {
+return true
+}
+if lhs.participant != rhs.participant {
+return false
+}
+if lhs.isPresentation != rhs.isPresentation {
+return false
+}
+return true
+}
+}
+
+let call: PresentationGroupCall
+let theme: PresentationTheme
+let participants: [Participant]
+let selectedParticipant: Participant.Key?
+let updateSelectedParticipant: (Participant.Key) -> Void
+
+init(
+call: PresentationGroupCall,
+theme: PresentationTheme,
+participants: [Participant],
+selectedParticipant: Participant.Key?,
+updateSelectedParticipant: @escaping (Participant.Key) -> Void
+) {
+self.call = call
+self.theme = theme
+self.participants = participants
+self.selectedParticipant = selectedParticipant
+self.updateSelectedParticipant = updateSelectedParticipant
+}
+
+static func ==(lhs: VideoChatExpandedParticipantThumbnailsComponent, rhs: VideoChatExpandedParticipantThumbnailsComponent) -> Bool {
+if lhs.call !== rhs.call {
+return false
+}
+if lhs.theme !== rhs.theme {
+return false
+}
+if lhs.participants != rhs.participants {
+return false
+}
+if lhs.selectedParticipant != rhs.selectedParticipant {
+return false
+}
+return true
+}
+
+private final class ScrollView: UIScrollView {
+override func touchesShouldCancel(in view: UIView) -> Bool {
+return true
+}
+}
+
+private struct ItemLayout {
+let containerSize: CGSize
+let containerInsets: UIEdgeInsets
+let itemCount: Int
+let itemSize: CGSize
+let itemSpacing: CGFloat
+
+let contentSize: CGSize
+
+init(containerSize: CGSize, containerInsets: UIEdgeInsets, itemCount: Int) {
+self.containerSize = containerSize
+self.containerInsets = containerInsets
+self.itemCount = itemCount
+self.itemSize = CGSize(width: 84.0, height: 84.0)
+self.itemSpacing = 6.0
+
+let itemsWidth: CGFloat = CGFloat(itemCount) * self.itemSize.width + CGFloat(max(itemCount - 1, 0)) * self.itemSpacing
+self.contentSize = CGSize(width: self.containerInsets.left + self.containerInsets.right + itemsWidth, height: self.containerInsets.top + self.containerInsets.bottom + self.itemSize.height)
+}
+
+func frame(at index: Int) -> CGRect {
+let frame = CGRect(origin: CGPoint(x: self.containerInsets.left + CGFloat(index) * (self.itemSize.width + self.itemSpacing), y: self.containerInsets.top), size: self.itemSize)
+return frame
+}
+
+func visibleItemRange(for rect: CGRect) -> (minIndex: Int, maxIndex: Int) {
+if self.itemCount == 0 {
+return (0, -1)
+}
+let offsetRect = rect.offsetBy(dx: -self.containerInsets.left, dy: 0.0)
+var minVisibleRow = Int(floor((offsetRect.minY) / (self.itemSize.width)))
+minVisibleRow = max(0, minVisibleRow)
+let maxVisibleRow = Int(ceil((offsetRect.maxY) / (self.itemSize.width)))
+
+let minVisibleIndex = minVisibleRow
+let maxVisibleIndex = min(self.itemCount - 1, (maxVisibleRow + 1) - 1)
+
+return (minVisibleIndex, maxVisibleIndex)
+}
+}
+
+private final class VisibleItem {
+let view = ComponentView<Empty>()
+
+init() {
+}
+}
+
+final class View: UIView, UIScrollViewDelegate {
+private let scrollView: ScrollView
+
+private var component: VideoChatExpandedParticipantThumbnailsComponent?
+private var isUpdating: Bool = false
+
+private var ignoreScrolling: Bool = false
+
+private var itemLayout: ItemLayout?
+private var visibleItems: [Participant.Key: VisibleItem] = [:]
+
+override init(frame: CGRect) {
+self.scrollView = ScrollView()
+
+super.init(frame: frame)
+
+self.scrollView.delaysContentTouches = false
+self.scrollView.canCancelContentTouches = true
+self.scrollView.clipsToBounds = false
+self.scrollView.contentInsetAdjustmentBehavior = .never
+if #available(iOS 13.0, *) {
+self.scrollView.automaticallyAdjustsScrollIndicatorInsets = false
+}
+self.scrollView.showsVerticalScrollIndicator = false
+self.scrollView.showsHorizontalScrollIndicator = false
+self.scrollView.alwaysBounceHorizontal = false
+self.scrollView.alwaysBounceVertical = false
+self.scrollView.scrollsToTop = false
+self.scrollView.delegate = self
+self.scrollView.clipsToBounds = true
+
+self.addSubview(self.scrollView)
+}
+
+required init?(coder: NSCoder) {
+fatalError("init(coder:) has not been implemented")
+}
+
+func scrollViewDidScroll(_ scrollView: UIScrollView) {
+if !self.ignoreScrolling {
+self.updateScrolling(transition: .immediate)
+}
+}
+
+private func updateScrolling(transition: ComponentTransition) {
+guard let component = self.component, let itemLayout = self.itemLayout else {
+return
+}
+
+var validListItemIds: [Participant.Key] = []
+let visibleListItemRange = itemLayout.visibleItemRange(for: self.scrollView.bounds)
+if visibleListItemRange.maxIndex >= visibleListItemRange.minIndex {
+for i in visibleListItemRange.minIndex ... visibleListItemRange.maxIndex {
+let participant = component.participants[i]
+validListItemIds.append(participant.key)
+
+var itemTransition = transition
+let itemView: VisibleItem
+if let current = self.visibleItems[participant.key] {
+itemView = current
+} else {
+itemTransition = itemTransition.withAnimation(.none)
+itemView = VisibleItem()
+self.visibleItems[participant.key] = itemView
+}
+
+let itemFrame = itemLayout.frame(at: i)
+
+let participantKey = participant.key
+let _ = itemView.view.update(
+transition: itemTransition,
+component: AnyComponent(VideoChatParticipantThumbnailComponent(
+call: component.call,
+theme: component.theme,
+participant: participant.participant,
+isPresentation: participant.isPresentation,
+isSelected: component.selectedParticipant == participant.key,
+action: { [weak self] in
+guard let self, let component = self.component else {
+return
+}
+component.updateSelectedParticipant(participantKey)
+}
+)),
+environment: {},
+containerSize: itemFrame.size
+)
+if let itemComponentView = itemView.view.view {
+if itemComponentView.superview == nil {
+itemComponentView.clipsToBounds = true
+
+self.scrollView.addSubview(itemComponentView)
+
+if !transition.animation.isImmediate {
+itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+transition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
+}
+}
+transition.setFrame(view: itemComponentView, frame: itemFrame)
+}
+}
+}
+
+var removedListItemIds: [Participant.Key] = []
+for (itemId, itemView) in self.visibleItems {
+if !validListItemIds.contains(itemId) {
+removedListItemIds.append(itemId)
+
+if let itemComponentView = itemView.view.view {
+if !transition.animation.isImmediate {
+transition.setScale(view: itemComponentView, scale: 0.001)
+itemComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemComponentView] _ in
+itemComponentView?.removeFromSuperview()
+})
+} else {
+itemComponentView.removeFromSuperview()
+}
+}
+}
+}
+for itemId in removedListItemIds {
+self.visibleItems.removeValue(forKey: itemId)
+}
+}
+
+func update(component: VideoChatExpandedParticipantThumbnailsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+self.isUpdating = true
+defer {
+self.isUpdating = false
+}
+
+self.component = component
+
+let itemLayout = ItemLayout(
+containerSize: availableSize,
+containerInsets: UIEdgeInsets(top: 10.0, left: 10.0, bottom: 10.0, right: 10.0),
+itemCount: component.participants.count
+)
+self.itemLayout = itemLayout
+
+let size = CGSize(width: availableSize.width, height: itemLayout.contentSize.height)
+
+self.ignoreScrolling = true
+if self.scrollView.bounds.size != size {
+transition.setFrame(view: self.scrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: size))
+}
+let contentSize = CGSize(width: itemLayout.contentSize.width, height: size.height)
+if self.scrollView.contentSize != contentSize {
+self.scrollView.contentSize = contentSize
+}
+self.ignoreScrolling = false
+
+self.updateScrolling(transition: transition)
+
+return size
+}
+}
+
+func makeView() -> View {
+return View()
+}
+
+func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+}
+}
@@ -5,6 +5,7 @@ import ComponentFlow
 import MultilineTextComponent
 import TelegramPresentationData
 import LottieComponent
+import VoiceChatActionButton
 
 final class VideoChatMicButtonComponent: Component {
 enum Content {
@@ -15,13 +16,16 @@ final class VideoChatMicButtonComponent: Component {
 
 let content: Content
 let isCollapsed: Bool
+let updateUnmutedStateIsPushToTalk: (Bool?) -> Void
 
 init(
 content: Content,
-isCollapsed: Bool
+isCollapsed: Bool,
+updateUnmutedStateIsPushToTalk: @escaping (Bool?) -> Void
 ) {
 self.content = content
 self.isCollapsed = isCollapsed
+self.updateUnmutedStateIsPushToTalk = updateUnmutedStateIsPushToTalk
 }
 
 static func ==(lhs: VideoChatMicButtonComponent, rhs: VideoChatMicButtonComponent) -> Bool {
@@ -36,16 +40,67 @@ final class VideoChatMicButtonComponent: Component {
 
 final class View: HighlightTrackingButton {
 private let background = ComponentView<Empty>()
-private let icon = ComponentView<Empty>()
 private let title = ComponentView<Empty>()
+private let icon: VoiceChatActionButtonIconNode
 
 private var component: VideoChatMicButtonComponent?
 private var isUpdating: Bool = false
 
+private var beginTrackingTimestamp: Double = 0.0
+private var beginTrackingWasPushToTalk: Bool = false
+
 override init(frame: CGRect) {
+self.icon = VoiceChatActionButtonIconNode(isColored: false)
+
 super.init(frame: frame)
 }
 
+override func beginTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
+self.beginTrackingTimestamp = CFAbsoluteTimeGetCurrent()
+if let component = self.component {
+switch component.content {
+case .connecting:
+self.beginTrackingWasPushToTalk = false
+case .muted:
+self.beginTrackingWasPushToTalk = true
+component.updateUnmutedStateIsPushToTalk(true)
+case .unmuted:
+self.beginTrackingWasPushToTalk = false
+}
+}
+
+return super.beginTracking(touch, with: event)
+}
+
+override func endTracking(_ touch: UITouch?, with event: UIEvent?) {
+if let component = self.component {
+let timestamp = CFAbsoluteTimeGetCurrent()
+
+switch component.content {
+case .connecting:
+break
+case .muted:
+component.updateUnmutedStateIsPushToTalk(false)
+case .unmuted:
+if self.beginTrackingWasPushToTalk {
+if timestamp < self.beginTrackingTimestamp + 0.15 {
+component.updateUnmutedStateIsPushToTalk(false)
+} else {
+component.updateUnmutedStateIsPushToTalk(nil)
+}
+} else {
+component.updateUnmutedStateIsPushToTalk(nil)
+}
+}
+}
+
+return super.endTracking(touch, with: event)
+}
+
+override func cancelTracking(with event: UIEvent?) {
+return super.cancelTracking(with: event)
+}
+
 required init?(coder: NSCoder) {
 fatalError("init(coder:) has not been implemented")
 }
@@ -97,6 +152,7 @@ final class VideoChatMicButtonComponent: Component {
 )
 if let backgroundView = self.background.view {
 if backgroundView.superview == nil {
+backgroundView.isUserInteractionEnabled = false
 self.addSubview(backgroundView)
 }
 transition.setFrame(view: backgroundView, frame: CGRect(origin: CGPoint(), size: size))
@@ -105,6 +161,7 @@ final class VideoChatMicButtonComponent: Component {
 let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: size.height + 16.0), size: titleSize)
 if let titleView = self.title.view {
 if titleView.superview == nil {
+titleView.isUserInteractionEnabled = false
 self.addSubview(titleView)
 }
 transition.setPosition(view: titleView, position: titleFrame.center)
@@ -112,25 +169,24 @@ final class VideoChatMicButtonComponent: Component {
 alphaTransition.setAlpha(view: titleView, alpha: component.isCollapsed ? 0.0 : 1.0)
 }
 
-let iconSize = self.icon.update(
-transition: .immediate,
-component: AnyComponent(LottieComponent(
-content: LottieComponent.AppBundleContent(
-name: "VoiceUnmute"
-),
-color: .white
-)),
-environment: {},
-containerSize: CGSize(width: 100.0, height: 100.0)
-)
+if self.icon.view.superview == nil {
+self.icon.view.isUserInteractionEnabled = false
+self.addSubview(self.icon.view)
+}
+let iconSize = CGSize(width: 100.0, height: 100.0)
+
 let iconFrame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) * 0.5), y: floor((size.height - iconSize.height) * 0.5)), size: iconSize)
-if let iconView = self.icon.view {
-if iconView.superview == nil {
-self.addSubview(iconView)
-}
-transition.setPosition(view: iconView, position: iconFrame.center)
-transition.setBounds(view: iconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
-transition.setScale(view: iconView, scale: component.isCollapsed ? ((iconSize.width - 24.0) / iconSize.width) : 1.0)
+transition.setPosition(view: self.icon.view, position: iconFrame.center)
+transition.setBounds(view: self.icon.view, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
+transition.setScale(view: self.icon.view, scale: component.isCollapsed ? ((iconSize.width - 24.0) / iconSize.width) : 1.0)
+
+switch component.content {
+case .connecting:
+self.icon.enqueueState(.mute)
+case .muted:
+self.icon.enqueueState(.mute)
+case .unmuted:
+self.icon.enqueueState(.unmute)
 }
 
 return size
@@ -0,0 +1,78 @@
+import Foundation
+import UIKit
+import Display
+import ComponentFlow
+import MultilineTextComponent
+import TelegramPresentationData
+import AppBundle
+import LottieComponent
+
+final class VideoChatMuteIconComponent: Component {
+let color: UIColor
+let isMuted: Bool
+
+init(
+color: UIColor,
+isMuted: Bool
+) {
+self.color = color
+self.isMuted = isMuted
+}
+
+static func ==(lhs: VideoChatMuteIconComponent, rhs: VideoChatMuteIconComponent) -> Bool {
+if lhs.color != rhs.color {
+return false
+}
+if lhs.isMuted != rhs.isMuted {
+return false
+}
+return true
+}
+
+final class View: HighlightTrackingButton {
+private let icon: VoiceChatMicrophoneNode
+
+private var component: VideoChatMuteIconComponent?
+private var isUpdating: Bool = false
+
+private var contentImage: UIImage?
+
+override init(frame: CGRect) {
+self.icon = VoiceChatMicrophoneNode()
+
+super.init(frame: frame)
+}
+
+required init?(coder: NSCoder) {
+fatalError("init(coder:) has not been implemented")
+}
+
+func update(component: VideoChatMuteIconComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
+self.isUpdating = true
+defer {
+self.isUpdating = false
+}
+
+self.component = component
+
+let animationSize = availableSize
+
+let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize))
+if self.icon.view.superview == nil {
+self.addSubview(self.icon.view)
+}
+transition.setFrame(view: self.icon.view, frame: animationFrame)
+self.icon.update(state: VoiceChatMicrophoneNode.State(muted: component.isMuted, filled: true, color: component.color), animated: !transition.animation.isImmediate)
+
+return availableSize
+}
+}
+
+func makeView() -> View {
+return View()
+}
+
+func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
+return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
+}
+}
@@ -10,33 +10,47 @@ import CallScreen
 import TelegramCore
 import AccountContext
 import SwiftSignalKit
+import DirectMediaImageCache
+import FastBlur
+
+private func blurredAvatarImage(_ dataImage: UIImage) -> UIImage? {
+let imageContextSize = CGSize(width: 64.0, height: 64.0)
+if let imageContext = DrawingContext(size: imageContextSize, scale: 1.0, clear: true) {
+imageContext.withFlippedContext { c in
+if let cgImage = dataImage.cgImage {
+c.draw(cgImage, in: CGRect(origin: CGPoint(), size: imageContextSize))
+}
+}
+
+telegramFastBlurMore(Int32(imageContext.size.width * imageContext.scale), Int32(imageContext.size.height * imageContext.scale), Int32(imageContext.bytesPerRow), imageContext.bytes)
+
+return imageContext.generateImage()
+} else {
+return nil
+}
+}
+
 final class VideoChatParticipantVideoComponent: Component {
-struct ExpandedState: Equatable {
-var isPinned: Bool
-
-init(isPinned: Bool) {
-self.isPinned = isPinned
-}
-}
-
 let call: PresentationGroupCall
 let participant: GroupCallParticipantsContext.Participant
 let isPresentation: Bool
-let expandedState: ExpandedState?
+let isExpanded: Bool
+let bottomInset: CGFloat
 let action: (() -> Void)?
 
 init(
 call: PresentationGroupCall,
 participant: GroupCallParticipantsContext.Participant,
 isPresentation: Bool,
-expandedState: ExpandedState?,
+isExpanded: Bool,
+bottomInset: CGFloat,
 action: (() -> Void)?
 ) {
 self.call = call
 self.participant = participant
 self.isPresentation = isPresentation
-self.expandedState = expandedState
+self.isExpanded = isExpanded
+self.bottomInset = bottomInset
 self.action = action
 }
 
@@ -47,7 +61,10 @@ final class VideoChatParticipantVideoComponent: Component {
 if lhs.isPresentation != rhs.isPresentation {
 return false
 }
-if lhs.expandedState != rhs.expandedState {
+if lhs.isExpanded != rhs.isExpanded {
+return false
+}
+if lhs.bottomInset != rhs.bottomInset {
 return false
 }
 if (lhs.action == nil) != (rhs.action == nil) {
@@ -71,8 +88,12 @@ final class VideoChatParticipantVideoComponent: Component {
 private weak var componentState: EmptyComponentState?
 private var isUpdating: Bool = false
 
+private let muteStatus = ComponentView<Empty>()
 private let title = ComponentView<Empty>()
 
+private var blurredAvatarDisposable: Disposable?
+private var blurredAvatarView: UIImageView?
+
 private var videoSource: AdaptedCallVideoSource?
 private var videoDisposable: Disposable?
 private var videoBackgroundLayer: SimpleLayer?
@ -95,6 +116,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
|||||||
|
|
||||||
deinit {
|
deinit {
|
||||||
self.videoDisposable?.dispose()
|
self.videoDisposable?.dispose()
|
||||||
|
self.blurredAvatarDisposable?.dispose()
|
||||||
}
|
}
|
||||||
|
|
||||||
@objc private func pressed() {
|
@objc private func pressed() {
|
||||||
@@ -115,17 +137,95 @@ final class VideoChatParticipantVideoComponent: Component {

            let nameColor = component.participant.peer.nameColor ?? .blue
            let nameColors = component.call.accountContext.peerNameColors.get(nameColor, dark: true)
-            self.backgroundColor = nameColors.main
+            self.backgroundColor = nameColors.main.withMultiplied(hue: 1.0, saturation: 1.0, brightness: 0.4)
+
+            if let smallProfileImage = component.participant.peer.smallProfileImage {
+                let blurredAvatarView: UIImageView
+                if let current = self.blurredAvatarView {
+                    blurredAvatarView = current
+
+                    transition.setFrame(view: blurredAvatarView, frame: CGRect(origin: CGPoint(), size: availableSize))
+                } else {
+                    blurredAvatarView = UIImageView()
+                    blurredAvatarView.contentMode = .scaleAspectFill
+                    self.blurredAvatarView = blurredAvatarView
+                    self.insertSubview(blurredAvatarView, at: 0)
+
+                    blurredAvatarView.frame = CGRect(origin: CGPoint(), size: availableSize)
+                }
+
+                if self.blurredAvatarDisposable == nil {
+                    //TODO:release synchronous
+                    if let imageCache = component.call.accountContext.imageCache as? DirectMediaImageCache, let peerReference = PeerReference(component.participant.peer) {
+                        if let result = imageCache.getAvatarImage(peer: peerReference, resource: MediaResourceReference.avatar(peer: peerReference, resource: smallProfileImage.resource), immediateThumbnail: component.participant.peer.profileImageRepresentations.first?.immediateThumbnailData, size: 64, synchronous: false) {
+                            if let image = result.image {
+                                blurredAvatarView.image = blurredAvatarImage(image)
+                            }
+                            if let loadSignal = result.loadSignal {
+                                self.blurredAvatarDisposable = (loadSignal
+                                |> deliverOnMainQueue).startStrict(next: { [weak self] image in
+                                    guard let self else {
+                                        return
+                                    }
+                                    if let image {
+                                        self.blurredAvatarView?.image = blurredAvatarImage(image)
+                                    } else {
+                                        self.blurredAvatarView?.image = nil
+                                    }
+                                })
+                            }
+                        }
+                    }
+                }
+            } else {
+                if let blurredAvatarView = self.blurredAvatarView {
+                    self.blurredAvatarView = nil
+                    blurredAvatarView.removeFromSuperview()
+                }
+                if let blurredAvatarDisposable = self.blurredAvatarDisposable {
+                    self.blurredAvatarDisposable = nil
+                    blurredAvatarDisposable.dispose()
+                }
+            }
+
+            let muteStatusSize = self.muteStatus.update(
+                transition: transition,
+                component: AnyComponent(VideoChatMuteIconComponent(
+                    color: .white,
+                    isMuted: component.participant.muteState != nil
+                )),
+                environment: {},
+                containerSize: CGSize(width: 36.0, height: 36.0)
+            )
+            let muteStatusFrame: CGRect
+            if component.isExpanded {
+                muteStatusFrame = CGRect(origin: CGPoint(x: 5.0, y: availableSize.height - component.bottomInset + 1.0 - muteStatusSize.height), size: muteStatusSize)
+            } else {
+                muteStatusFrame = CGRect(origin: CGPoint(x: 1.0, y: availableSize.height - component.bottomInset + 3.0 - muteStatusSize.height), size: muteStatusSize)
+            }
+            if let muteStatusView = self.muteStatus.view {
+                if muteStatusView.superview == nil {
+                    self.addSubview(muteStatusView)
+                }
+                transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
+                transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
+                transition.setScale(view: muteStatusView, scale: component.isExpanded ? 1.0 : 0.7)
+            }

            let titleSize = self.title.update(
                transition: .immediate,
                component: AnyComponent(MultilineTextComponent(
-                    text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.regular(14.0), textColor: .white))
+                    text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white))
                )),
                environment: {},
                containerSize: CGSize(width: availableSize.width - 8.0 * 2.0, height: 100.0)
            )
-            let titleFrame = CGRect(origin: CGPoint(x: 8.0, y: availableSize.height - 8.0 - titleSize.height), size: titleSize)
+            let titleFrame: CGRect
+            if component.isExpanded {
+                titleFrame = CGRect(origin: CGPoint(x: 36.0, y: availableSize.height - component.bottomInset - 8.0 - titleSize.height), size: titleSize)
+            } else {
+                titleFrame = CGRect(origin: CGPoint(x: 29.0, y: availableSize.height - component.bottomInset - 4.0 - titleSize.height), size: titleSize)
+            }
            if let titleView = self.title.view {
                if titleView.superview == nil {
                    titleView.layer.anchorPoint = CGPoint()
@@ -133,6 +233,7 @@ final class VideoChatParticipantVideoComponent: Component {
                }
                transition.setPosition(view: titleView, position: titleFrame.origin)
                titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
+                transition.setScale(view: titleView, scale: component.isExpanded ? 1.0 : 0.825)
            }

            if let videoDescription = component.isPresentation ? component.participant.presentationDescription : component.participant.videoDescription {
@@ -237,6 +237,7 @@ final class VideoChatParticipantsComponent: Component {
        }

        let containerSize: CGSize
+        let collapsedContainerInsets: UIEdgeInsets
        let sideInset: CGFloat
        let grid: Grid
        let expandedGrid: ExpandedGrid
@@ -247,6 +248,7 @@ final class VideoChatParticipantsComponent: Component {

        init(containerSize: CGSize, sideInset: CGFloat, collapsedContainerInsets: UIEdgeInsets, expandedContainerInsets: UIEdgeInsets, gridItemCount: Int, listItemCount: Int, listItemHeight: CGFloat, listTrailingItemHeight: CGFloat) {
            self.containerSize = containerSize
+            self.collapsedContainerInsets = collapsedContainerInsets
            self.sideInset = sideInset

            self.grid = Grid(containerSize: CGSize(width: containerSize.width - sideInset * 2.0, height: containerSize.height), sideInset: 0.0, itemCount: gridItemCount)
@@ -265,12 +267,13 @@ final class VideoChatParticipantsComponent: Component {
        }

        func contentHeight() -> CGFloat {
-            var result: CGFloat = 0.0
+            var result: CGFloat = self.gridOffsetY
            if self.grid.itemCount != 0 {
                result += self.grid.contentHeight()
                result += self.spacing
            }
            result += self.list.contentHeight()
+            result += self.collapsedContainerInsets.bottom
            return result
        }

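Editor's note: the layout change above makes the scrollable content height start at the grid's vertical offset and reserve room for the collapsed container's bottom inset. Read in isolation, the computation is just a sum; the free function below mirrors that arithmetic with hypothetical parameter names and is only an illustration, not code from the repository.

import CoreGraphics

// Illustrative only: content height = grid offset + grid block + list block + bottom inset.
func participantsContentHeight(gridOffsetY: CGFloat, gridHeight: CGFloat, gridItemCount: Int, spacing: CGFloat, listHeight: CGFloat, collapsedBottomInset: CGFloat) -> CGFloat {
    var result = gridOffsetY
    if gridItemCount != 0 {
        result += gridHeight
        result += spacing
    }
    result += listHeight
    result += collapsedBottomInset
    return result
}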
@@ -336,6 +339,14 @@ final class VideoChatParticipantsComponent: Component {
            self.key = key
        }
    }

+    private final class ListItem {
+        let view = ComponentView<Empty>()
+        let separatorLayer = SimpleLayer()
+
+        init() {
+        }
+    }
+
    final class View: UIView, UIScrollViewDelegate {
        private let scollViewClippingContainer: UIView
@@ -356,11 +367,13 @@ final class VideoChatParticipantsComponent: Component {
        private let gridItemViewContainer: UIView

        private let expandedGridItemContainer: UIView
-        private var expandedGridItemView: GridItem?
+        private var expandedControlsView: ComponentView<Empty>?
+        private var expandedThumbnailsView: ComponentView<Empty>?

-        private var listItemViews: [EnginePeer.Id: ComponentView<Empty>] = [:]
+        private var listItemViews: [EnginePeer.Id: ListItem] = [:]
        private let listItemViewContainer: UIView
-        private let listItemsBackround = ComponentView<Empty>()
+        private let listItemViewSeparatorContainer: SimpleLayer
+        private let listItemsBackground = ComponentView<Empty>()

        private var itemLayout: ItemLayout?

@@ -377,6 +390,7 @@ final class VideoChatParticipantsComponent: Component {

            self.listItemViewContainer = UIView()
            self.listItemViewContainer.clipsToBounds = true
+            self.listItemViewSeparatorContainer = SimpleLayer()

            self.expandedGridItemContainer = UIView()
            self.expandedGridItemContainer.clipsToBounds = true
@@ -445,6 +459,15 @@ final class VideoChatParticipantsComponent: Component {
            let gridIsEmpty = self.gridParticipants.isEmpty
            self.appliedGridIsEmpty = gridIsEmpty

+            var previousExpandedItemId: VideoParticipantKey?
+            for (key, item) in self.gridItemViews {
+                if item.view.view?.superview == self.expandedGridItemContainer {
+                    previousExpandedItemId = key
+                    break
+                }
+            }
+
+            let previousExpandedGridItemContainerFrame = self.expandedGridItemContainer.frame
            var expandedGridItemContainerFrame: CGRect
            if component.expandedVideoState != nil {
                expandedGridItemContainerFrame = itemLayout.expandedGrid.itemContainerFrame()
|
|||||||
self.gridItemViews[videoParticipantKey] = itemView
|
self.gridItemViews[videoParticipantKey] = itemView
|
||||||
}
|
}
|
||||||
|
|
||||||
var expandedItemState: VideoChatParticipantVideoComponent.ExpandedState?
|
var isItemExpanded = false
|
||||||
if let expandedVideoState = component.expandedVideoState, expandedVideoState.mainParticipant == videoParticipantKey {
|
if let expandedVideoState = component.expandedVideoState, expandedVideoState.mainParticipant == videoParticipantKey {
|
||||||
expandedItemState = VideoChatParticipantVideoComponent.ExpandedState(isPinned: expandedVideoState.isMainParticipantPinned)
|
isItemExpanded = true
|
||||||
|
}
|
||||||
|
|
||||||
|
var suppressItemExpansionCollapseAnimation = false
|
||||||
|
if isItemExpanded {
|
||||||
|
if let previousExpandedItemId, previousExpandedItemId != videoParticipantKey {
|
||||||
|
suppressItemExpansionCollapseAnimation = true
|
||||||
|
}
|
||||||
|
} else if component.expandedVideoState != nil {
|
||||||
|
if let previousExpandedItemId, previousExpandedItemId == videoParticipantKey {
|
||||||
|
suppressItemExpansionCollapseAnimation = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var resultingItemTransition = commonGridItemTransition
|
||||||
|
if suppressItemExpansionCollapseAnimation {
|
||||||
|
itemTransition = itemTransition.withAnimation(.none)
|
||||||
|
resultingItemTransition = commonGridItemTransition.withAnimation(.none)
|
||||||
}
|
}
|
||||||
|
|
||||||
let itemFrame: CGRect
|
let itemFrame: CGRect
|
||||||
if expandedItemState != nil {
|
if isItemExpanded {
|
||||||
itemFrame = CGRect(origin: CGPoint(), size: itemLayout.expandedGrid.itemContainerFrame().size)
|
itemFrame = CGRect(origin: CGPoint(), size: itemLayout.expandedGrid.itemContainerFrame().size)
|
||||||
} else {
|
} else {
|
||||||
itemFrame = itemLayout.gridItemFrame(at: index)
|
itemFrame = itemLayout.gridItemFrame(at: index)
|
||||||
@ -516,7 +555,8 @@ final class VideoChatParticipantsComponent: Component {
|
|||||||
call: component.call,
|
call: component.call,
|
||||||
participant: videoParticipant.participant,
|
participant: videoParticipant.participant,
|
||||||
isPresentation: videoParticipant.isPresentation,
|
isPresentation: videoParticipant.isPresentation,
|
||||||
expandedState: expandedItemState,
|
isExpanded: isItemExpanded,
|
||||||
|
bottomInset: isItemExpanded ? 96.0 : 0.0,
|
||||||
action: { [weak self] in
|
action: { [weak self] in
|
||||||
guard let self, let component = self.component else {
|
guard let self, let component = self.component else {
|
||||||
return
|
return
|
||||||
@ -533,26 +573,36 @@ final class VideoChatParticipantsComponent: Component {
|
|||||||
)
|
)
|
||||||
if let itemComponentView = itemView.view.view {
|
if let itemComponentView = itemView.view.view {
|
||||||
if itemComponentView.superview == nil {
|
if itemComponentView.superview == nil {
|
||||||
if expandedItemState != nil {
|
if isItemExpanded {
|
||||||
self.expandedGridItemContainer.addSubview(itemComponentView)
|
if let expandedThumbnailsView = self.expandedThumbnailsView?.view {
|
||||||
|
self.expandedGridItemContainer.insertSubview(itemComponentView, belowSubview: expandedThumbnailsView)
|
||||||
|
} else {
|
||||||
|
self.expandedGridItemContainer.addSubview(itemComponentView)
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
self.gridItemViewContainer.addSubview(itemComponentView)
|
self.gridItemViewContainer.addSubview(itemComponentView)
|
||||||
}
|
}
|
||||||
|
|
||||||
itemComponentView.frame = itemFrame
|
itemComponentView.frame = itemFrame
|
||||||
|
|
||||||
if !commonGridItemTransition.animation.isImmediate {
|
if !resultingItemTransition.animation.isImmediate {
|
||||||
commonGridItemTransition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
|
resultingItemTransition.animateScale(view: itemComponentView, from: 0.001, to: 1.0)
|
||||||
}
|
}
|
||||||
if !transition.animation.isImmediate {
|
if !resultingItemTransition.animation.isImmediate {
|
||||||
itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
|
itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
|
||||||
}
|
}
|
||||||
} else if expandedItemState != nil && itemComponentView.superview != self.expandedGridItemContainer {
|
} else if isItemExpanded && itemComponentView.superview != self.expandedGridItemContainer {
|
||||||
let fromFrame = itemComponentView.convert(itemComponentView.bounds, to: self.expandedGridItemContainer)
|
let fromFrame = itemComponentView.convert(itemComponentView.bounds, to: self.expandedGridItemContainer)
|
||||||
itemComponentView.center = fromFrame.center
|
itemComponentView.center = fromFrame.center
|
||||||
self.expandedGridItemContainer.addSubview(itemComponentView)
|
if let expandedThumbnailsView = self.expandedThumbnailsView?.view {
|
||||||
} else if expandedItemState == nil && itemComponentView.superview != self.gridItemViewContainer {
|
self.expandedGridItemContainer.insertSubview(itemComponentView, belowSubview: expandedThumbnailsView)
|
||||||
if !itemView.isCollapsing {
|
} else {
|
||||||
|
self.expandedGridItemContainer.addSubview(itemComponentView)
|
||||||
|
}
|
||||||
|
} else if !isItemExpanded && itemComponentView.superview != self.gridItemViewContainer {
|
||||||
|
if suppressItemExpansionCollapseAnimation {
|
||||||
|
self.gridItemViewContainer.addSubview(itemComponentView)
|
||||||
|
} else if !itemView.isCollapsing {
|
||||||
itemView.isCollapsing = true
|
itemView.isCollapsing = true
|
||||||
let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
|
let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
|
||||||
var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
|
var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
|
||||||
@ -571,8 +621,8 @@ final class VideoChatParticipantsComponent: Component {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !itemView.isCollapsing {
|
if !itemView.isCollapsing {
|
||||||
commonGridItemTransition.setPosition(view: itemComponentView, position: itemFrame.center)
|
resultingItemTransition.setPosition(view: itemComponentView, position: itemFrame.center)
|
||||||
commonGridItemTransition.setBounds(view: itemComponentView, bounds: CGRect(origin: CGPoint(), size: itemFrame.size))
|
resultingItemTransition.setBounds(view: itemComponentView, bounds: CGRect(origin: CGPoint(), size: itemFrame.size))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -608,12 +658,12 @@ final class VideoChatParticipantsComponent: Component {
                    validListItemIds.append(participant.peer.id)

                    var itemTransition = transition
-                    let itemView: ComponentView<Empty>
+                    let itemView: ListItem
                    if let current = self.listItemViews[participant.peer.id] {
                        itemView = current
                    } else {
                        itemTransition = itemTransition.withAnimation(.none)
-                        itemView = ComponentView()
+                        itemView = ListItem()
                        self.listItemViews[participant.peer.id] = itemView
                    }

@@ -623,10 +673,10 @@ final class VideoChatParticipantsComponent: Component {
                    if participant.peer.id == component.call.accountContext.account.peerId {
                        subtitle = PeerListItemComponent.Subtitle(text: "this is you", color: .accent)
                    } else {
-                        subtitle = PeerListItemComponent.Subtitle(text: "listening", color: .neutral)
+                        subtitle = PeerListItemComponent.Subtitle(text: participant.about ?? "listening", color: .neutral)
                    }

-                    let _ = itemView.update(
+                    let _ = itemView.view.update(
                        transition: itemTransition,
                        component: AnyComponent(PeerListItemComponent(
                            context: component.call.accountContext,
@@ -640,7 +690,7 @@ final class VideoChatParticipantsComponent: Component {
                            subtitleAccessory: .none,
                            presence: nil,
                            selectionState: .none,
-                            hasNext: true,
+                            hasNext: false,
                            action: { [weak self] peer, _, _ in
                                guard let self else {
                                    return
@@ -652,18 +702,27 @@ final class VideoChatParticipantsComponent: Component {
                        environment: {},
                        containerSize: itemFrame.size
                    )
-                    if let itemComponentView = itemView.view {
+                    let itemSeparatorFrame = CGRect(origin: CGPoint(x: itemFrame.minX + 63.0, y: itemFrame.maxY - UIScreenPixel), size: CGSize(width: itemFrame.width - 63.0, height: UIScreenPixel))
+                    if let itemComponentView = itemView.view.view {
                        if itemComponentView.superview == nil {
                            itemComponentView.clipsToBounds = true
+
+                            itemView.separatorLayer.backgroundColor = component.theme.list.itemBlocksSeparatorColor.blitOver(UIColor(white: 0.1, alpha: 1.0), alpha: 1.0).cgColor
+
                            self.listItemViewContainer.addSubview(itemComponentView)
+                            self.listItemViewSeparatorContainer.addSublayer(itemView.separatorLayer)

                            if !transition.animation.isImmediate {
                                itemComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
                                itemComponentView.frame = CGRect(origin: itemFrame.origin, size: CGSize(width: itemFrame.width, height: 0.0))
+
+                                var startingItemSeparatorFrame = itemSeparatorFrame
+                                startingItemSeparatorFrame.origin.y = itemFrame.minY - UIScreenPixel
+                                itemView.separatorLayer.frame = startingItemSeparatorFrame
                            }
                        }
                        transition.setFrame(view: itemComponentView, frame: itemFrame)
+                        transition.setFrame(layer: itemView.separatorLayer, frame: itemSeparatorFrame)
                    }
                }
            }
@@ -673,13 +732,24 @@ final class VideoChatParticipantsComponent: Component {
                if !validListItemIds.contains(itemId) {
                    removedListItemIds.append(itemId)

-                    if let itemComponentView = itemView.view {
+                    if let itemComponentView = itemView.view.view {
+                        let itemSeparatorLayer = itemView.separatorLayer
+
                        if !transition.animation.isImmediate {
+                            var itemFrame = itemComponentView.frame
+                            itemFrame.size.height = 0.0
+                            transition.setFrame(view: itemComponentView, frame: itemFrame)
+                            var itemSeparatorFrame = itemSeparatorLayer.frame
+                            itemSeparatorFrame.origin.y = itemFrame.minY - UIScreenPixel
+                            transition.setFrame(layer: itemSeparatorLayer, frame: itemSeparatorFrame, completion: { [weak itemSeparatorLayer] _ in
+                                itemSeparatorLayer?.removeFromSuperlayer()
+                            })
                            itemComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemComponentView] _ in
                                itemComponentView?.removeFromSuperview()
                            })
                        } else {
                            itemComponentView.removeFromSuperview()
+                            itemSeparatorLayer.removeFromSuperlayer()
                        }
                    }
                }
@@ -707,8 +777,161 @@ final class VideoChatParticipantsComponent: Component {
            transition.setPosition(view: self.gridItemViewContainer, position: CGPoint(x: itemLayout.gridItemContainerFrame().midX, y: itemLayout.gridItemContainerFrame().minY))
            transition.setBounds(view: self.gridItemViewContainer, bounds: CGRect(origin: CGPoint(), size: itemLayout.gridItemContainerFrame().size))
            transition.setFrame(view: self.listItemViewContainer, frame: itemLayout.listItemContainerFrame())
+            transition.setFrame(layer: self.listItemViewSeparatorContainer, frame: CGRect(origin: CGPoint(), size: itemLayout.listItemContainerFrame().size))

            transition.setFrame(view: self.expandedGridItemContainer, frame: expandedGridItemContainerFrame)
+
+            if let expandedVideoState = component.expandedVideoState {
+                var thumbnailParticipants: [VideoChatExpandedParticipantThumbnailsComponent.Participant] = []
+                for participant in self.gridParticipants {
+                    thumbnailParticipants.append(VideoChatExpandedParticipantThumbnailsComponent.Participant(
+                        participant: participant.participant,
+                        isPresentation: participant.isPresentation
+                    ))
+                }
+                /*for participant in self.listParticipants {
+                    thumbnailParticipants.append(VideoChatExpandedParticipantThumbnailsComponent.Participant(
+                        participant: participant,
+                        isPresentation: false
+                    ))
+                }*/
+
+                var expandedThumbnailsTransition = transition
+                let expandedThumbnailsView: ComponentView<Empty>
+                if let current = self.expandedThumbnailsView {
+                    expandedThumbnailsView = current
+                } else {
+                    expandedThumbnailsTransition = expandedThumbnailsTransition.withAnimation(.none)
+                    expandedThumbnailsView = ComponentView()
+                    self.expandedThumbnailsView = expandedThumbnailsView
+                }
+                let expandedThumbnailsSize = expandedThumbnailsView.update(
+                    transition: expandedThumbnailsTransition,
+                    component: AnyComponent(VideoChatExpandedParticipantThumbnailsComponent(
+                        call: component.call,
+                        theme: component.theme,
+                        participants: thumbnailParticipants,
+                        selectedParticipant: component.expandedVideoState.flatMap { expandedVideoState in
+                            return VideoChatExpandedParticipantThumbnailsComponent.Participant.Key(id: expandedVideoState.mainParticipant.id, isPresentation: expandedVideoState.mainParticipant.isPresentation)
+                        },
+                        updateSelectedParticipant: { [weak self] key in
+                            guard let self, let component = self.component else {
+                                return
+                            }
+                            component.updateMainParticipant(VideoParticipantKey(id: key.id, isPresentation: key.isPresentation))
+                        }
+                    )),
+                    environment: {},
+                    containerSize: itemLayout.expandedGrid.itemContainerFrame().size
+                )
+                let expandedThumbnailsFrame = CGRect(origin: CGPoint(x: 0.0, y: expandedGridItemContainerFrame.height - expandedThumbnailsSize.height), size: expandedThumbnailsSize)
+                if let expandedThumbnailsComponentView = expandedThumbnailsView.view {
+                    if expandedThumbnailsComponentView.superview == nil {
+                        self.expandedGridItemContainer.addSubview(expandedThumbnailsComponentView)
+
+                        let fromReferenceFrame: CGRect
+                        if let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == expandedVideoState.mainParticipant.id && $0.isPresentation == expandedVideoState.mainParticipant.isPresentation }) {
+                            fromReferenceFrame = self.gridItemViewContainer.convert(itemLayout.gridItemFrame(at: index), to: self.expandedGridItemContainer)
+                        } else {
+                            fromReferenceFrame = previousExpandedGridItemContainerFrame
+                        }
+
+                        expandedThumbnailsComponentView.frame = CGRect(origin: CGPoint(x: fromReferenceFrame.minX - previousExpandedGridItemContainerFrame.minX, y: fromReferenceFrame.height - expandedThumbnailsSize.height), size: expandedThumbnailsFrame.size)
+
+                        if !transition.animation.isImmediate {
+                            expandedThumbnailsComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+                        }
+                    }
+                    transition.setFrame(view: expandedThumbnailsComponentView, frame: expandedThumbnailsFrame)
+                }
+
+                var expandedControlsTransition = transition
+                let expandedControlsView: ComponentView<Empty>
+                if let current = self.expandedControlsView {
+                    expandedControlsView = current
+                } else {
+                    expandedControlsTransition = expandedControlsTransition.withAnimation(.none)
+                    expandedControlsView = ComponentView()
+                    self.expandedControlsView = expandedControlsView
+                }
+                let expandedControlsSize = expandedControlsView.update(
+                    transition: expandedControlsTransition,
+                    component: AnyComponent(VideoChatExpandedControlsComponent(
+                        theme: component.theme,
+                        strings: component.strings,
+                        backAction: { [weak self] in
+                            guard let self, let component = self.component else {
+                                return
+                            }
+                            component.updateMainParticipant(nil)
+                        }
+                    )),
+                    environment: {},
+                    containerSize: itemLayout.expandedGrid.itemContainerFrame().size
+                )
+                let expandedControlsFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: expandedControlsSize)
+                if let expandedControlsComponentView = expandedControlsView.view {
+                    if expandedControlsComponentView.superview == nil {
+                        self.expandedGridItemContainer.addSubview(expandedControlsComponentView)
+
+                        let fromReferenceFrame: CGRect
+                        if let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == expandedVideoState.mainParticipant.id && $0.isPresentation == expandedVideoState.mainParticipant.isPresentation }) {
+                            fromReferenceFrame = self.gridItemViewContainer.convert(itemLayout.gridItemFrame(at: index), to: self.expandedGridItemContainer)
+                        } else {
+                            fromReferenceFrame = previousExpandedGridItemContainerFrame
+                        }
+
+                        expandedControlsComponentView.frame = CGRect(origin: CGPoint(x: fromReferenceFrame.minX - previousExpandedGridItemContainerFrame.minX, y: fromReferenceFrame.minY - previousExpandedGridItemContainerFrame.minY), size: expandedControlsFrame.size)
+
+                        if !transition.animation.isImmediate {
+                            expandedControlsComponentView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+                        }
+                    }
+                    transition.setFrame(view: expandedControlsComponentView, frame: expandedControlsFrame)
+                }
+            } else {
+                if let expandedThumbnailsView = self.expandedThumbnailsView {
+                    self.expandedThumbnailsView = nil
+
+                    if transition.containedViewLayoutTransition.isAnimated, let expandedThumbnailsComponentView = expandedThumbnailsView.view {
+                        if let collapsingItemView = self.gridItemViews.values.first(where: { $0.isCollapsing }), let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == collapsingItemView.key.id && $0.isPresentation == collapsingItemView.key.isPresentation }) {
+                            let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
+                            var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
+                            targetItemFrame.origin.y -= expandedGridItemContainerFrame.minY
+                            targetItemFrame.origin.x -= expandedGridItemContainerFrame.minX
+
+                            let targetThumbnailsFrame = CGRect(origin: CGPoint(x: targetItemFrame.minX, y: targetItemFrame.maxY - expandedThumbnailsComponentView.bounds.height), size: expandedThumbnailsComponentView.bounds.size)
+                            transition.setFrame(view: expandedThumbnailsComponentView, frame: targetThumbnailsFrame)
+                        }
+                        expandedThumbnailsComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.12, removeOnCompletion: false, completion: { [weak expandedThumbnailsComponentView] _ in
+                            expandedThumbnailsComponentView?.removeFromSuperview()
+                        })
+                    } else {
+                        expandedThumbnailsView.view?.removeFromSuperview()
+                    }
+                }
+
+                if let expandedControlsView = self.expandedControlsView {
+                    self.expandedControlsView = nil
+
+                    if transition.containedViewLayoutTransition.isAnimated, let expandedControlsComponentView = expandedControlsView.view {
+                        if let collapsingItemView = self.gridItemViews.values.first(where: { $0.isCollapsing }), let index = self.gridParticipants.firstIndex(where: { $0.participant.peer.id == collapsingItemView.key.id && $0.isPresentation == collapsingItemView.key.isPresentation }) {
+                            let targetLocalItemFrame = itemLayout.gridItemFrame(at: index)
+                            var targetItemFrame = self.gridItemViewContainer.convert(targetLocalItemFrame, to: self)
+                            targetItemFrame.origin.y -= expandedGridItemContainerFrame.minY
+                            targetItemFrame.origin.x -= expandedGridItemContainerFrame.minX
+
+                            let targetThumbnailsFrame = CGRect(origin: CGPoint(x: targetItemFrame.minX, y: targetItemFrame.minY), size: expandedControlsComponentView.bounds.size)
+                            transition.setFrame(view: expandedControlsComponentView, frame: targetThumbnailsFrame)
+                        }
+                        expandedControlsComponentView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.12, removeOnCompletion: false, completion: { [weak expandedControlsComponentView] _ in
+                            expandedControlsComponentView?.removeFromSuperview()
+                        })
+                    } else {
+                        expandedControlsView.view?.removeFromSuperview()
+                    }
+                }
+            }
        }

        func update(component: VideoChatParticipantsComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
@@ -798,21 +1021,22 @@ final class VideoChatParticipantsComponent: Component {
            )
            self.itemLayout = itemLayout

-            let listItemsBackroundSize = self.listItemsBackround.update(
+            let listItemsBackgroundSize = self.listItemsBackground.update(
                transition: transition,
                component: AnyComponent(RoundedRectangle(
-                    color: UIColor(white: 1.0, alpha: 0.1),
+                    color: UIColor(white: 0.1, alpha: 1.0),
                    cornerRadius: 10.0
                )),
                environment: {},
                containerSize: CGSize(width: availableSize.width - itemLayout.sideInset * 2.0, height: itemLayout.list.contentHeight())
            )
-            let listItemsBackroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: listItemsBackroundSize)
-            if let listItemsBackroundView = self.listItemsBackround.view {
-                if listItemsBackroundView.superview == nil {
-                    self.listItemViewContainer.addSubview(listItemsBackroundView)
+            let listItemsBackgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: listItemsBackgroundSize)
+            if let listItemsBackgroundView = self.listItemsBackground.view {
+                if listItemsBackgroundView.superview == nil {
+                    self.listItemViewContainer.addSubview(listItemsBackgroundView)
+                    self.listItemViewContainer.layer.addSublayer(self.listItemViewSeparatorContainer)
                }
-                transition.setFrame(view: listItemsBackroundView, frame: listItemsBackroundFrame)
+                transition.setFrame(view: listItemsBackgroundView, frame: listItemsBackgroundFrame)
            }

            var requestedVideo: [PresentationGroupCallRequestedVideo] = []
@@ -70,6 +70,8 @@ private final class VideoChatScreenComponent: Component {
        private var callState: PresentationGroupCallState?
        private var stateDisposable: Disposable?

+        private var isPushToTalkActive: Bool = false
+
        private var members: PresentationGroupCallMembers?
        private var membersDisposable: Disposable?

@@ -137,6 +139,9 @@ private final class VideoChatScreenComponent: Component {
                if abs(panGestureState.offsetFraction) > 0.6 || abs(velocity.y) >= 100.0 {
                    self.panGestureState = PanGestureState(offsetFraction: panGestureState.offsetFraction < 0.0 ? -1.0 : 1.0)
                    self.notifyDismissedInteractivelyOnPanGestureApply = true
+                    if let controller = self.environment?.controller() as? VideoChatScreenV2Impl {
+                        controller.notifyDismissed()
+                    }
                }

                self.state?.updated(transition: .spring(duration: 0.4))
@@ -277,6 +282,27 @@ private final class VideoChatScreenComponent: Component {
            if self.members != members {
                self.members = members

+                if let expandedParticipantsVideoState = self.expandedParticipantsVideoState {
+                    if let _ = members?.participants.first(where: { participant in
+                        if participant.peer.id == expandedParticipantsVideoState.mainParticipant.id {
+                            if expandedParticipantsVideoState.mainParticipant.isPresentation {
+                                if participant.presentationDescription == nil {
+                                    return false
+                                }
+                            } else {
+                                if participant.videoDescription == nil {
+                                    return false
+                                }
+                            }
+                            return true
+                        }
+                        return false
+                    }) {
+                    } else {
+                        self.expandedParticipantsVideoState = nil
+                    }
+                }
+
                if !self.isUpdating {
                    self.state?.updated(transition: .spring(duration: 0.4))
                }
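Editor's note: the new block above drops the expanded-video state whenever the member list updates and the pinned participant no longer publishes the stream being shown (camera or screen share). The predicate can be read in isolation as below; the types are simplified stand-ins with hypothetical names, not the project's GroupCallParticipantsContext models.

// Simplified stand-ins for the real participant and expanded-state types.
struct ParticipantStub {
    var peerId: Int64
    var hasVideo: Bool
    var hasPresentation: Bool
}

struct ExpandedKeyStub {
    var peerId: Int64
    var isPresentation: Bool
}

// True when the expanded participant still has the stream that is currently displayed.
func expandedStateIsStillValid(expanded: ExpandedKeyStub, participants: [ParticipantStub]) -> Bool {
    return participants.contains { participant in
        guard participant.peerId == expanded.peerId else {
            return false
        }
        return expanded.isPresentation ? participant.hasPresentation : participant.hasVideo
    }
}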
@@ -337,7 +363,13 @@ private final class VideoChatScreenComponent: Component {
                    self.notifyDismissedInteractivelyOnPanGestureApply = false

                    if let controller = self.environment?.controller() as? VideoChatScreenV2Impl {
-                        controller.superDismiss()
+                        if self.isUpdating {
+                            DispatchQueue.main.async { [weak controller] in
+                                controller?.superDismiss()
+                            }
+                        } else {
+                            controller.superDismiss()
+                        }
                    }
                }
                if let completionOnPanGestureApply = self.completionOnPanGestureApply {
@@ -420,11 +452,17 @@ private final class VideoChatScreenComponent: Component {
                transition.setFrame(view: navigationRightButtonView, frame: navigationRightButtonFrame)
            }

+            let idleTitleStatusText: String
+            if let callState = self.callState, callState.networkState == .connected, let members = self.members {
+                idleTitleStatusText = environment.strings.VoiceChat_Panel_Members(Int32(max(1, members.totalCount)))
+            } else {
+                idleTitleStatusText = "connecting..."
+            }
            let titleSize = self.title.update(
                transition: transition,
                component: AnyComponent(VideoChatTitleComponent(
                    title: self.peer?.debugDisplayTitle ?? " ",
-                    status: .idle(count: self.members?.totalCount ?? 1),
+                    status: idleTitleStatusText,
                    strings: environment.strings
                )),
                environment: {},
@@ -518,8 +556,13 @@ private final class VideoChatScreenComponent: Component {
                actionButtonMicrophoneState = .connecting
            case .connected:
                if let _ = callState.muteState {
-                    micButtonContent = .muted
-                    actionButtonMicrophoneState = .muted
+                    if self.isPushToTalkActive {
+                        micButtonContent = .unmuted
+                        actionButtonMicrophoneState = .unmuted
+                    } else {
+                        micButtonContent = .muted
+                        actionButtonMicrophoneState = .muted
+                    }
                } else {
                    micButtonContent = .unmuted
                    actionButtonMicrophoneState = .unmuted
@@ -532,29 +575,43 @@ private final class VideoChatScreenComponent: Component {

            let _ = self.microphoneButton.update(
                transition: transition,
-                component: AnyComponent(PlainButtonComponent(
-                    content: AnyComponent(VideoChatMicButtonComponent(
-                        content: micButtonContent,
-                        isCollapsed: self.expandedParticipantsVideoState != nil
-                    )),
-                    effectAlignment: .center,
-                    action: { [weak self] in
+                component: AnyComponent(VideoChatMicButtonComponent(
+                    content: micButtonContent,
+                    isCollapsed: self.expandedParticipantsVideoState != nil,
+                    updateUnmutedStateIsPushToTalk: { [weak self] unmutedStateIsPushToTalk in
                        guard let self, let component = self.component else {
                            return
                        }
                        guard let callState = self.callState else {
                            return
                        }
-                        if let muteState = callState.muteState {
-                            if muteState.canUnmute {
-                                component.call.setIsMuted(action: .unmuted)
+                        if let unmutedStateIsPushToTalk {
+                            if unmutedStateIsPushToTalk {
+                                if let muteState = callState.muteState {
+                                    if muteState.canUnmute {
+                                        self.isPushToTalkActive = true
+                                        component.call.setIsMuted(action: .muted(isPushToTalkActive: true))
+                                    } else {
+                                        self.isPushToTalkActive = false
+                                    }
+                                } else {
+                                    self.isPushToTalkActive = true
+                                    component.call.setIsMuted(action: .muted(isPushToTalkActive: true))
+                                }
+                            } else {
+                                if let muteState = callState.muteState {
+                                    if muteState.canUnmute {
+                                        component.call.setIsMuted(action: .unmuted)
+                                    }
+                                }
+                                self.isPushToTalkActive = false
                            }
+                            self.state?.updated(transition: .spring(duration: 0.5))
                        } else {
                            component.call.setIsMuted(action: .muted(isPushToTalkActive: false))
                        }
-                    },
-                    animateAlpha: false,
-                    animateScale: false
+                    }
                )),
                environment: {},
                containerSize: CGSize(width: microphoneButtonDiameter, height: microphoneButtonDiameter)
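Editor's note: the new updateUnmutedStateIsPushToTalk callback collapses three gestures into one handler: true asks for a temporary push-to-talk unmute (only when the server allows unmuting), false asks for a regular toggle unmute, and nil mutes again. A reduced model of that decision, with hypothetical names and deliberately simplified handling of the "already unmuted" case, might look like this; it is not code from the repository.

// Hypothetical, reduced model of the mute decision in the diff above.
enum MicCommand: Equatable {
    case unmuted
    case muted(isPushToTalkActive: Bool)
}

// Returns the command to send (if any) and the resulting push-to-talk flag.
func micCommand(unmutedStateIsPushToTalk: Bool?, canUnmute: Bool) -> (command: MicCommand?, isPushToTalkActive: Bool) {
    guard let unmutedStateIsPushToTalk else {
        // Plain tap while unmuted: mute again and clear push-to-talk.
        return (.muted(isPushToTalkActive: false), false)
    }
    if unmutedStateIsPushToTalk {
        // Hold-to-talk: only allowed if the participant may unmute at all.
        if canUnmute {
            return (.muted(isPushToTalkActive: true), true)
        } else {
            return (nil, false)
        }
    } else {
        // Toggle unmute: the unmute action is only sent when permitted.
        return (canUnmute ? .unmuted : nil, false)
    }
}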
@@ -737,11 +794,16 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo
        self.idleTimerExtensionDisposable = nil

        self.didAppearOnce = false
+        self.notifyDismissed()
+    }
+
+    func notifyDismissed() {
        if !self.isDismissed {
            self.isDismissed = true
+            DispatchQueue.main.async {
+                self.onViewDidDisappear?()
+            }
        }
-
-        self.onViewDidDisappear?()
    }

    public func dismiss(closing: Bool, manual: Bool) {
@@ -750,6 +812,8 @@ final class VideoChatScreenV2Impl: ViewControllerComponentContainer, VoiceChatCo

    override public func dismiss(completion: (() -> Void)? = nil) {
        if !self.isAnimatingDismiss {
+            self.notifyDismissed()
+
            if let componentView = self.node.hostView.componentView as? VideoChatScreenComponent.View {
                self.isAnimatingDismiss = true
                componentView.animateOut(completion: { [weak self] in
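Editor's note: the dismissal path now funnels through notifyDismissed(), which fires onViewDidDisappear on the next main-queue pass, and superDismiss() is likewise deferred while a component update is in flight. The goal is to avoid re-entrant state changes during a layout pass. The standalone sketch below shows only the pattern, with hypothetical names; it is not the controller code itself.

import Foundation

// Hypothetical illustration of deferring callbacks while work is in flight,
// mirroring the isUpdating / DispatchQueue.main.async pattern used above.
final class DismissalNotifier {
    private(set) var isDismissed = false
    var isUpdating = false
    var onDismissed: (() -> Void)?

    func notifyDismissed() {
        guard !isDismissed else { return }
        isDismissed = true
        // Post the callback instead of calling it synchronously so observers
        // never run inside an ongoing update.
        DispatchQueue.main.async { [weak self] in
            self?.onDismissed?()
        }
    }

    func dismiss(_ performDismiss: @escaping () -> Void) {
        if isUpdating {
            // Defer until the current update pass has finished.
            DispatchQueue.main.async {
                performDismiss()
            }
        } else {
            performDismiss()
        }
    }
}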
@@ -6,32 +6,13 @@ import MultilineTextComponent
import TelegramPresentationData

final class VideoChatTitleComponent: Component {
-    enum Status: Equatable {
-        enum Key {
-            case idle
-            case speaking
-        }
-
-        case idle(count: Int)
-        case speaking(titles: [String])
-
-        var key: Key {
-            switch self {
-            case .idle:
-                return .idle
-            case .speaking:
-                return .speaking
-            }
-        }
-    }
-
    let title: String
-    let status: Status
+    let status: String
    let strings: PresentationStrings

    init(
        title: String,
-        status: Status,
+        status: String,
        strings: PresentationStrings
    ) {
        self.title = title
@@ -73,7 +54,6 @@ final class VideoChatTitleComponent: Component {
            self.isUpdating = false
        }

-        let previousComponent = self.component
        self.component = component

        let spacing: CGFloat = 1.0
@@ -87,18 +67,6 @@ final class VideoChatTitleComponent: Component {
            containerSize: CGSize(width: availableSize.width, height: 100.0)
        )

-        if previousComponent?.status.key != component.status.key {
-            if let status = self.status {
-                self.status = nil
-                if let statusView = status.view {
-                    transition.setAlpha(view: statusView, alpha: 0.0, completion: { [weak statusView] _ in
-                        statusView?.removeFromSuperview()
-                    })
-                    transition.setPosition(view: statusView, position: statusView.center.offsetBy(dx: 0.0, dy: -10.0))
-                }
-            }
-        }
-
        let status: ComponentView<Empty>
        if let current = self.status {
            status = current
@@ -107,16 +75,9 @@ final class VideoChatTitleComponent: Component {
            self.status = status
        }
        let statusComponent: AnyComponent<Empty>
-        switch component.status {
-        case let .idle(count):
-            statusComponent = AnyComponent(MultilineTextComponent(
-                text: .plain(NSAttributedString(string: component.strings.VoiceChat_Panel_Members(Int32(count)), font: Font.regular(13.0), textColor: UIColor(white: 1.0, alpha: 0.5)))
-            ))
-        case let .speaking(titles):
-            statusComponent = AnyComponent(MultilineTextComponent(
-                text: .plain(NSAttributedString(string: titles.joined(separator: ", "), font: Font.regular(13.0), textColor: UIColor(rgb: 0x34c759)))
-            ))
-        }
+        statusComponent = AnyComponent(MultilineTextComponent(
+            text: .plain(NSAttributedString(string: component.status, font: Font.regular(13.0), textColor: UIColor(white: 1.0, alpha: 0.5)))
+        ))

        let statusSize = status.update(
            transition: .immediate,
@@ -11,7 +11,7 @@ swift_library(
    ],
    deps = [
        "//submodules/ComponentFlow",
-        "//submodules/Components/BundleIconComponent",
+        "//submodules/Components/MultilineTextComponent",
        "//submodules/Display",
    ],
    visibility = [
@ -1,8 +1,8 @@
|
|||||||
import Foundation
|
import Foundation
|
||||||
import UIKit
|
import UIKit
|
||||||
import ComponentFlow
|
import ComponentFlow
|
||||||
import BundleIconComponent
|
|
||||||
import Display
|
import Display
|
||||||
|
import MultilineTextComponent
|
||||||
|
|
||||||
public final class BackButtonComponent: Component {
|
public final class BackButtonComponent: Component {
|
||||||
public let title: String
|
public let title: String
|
||||||
@ -30,22 +30,31 @@ public final class BackButtonComponent: Component {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public final class View: HighlightTrackingButton {
|
public final class View: HighlightTrackingButton {
|
||||||
private let arrow = ComponentView<Empty>()
|
private let arrowView: UIImageView
|
||||||
private let title = ComponentView<Empty>()
|
private let title = ComponentView<Empty>()
|
||||||
|
|
||||||
private var component: BackButtonComponent?
|
private var component: BackButtonComponent?
|
||||||
|
|
||||||
public override init(frame: CGRect) {
|
public override init(frame: CGRect) {
|
||||||
|
self.arrowView = UIImageView()
|
||||||
|
|
||||||
super.init(frame: frame)
|
super.init(frame: frame)
|
||||||
|
|
||||||
|
self.addSubview(self.arrowView)
|
||||||
|
|
||||||
self.highligthedChanged = { [weak self] highlighted in
|
self.highligthedChanged = { [weak self] highlighted in
|
||||||
if let self {
|
if let self {
|
||||||
|
let transition: ComponentTransition = highlighted ? .immediate : .easeInOut(duration: 0.2)
|
||||||
if highlighted {
|
if highlighted {
|
||||||
self.layer.removeAnimation(forKey: "opacity")
|
transition.setAlpha(view: self.arrowView, alpha: 0.65)
|
||||||
self.alpha = 0.65
|
if let titleView = self.title.view {
|
||||||
|
transition.setAlpha(view: titleView, alpha: 0.65)
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
self.alpha = 1.0
|
transition.setAlpha(view: self.arrowView, alpha: 1.0)
|
||||||
self.layer.animateAlpha(from: 0.65, to: 1.0, duration: 0.2)
|
if let titleView = self.title.view {
|
||||||
|
transition.setAlpha(view: titleView, alpha: 1.0)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -64,22 +73,44 @@ public final class BackButtonComponent: Component {
         }
 
         override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
-            return super.hitTest(point, with: event)
+            if self.isHidden || self.alpha.isZero || self.isUserInteractionEnabled == false {
+                return nil
+            }
+            
+            if self.bounds.insetBy(dx: -8.0, dy: -8.0).contains(point) {
+                return self
+            }
+            
+            return nil
         }
 
         func update(component: BackButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
-            let sideInset: CGFloat = 4.0
+            self.component = component
+            
+            if self.arrowView.image == nil {
+                self.arrowView.image = NavigationBar.backArrowImage(color: .white)?.withRenderingMode(.alwaysTemplate)
+            }
+            self.arrowView.tintColor = component.color
+            
             let titleSize = self.title.update(
                 transition: .immediate,
-                component: AnyComponent(Text(text: component.title, font: Font.regular(17.0), color: component.color)),
+                component: AnyComponent(MultilineTextComponent(
+                    text: .plain(NSAttributedString(string: component.title, font: Font.regular(17.0), textColor: component.color))
+                )),
                 environment: {},
                 containerSize: CGSize(width: availableSize.width - 4.0, height: availableSize.height)
             )
 
+            let arrowInset: CGFloat = 15.0
+            
-            let size = CGSize(width: sideInset * 2.0 + titleSize.width, height: availableSize.height)
+            let size = CGSize(width: arrowInset + titleSize.width, height: titleSize.height)
 
+            if let arrowImage = self.arrowView.image {
+                let arrowFrame = CGRect(origin: CGPoint(x: -4.0, y: floor((size.height - arrowImage.size.height) * 0.5)), size: arrowImage.size)
+                transition.setFrame(view: self.arrowView, frame: arrowFrame)
+            }
+            
-            let titleFrame = titleSize.centered(in: CGRect(origin: CGPoint(), size: size))
+            let titleFrame = CGRect(origin: CGPoint(x: arrowInset, y: floor((size.height - titleSize.height) * 0.5)), size: titleSize)
             if let titleView = self.title.view {
                 if titleView.superview == nil {
                     titleView.layer.anchorPoint = CGPoint()
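
Note on the two BackButtonComponent hunks above: highlighting now drives the arrow and title through `transition.setAlpha` instead of mutating the view's own `alpha` and layer animations, and `hitTest` is overridden so touches up to 8 points outside the bounds still hit the button. A minimal, self-contained sketch of that hit-testing idea; the `ExpandedHitTestView` class is illustrative only, while the 8-point inset value is taken from the diff:

    import UIKit

    // Illustrative only: a plain UIView that accepts touches slightly outside
    // its bounds, mirroring the insetBy(dx: -8.0, dy: -8.0) check in the diff.
    final class ExpandedHitTestView: UIView {
        override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
            if self.isHidden || self.alpha.isZero || !self.isUserInteractionEnabled {
                return nil
            }
            // Negative insets grow the rectangle, so near-miss touches still land here.
            if self.bounds.insetBy(dx: -8.0, dy: -8.0).contains(point) {
                return self
            }
            return nil
        }
    }
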
@@ -906,13 +906,14 @@ public final class SharedAccountContextImpl: SharedAccountContext {
                         strongSelf.groupCallController = groupCallController
                         navigationController.pushViewController(groupCallController)
                     } else {
+                        strongSelf.hasGroupCallOnScreenPromise.set(true)
+                        
                         let _ = (makeVoiceChatControllerInitialData(sharedContext: strongSelf, accountContext: call.accountContext, call: call)
                         |> deliverOnMainQueue).start(next: { [weak strongSelf, weak navigationController] initialData in
                             guard let strongSelf, let navigationController else {
                                 return
                             }
                             
-                            strongSelf.hasGroupCallOnScreenPromise.set(true)
                             let groupCallController = makeVoiceChatController(sharedContext: strongSelf, accountContext: call.accountContext, call: call, initialData: initialData)
                             groupCallController.onViewDidAppear = { [weak strongSelf] in
                                 if let strongSelf {
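
Note on the SharedAccountContextImpl hunk above: `hasGroupCallOnScreenPromise.set(true)` moves out of the asynchronous `makeVoiceChatControllerInitialData` continuation and is now set before the request starts, so there is no window in which a group call is being prepared but not yet reported as on screen. A rough sketch of that ordering, assuming a SwiftSignalKit `ValuePromise` and a hypothetical `fetchInitialData` stand-in:

    import SwiftSignalKit

    let hasGroupCallOnScreenPromise = ValuePromise<Bool>(false, ignoreRepeated: true)

    // Hypothetical stand-in for makeVoiceChatControllerInitialData(...) |> deliverOnMainQueue.
    func fetchInitialData(_ completion: @escaping () -> Void) {
        Queue.mainQueue().async(completion)
    }

    func presentGroupCall() {
        // Report the call UI as on screen before the async preparation starts,
        // mirroring the reordering in the hunk above.
        hasGroupCallOnScreenPromise.set(true)
        fetchInitialData {
            // build and push the group call controller here
        }
    }
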
@@ -55,6 +55,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
     public var crashOnMemoryPressure: Bool
     public var dustEffect: Bool
     public var callV2: Bool
+    public var experimentalCallMute: Bool
     public var allowWebViewInspection: Bool
     public var disableReloginTokens: Bool
     public var liveStreamV2: Bool
@@ -91,6 +92,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
             crashOnMemoryPressure: false,
             dustEffect: false,
             callV2: false,
+            experimentalCallMute: false,
             allowWebViewInspection: false,
             disableReloginTokens: false,
             liveStreamV2: false
@@ -128,6 +130,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
         crashOnMemoryPressure: Bool,
         dustEffect: Bool,
         callV2: Bool,
+        experimentalCallMute: Bool,
         allowWebViewInspection: Bool,
         disableReloginTokens: Bool,
         liveStreamV2: Bool
@@ -162,6 +165,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
         self.crashOnMemoryPressure = crashOnMemoryPressure
         self.dustEffect = dustEffect
         self.callV2 = callV2
+        self.experimentalCallMute = experimentalCallMute
         self.allowWebViewInspection = allowWebViewInspection
         self.disableReloginTokens = disableReloginTokens
         self.liveStreamV2 = liveStreamV2
@@ -200,6 +204,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
         self.crashOnMemoryPressure = try container.decodeIfPresent(Bool.self, forKey: "crashOnMemoryPressure") ?? false
         self.dustEffect = try container.decodeIfPresent(Bool.self, forKey: "dustEffect") ?? false
         self.callV2 = try container.decodeIfPresent(Bool.self, forKey: "callV2") ?? false
+        self.experimentalCallMute = try container.decodeIfPresent(Bool.self, forKey: "experimentalCallMute") ?? false
         self.allowWebViewInspection = try container.decodeIfPresent(Bool.self, forKey: "allowWebViewInspection") ?? false
         self.disableReloginTokens = try container.decodeIfPresent(Bool.self, forKey: "disableReloginTokens") ?? false
        self.liveStreamV2 = try container.decodeIfPresent(Bool.self, forKey: "liveStreamV2") ?? false
@@ -238,6 +243,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
         try container.encode(self.crashOnMemoryPressure, forKey: "crashOnMemoryPressure")
         try container.encode(self.dustEffect, forKey: "dustEffect")
         try container.encode(self.callV2, forKey: "callV2")
+        try container.encode(self.experimentalCallMute, forKey: "experimentalCallMute")
         try container.encode(self.allowWebViewInspection, forKey: "allowWebViewInspection")
         try container.encode(self.disableReloginTokens, forKey: "disableReloginTokens")
         try container.encode(self.liveStreamV2, forKey: "liveStreamV2")
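
Note on the ExperimentalUISettings hunks above: the new `experimentalCallMute` flag is added in the usual four places (stored property, defaults, memberwise initializer, and both Codable paths), and decoding with `decodeIfPresent(...) ?? false` keeps previously serialized settings blobs valid. A self-contained miniature of that pattern, using a hypothetical `Settings` struct rather than the real one:

    import Foundation

    // Hypothetical miniature of the pattern used in the diff: new Bool flags
    // decode to false when absent, so old serialized settings still decode.
    struct Settings: Codable, Equatable {
        var callV2: Bool
        var experimentalCallMute: Bool

        init(callV2: Bool = false, experimentalCallMute: Bool = false) {
            self.callV2 = callV2
            self.experimentalCallMute = experimentalCallMute
        }

        init(from decoder: Decoder) throws {
            let container = try decoder.container(keyedBy: CodingKeys.self)
            self.callV2 = try container.decodeIfPresent(Bool.self, forKey: .callV2) ?? false
            self.experimentalCallMute = try container.decodeIfPresent(Bool.self, forKey: .experimentalCallMute) ?? false
        }
    }
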
@@ -463,7 +463,7 @@ public final class OngoingGroupCallContext {
         
         private let audioSessionActiveDisposable = MetaDisposable()
         
-        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
+        init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
             self.queue = queue
             
             #if os(iOS)
@@ -574,6 +574,7 @@ public final class OngoingGroupCallContext {
                 videoContentType: _videoContentType,
                 enableNoiseSuppression: enableNoiseSuppression,
                 disableAudioInput: disableAudioInput,
+                enableSystemMute: enableSystemMute,
                 preferX264: preferX264,
                 logPath: logPath,
                 onMutedSpeechActivityDetected: { value in
@@ -1112,10 +1113,10 @@ public final class OngoingGroupCallContext {
         }
     }
     
-    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void) {
        let queue = self.queue
        self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected)
        })
    }
    
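
Note on the OngoingGroupCallContext hunks above: a new `enableSystemMute` parameter is threaded from the public initializer through the queue-local `Impl` into the Objective-C group call context. A hedged sketch of a call site passing the flag through; deriving it from `experimentalCallMute` and the other argument values are assumptions, not part of this diff:

    import SwiftSignalKit
    import TelegramVoip

    // Assumed wiring: the experimental debug toggle drives the new parameter.
    // `experimentalUISettings` is assumed to be available in the calling scope.
    let enableSystemMute = experimentalUISettings.experimentalCallMute

    let context = OngoingGroupCallContext(
        audioSessionActive: .single(true),
        video: nil,
        requestMediaChannelDescriptions: { _, _ in EmptyDisposable },
        rejoinNeeded: {},
        outgoingAudioBitrateKbit: nil,
        videoContentType: .generic,
        enableNoiseSuppression: false,
        disableAudioInput: false,
        enableSystemMute: enableSystemMute,
        preferX264: false,
        logPath: "",
        onMutedSpeechActivityDetected: { _ in }
    )
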
@@ -742,8 +742,8 @@ public final class OngoingCallContext {
     public final class AudioDevice {
         let impl: SharedCallAudioDevice
         
-        public static func create() -> AudioDevice? {
-            return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false))
+        public static func create(enableSystemMute: Bool) -> AudioDevice? {
+            return AudioDevice(impl: SharedCallAudioDevice(disableRecording: false, enableSystemMute: enableSystemMute))
         }
         
         private init(impl: SharedCallAudioDevice) {
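
Note on the OngoingCallContext hunk above: `AudioDevice.create` no longer hard-codes the system-mute behaviour; callers must decide it explicitly. A one-line usage sketch; deriving the argument from the experimental flag is an assumption, not shown in this diff:

    // Assumed call site: pass the debug setting straight through to the audio device.
    let audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: experimentalUISettings.experimentalCallMute)
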
@@ -23,7 +23,7 @@
 
 @interface SharedCallAudioDevice : NSObject
 
-- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording;
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording enableSystemMute:(bool)enableSystemMute;
 
 + (void)setupAudioSession;
 
@@ -412,6 +412,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
     videoContentType:(OngoingGroupCallVideoContentType)videoContentType
     enableNoiseSuppression:(bool)enableNoiseSuppression
     disableAudioInput:(bool)disableAudioInput
+    enableSystemMute:(bool)enableSystemMute
     preferX264:(bool)preferX264
     logPath:(NSString * _Nonnull)logPath
     onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
@@ -79,9 +79,9 @@ public:
 
 class SharedAudioDeviceModuleImpl: public tgcalls::SharedAudioDeviceModule {
 public:
-    SharedAudioDeviceModuleImpl(bool disableAudioInput) {
+    SharedAudioDeviceModuleImpl(bool disableAudioInput, bool enableSystemMute) {
         RTC_DCHECK(tgcalls::StaticThreads::getThreads()->getWorkerThread()->IsCurrent());
-        _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+        _audioDeviceModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, enableSystemMute, disableAudioInput ? 2 : 1);
     }
     
     virtual ~SharedAudioDeviceModuleImpl() override {
@@ -129,11 +129,11 @@ private:
     std::shared_ptr<tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>> _audioDeviceModule;
 }
 
-- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording {
+- (instancetype _Nonnull)initWithDisableRecording:(bool)disableRecording enableSystemMute:(bool)enableSystemMute {
     self = [super init];
     if (self != nil) {
-        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording]() mutable {
-            return std::static_pointer_cast<tgcalls::SharedAudioDeviceModule>(std::make_shared<SharedAudioDeviceModuleImpl>(disableRecording));
+        _audioDeviceModule.reset(new tgcalls::ThreadLocalObject<tgcalls::SharedAudioDeviceModule>(tgcalls::StaticThreads::getThreads()->getWorkerThread(), [disableRecording, enableSystemMute]() mutable {
+            return std::static_pointer_cast<tgcalls::SharedAudioDeviceModule>(std::make_shared<SharedAudioDeviceModuleImpl>(disableRecording, enableSystemMute));
         }));
     }
     return self;
@@ -1278,7 +1278,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
         } else {
            rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
-            auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, 1);
+            auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, false, false, 1);
             [queue dispatch:^{
                 __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                 if (strongSelf) {
@@ -1691,6 +1691,7 @@ private:
     videoContentType:(OngoingGroupCallVideoContentType)videoContentType
     enableNoiseSuppression:(bool)enableNoiseSuppression
     disableAudioInput:(bool)disableAudioInput
+    enableSystemMute:(bool)enableSystemMute
     preferX264:(bool)preferX264
     logPath:(NSString * _Nonnull)logPath
     onMutedSpeechActivityDetected:(void (^ _Nullable)(bool))onMutedSpeechActivityDetected
@@ -1886,6 +1887,7 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
         .outgoingAudioBitrateKbit = outgoingAudioBitrateKbit,
         .disableOutgoingAudioProcessing = disableOutgoingAudioProcessing,
         .disableAudioInput = disableAudioInput,
+        .ios_enableSystemMute = enableSystemMute,
         .videoContentType = _videoContentType,
         .videoCodecPreferences = videoCodecPreferences,
         .initialEnableNoiseSuppression = enableNoiseSuppression,
@@ -1922,12 +1924,12 @@ audioDevice:(SharedCallAudioDevice * _Nullable)audioDevice {
             return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
         },
         .minOutgoingVideoBitrateKbit = minOutgoingVideoBitrateKbit,
-        .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, audioDeviceModule, onMutedSpeechActivityDetected = _onMutedSpeechActivityDetected](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
+        .createAudioDeviceModule = [weakSelf, queue, disableAudioInput, enableSystemMute, audioDeviceModule, onMutedSpeechActivityDetected = _onMutedSpeechActivityDetected](webrtc::TaskQueueFactory *taskQueueFactory) -> rtc::scoped_refptr<webrtc::AudioDeviceModule> {
             if (audioDeviceModule) {
                 return audioDeviceModule->getSyncAssumingSameThread()->audioDeviceModule();
             } else {
                 rtc::Thread *audioDeviceModuleThread = rtc::Thread::Current();
-                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, disableAudioInput ? 2 : 1);
+                auto resultModule = rtc::make_ref_counted<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS>(false, disableAudioInput, enableSystemMute, disableAudioInput ? 2 : 1);
                 if (resultModule) {
                     resultModule->mutedSpeechDetectionChanged = ^(bool value) {
                         if (onMutedSpeechActivityDetected) {