Conference updates

Isaac 2025-04-08 18:20:10 +04:00
parent 2ab4af656b
commit 654b258d97
5 changed files with 148 additions and 43 deletions

View File

@@ -224,6 +224,7 @@ public struct PresentationGroupCallState: Equatable {
     public var subscribedToScheduled: Bool
     public var isVideoEnabled: Bool
     public var isVideoWatchersLimitReached: Bool
+    public var isMyVideoActive: Bool
 
     public init(
         myPeerId: EnginePeer.Id,
@@ -238,7 +239,8 @@ public struct PresentationGroupCallState: Equatable {
         scheduleTimestamp: Int32?,
         subscribedToScheduled: Bool,
         isVideoEnabled: Bool,
-        isVideoWatchersLimitReached: Bool
+        isVideoWatchersLimitReached: Bool,
+        isMyVideoActive: Bool
     ) {
         self.myPeerId = myPeerId
         self.networkState = networkState
@@ -253,6 +255,7 @@ public struct PresentationGroupCallState: Equatable {
         self.subscribedToScheduled = subscribedToScheduled
         self.isVideoEnabled = isVideoEnabled
         self.isVideoWatchersLimitReached = isVideoWatchersLimitReached
+        self.isMyVideoActive = isMyVideoActive
     }
 }
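Note (not part of the commit): a reduced, self-contained sketch of PresentationGroupCallState after the hunks above, limited to the Bool fields visible in this diff; the real struct carries many more members. Because the struct is Equatable, flipping isMyVideoActive alone makes old and new state compare as different, which is what lets observers react to the camera being turned on or off.

    // Reduced sketch, not the real declaration: only the fields shown in the diff above.
    public struct PresentationGroupCallState: Equatable {
        public var isVideoEnabled: Bool
        public var isVideoWatchersLimitReached: Bool
        // New in this commit: whether the local user's own camera is currently on.
        public var isMyVideoActive: Bool

        public init(isVideoEnabled: Bool, isVideoWatchersLimitReached: Bool, isMyVideoActive: Bool) {
            self.isVideoEnabled = isVideoEnabled
            self.isVideoWatchersLimitReached = isVideoWatchersLimitReached
            self.isMyVideoActive = isMyVideoActive
        }
    }

    // A change to isMyVideoActive alone makes the values unequal.
    let before = PresentationGroupCallState(isVideoEnabled: true, isVideoWatchersLimitReached: false, isMyVideoActive: false)
    var after = before
    after.isMyVideoActive = true
    assert(before != after)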

View File

@@ -1048,14 +1048,10 @@ private func selectivePrivacySettingsControllerEntries(presentationData: Present
         entries.append(.settingInfo(presentationData.theme, settingInfoText, settingInfoLink))
     }
-    if case .phoneNumber = kind, state.setting == .nobody {
-        if state.phoneDiscoveryEnabled == false || phoneNumber.hasPrefix("888") {
-            entries.append(.phoneDiscoveryHeader(presentationData.theme, presentationData.strings.PrivacyPhoneNumberSettings_DiscoveryHeader))
-            entries.append(.phoneDiscoveryEverybody(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenEverybody, state.phoneDiscoveryEnabled != false))
-            entries.append(.phoneDiscoveryMyContacts(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenContacts, state.phoneDiscoveryEnabled == false))
-            entries.append(.phoneDiscoveryInfo(presentationData.theme, state.phoneDiscoveryEnabled != false ? presentationData.strings.PrivacyPhoneNumberSettings_CustomPublicLink("+\(phoneNumber)").string : presentationData.strings.PrivacyPhoneNumberSettings_CustomDisabledHelp, phoneLink))
-        }
-    }
+    entries.append(.phoneDiscoveryHeader(presentationData.theme, presentationData.strings.PrivacyPhoneNumberSettings_DiscoveryHeader))
+    entries.append(.phoneDiscoveryEverybody(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenEverybody, state.phoneDiscoveryEnabled != false))
+    entries.append(.phoneDiscoveryMyContacts(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenContacts, state.phoneDiscoveryEnabled == false))
+    entries.append(.phoneDiscoveryInfo(presentationData.theme, state.phoneDiscoveryEnabled != false ? presentationData.strings.PrivacyPhoneNumberSettings_CustomPublicLink("+\(phoneNumber)").string : presentationData.strings.PrivacyPhoneNumberSettings_CustomDisabledHelp, phoneLink))
 
     if case .voiceMessages = kind, !isPremium {
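Note (not part of the commit): the != false / == false comparisons above suggest state.phoneDiscoveryEnabled is an optional Bool, with nil (never set) behaving like "Everybody". A standalone sketch of that mapping, under that assumption:

    // Sketch: which phone-discovery radio row reads as selected for a stored value.
    // Assumes phoneDiscoveryEnabled is Bool?; nil counts as "Everybody",
    // matching the `state.phoneDiscoveryEnabled != false` check in the hunk above.
    func phoneDiscoverySelection(_ phoneDiscoveryEnabled: Bool?) -> (everybody: Bool, myContacts: Bool) {
        let everybody = phoneDiscoveryEnabled != false
        return (everybody: everybody, myContacts: !everybody)
    }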

View File

@@ -34,7 +34,8 @@ private extension PresentationGroupCallState {
             scheduleTimestamp: scheduleTimestamp,
             subscribedToScheduled: subscribedToScheduled,
             isVideoEnabled: false,
-            isVideoWatchersLimitReached: false
+            isVideoWatchersLimitReached: false,
+            isMyVideoActive: false
         )
     }
 }
@@ -1708,6 +1709,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }
 
         var prioritizeVP8 = false
+        #if DEBUG
+        prioritizeVP8 = "".isEmpty
+        #endif
         if let data = self.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_calls_prioritize_vp8"] as? Double {
             prioritizeVP8 = value != 0.0
         }
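Note (not part of the commit): "".isEmpty always evaluates to true, so this hunk makes DEBUG builds default to prioritizing VP8, while the server-driven "ios_calls_prioritize_vp8" app-configuration value, when present, still overrides the default in any build. A reduced sketch of that precedence; appConfigData stands in for the account's app configuration:

    // Stand-in for self.accountContext.currentAppConfiguration.with({ $0 }).data
    let appConfigData: [String: Any]? = ["ios_calls_prioritize_vp8": 1.0]

    var prioritizeVP8 = false
    #if DEBUG
    prioritizeVP8 = true // same effect as `"".isEmpty`, which is always true
    #endif
    if let data = appConfigData, let value = data["ios_calls_prioritize_vp8"] as? Double {
        prioritizeVP8 = value != 0.0 // remote configuration wins over the DEBUG default
    }
    print("prioritizeVP8:", prioritizeVP8)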
@@ -3130,6 +3134,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         if let videoCapturer = self.videoCapturer {
             self.requestVideo(capturer: videoCapturer)
+
+            var stateValue = self.stateValue
+            stateValue.isMyVideoActive = true
+            self.stateValue = stateValue
         }
     }
@@ -3152,6 +3160,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             }))
             self.updateLocalVideoState()
+
+            var stateValue = self.stateValue
+            stateValue.isMyVideoActive = true
+            self.stateValue = stateValue
         }
     }
@@ -3165,6 +3177,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             self.isVideoMuted = true
             self.updateLocalVideoState()
+
+            var stateValue = self.stateValue
+            stateValue.isMyVideoActive = false
+            self.stateValue = stateValue
         }
     }
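Note (not part of the commit): the three hunks above share one pattern. PresentationGroupCallState is a value type, so the code copies stateValue, flips isMyVideoActive, and assigns the whole value back, which observers see as a single state change. A self-contained sketch of that pattern; the class and property names are illustrative, not the real API:

    final class CallStateHolderSketch {
        struct State: Equatable {
            var isMyVideoActive: Bool = false
        }

        private(set) var stateValue = State() {
            didSet {
                // In the real call object, the updated state is published to observers here.
            }
        }

        func setMyVideoActive(_ active: Bool) {
            var stateValue = self.stateValue // copy the value type
            stateValue.isMyVideoActive = active
            self.stateValue = stateValue // one assignment, one observable update
        }
    }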

View File

@@ -33,11 +33,13 @@ final class VideoChatActionButtonComponent: Component {
             case audio(audio: Audio)
             case video
+            case rotateCamera
             case leave
         }
 
         case audio(audio: Audio, isEnabled: Bool)
         case video(isActive: Bool)
+        case rotateCamera
         case leave
 
         fileprivate var iconType: IconType {
@@ -55,6 +57,8 @@ final class VideoChatActionButtonComponent: Component {
                 return .audio(audio: mappedAudio)
             case .video:
                 return .video
+            case .rotateCamera:
+                return .rotateCamera
             case .leave:
                 return .leave
             }
@@ -176,6 +180,19 @@ final class VideoChatActionButtonComponent: Component {
                 backgroundColor = UIColor(rgb: 0x3252EF)
             }
             iconDiameter = 60.0
+        case .rotateCamera:
+            titleText = ""
+            switch component.microphoneState {
+            case .connecting:
+                backgroundColor = UIColor(white: 0.1, alpha: 1.0)
+            case .muted:
+                backgroundColor = UIColor(rgb: 0x027FFF)
+            case .unmuted:
+                backgroundColor = UIColor(rgb: 0x34C659)
+            case .raiseHand, .scheduled:
+                backgroundColor = UIColor(rgb: 0x3252EF)
+            }
+            iconDiameter = 60.0
         case .leave:
             titleText = component.strings.VoiceChat_Leave
             backgroundColor = UIColor(rgb: 0x47191E)
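Note (not part of the commit): the new .rotateCamera case appears to reuse the microphone button's background colours, keyed off component.microphoneState, so the stacked pair reads as one control. A standalone sketch of that mapping; the enum is reduced to the cases used above, and UIColor(rgb:) is spelled out as a rough equivalent of the project's convenience initializer:

    import UIKit

    enum MicrophoneStateSketch {
        case connecting, muted, unmuted, raiseHand, scheduled
    }

    extension UIColor {
        // Rough equivalent of the project's UIColor(rgb:) convenience initializer.
        convenience init(rgb: UInt32) {
            self.init(
                red: CGFloat((rgb >> 16) & 0xff) / 255.0,
                green: CGFloat((rgb >> 8) & 0xff) / 255.0,
                blue: CGFloat(rgb & 0xff) / 255.0,
                alpha: 1.0
            )
        }
    }

    func rotateCameraBackgroundColor(for state: MicrophoneStateSketch) -> UIColor {
        switch state {
        case .connecting:
            return UIColor(white: 0.1, alpha: 1.0)
        case .muted:
            return UIColor(rgb: 0x027FFF)
        case .unmuted:
            return UIColor(rgb: 0x34C659)
        case .raiseHand, .scheduled:
            return UIColor(rgb: 0x3252EF)
        }
    }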
@@ -206,6 +223,8 @@ final class VideoChatActionButtonComponent: Component {
                 self.contentImage = UIImage(bundleImageName: iconName)?.precomposed().withRenderingMode(.alwaysTemplate)
             case .video:
                 self.contentImage = UIImage(bundleImageName: "Call/CallCameraButton")?.precomposed().withRenderingMode(.alwaysTemplate)
+            case .rotateCamera:
+                self.contentImage = UIImage(bundleImageName: "Call/CallSwitchCameraButton")?.precomposed().withRenderingMode(.alwaysTemplate)
             case .leave:
                 self.contentImage = generateImage(CGSize(width: 28.0, height: 28.0), opaque: false, rotatedContext: { size, context in
                     let bounds = CGRect(origin: CGPoint(), size: size)
@@ -277,8 +296,10 @@ final class VideoChatActionButtonComponent: Component {
             if iconView.superview == nil {
                 self.addSubview(iconView)
             }
-            transition.setFrame(view: iconView, frame: iconFrame)
+            transition.setPosition(view: iconView, position: iconFrame.center)
+            transition.setBounds(view: iconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
             transition.setAlpha(view: iconView, alpha: isEnabled ? 1.0 : 0.6)
+            transition.setScale(view: iconView, scale: availableSize.height / 56.0)
         }
 
         self.isEnabled = isEnabled
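Note (not part of the commit): this hunk swaps a single setFrame call for separate position and bounds updates plus a scale of availableSize.height / 56.0. UIView.frame is derived from center, bounds and transform, so animating frame while also applying a scale transform fights itself; setting position and bounds keeps layout independent of the transform. A minimal sketch of the same idea, assuming 56.0 is the reference design height of the button:

    import UIKit

    // Sketch: lay out an icon via center/bounds and apply the size-dependent scale
    // as a transform instead of animating the frame directly.
    func layoutIconSketch(_ iconView: UIView, iconFrame: CGRect, buttonHeight: CGFloat) {
        iconView.center = CGPoint(x: iconFrame.midX, y: iconFrame.midY)
        iconView.bounds = CGRect(origin: .zero, size: iconFrame.size)
        let scale = buttonHeight / 56.0
        iconView.transform = CGAffineTransform(scaleX: scale, y: scale)
    }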

View File

@@ -227,6 +227,7 @@ final class VideoChatScreenComponent: Component {
         var isEncryptionKeyExpanded: Bool = false
 
         let videoButton = ComponentView<Empty>()
+        let videoControlButton = ComponentView<Empty>()
         let leaveButton = ComponentView<Empty>()
         let microphoneButton = ComponentView<Empty>()
@@ -1130,7 +1131,8 @@ final class VideoChatScreenComponent: Component {
                     scheduleTimestamp: nil,
                     subscribedToScheduled: false,
                     isVideoEnabled: true,
-                    isVideoWatchersLimitReached: false
+                    isVideoWatchersLimitReached: false,
+                    isMyVideoActive: false
                 )
 
                 return .single((callState, invitedPeers.compactMap({ peer -> VideoChatScreenComponent.InvitedPeer? in
@@ -2657,38 +2659,84 @@ final class VideoChatScreenComponent: Component {
             }
 
             let videoButtonContent: VideoChatActionButtonComponent.Content
-            if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
-                var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
-                var buttonIsEnabled = false
-                if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
-                    buttonIsEnabled = availableOutputs.count > 1
-                    switch currentOutput {
-                    case .builtin:
-                        buttonAudio = .builtin
-                    case .speaker:
-                        buttonAudio = .speaker
-                    case .headphones:
-                        buttonAudio = .headphones
-                    case let .port(port):
-                        var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
-                        let portName = port.name.lowercased()
-                        if portName.contains("airpods max") {
-                            type = .airpodsMax
-                        } else if portName.contains("airpods pro") {
-                            type = .airpodsPro
-                        } else if portName.contains("airpods") {
-                            type = .airpods
-                        }
-                        buttonAudio = .bluetooth(type)
-                    }
-                    if availableOutputs.count <= 1 {
-                        buttonAudio = .none
-                    }
-                }
-                videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
-            } else {
-                videoButtonContent = .video(isActive: false)
-            }
+            let videoControlButtonContent: VideoChatActionButtonComponent.Content
+
+            var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
+            var buttonIsEnabled = false
+            if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
+                buttonIsEnabled = availableOutputs.count > 1
+                switch currentOutput {
+                case .builtin:
+                    buttonAudio = .builtin
+                case .speaker:
+                    buttonAudio = .speaker
+                case .headphones:
+                    buttonAudio = .headphones
+                case let .port(port):
+                    var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
+                    let portName = port.name.lowercased()
+                    if portName.contains("airpods max") {
+                        type = .airpodsMax
+                    } else if portName.contains("airpods pro") {
+                        type = .airpodsPro
+                    } else if portName.contains("airpods") {
+                        type = .airpods
+                    }
+                    buttonAudio = .bluetooth(type)
+                }
+                if availableOutputs.count <= 1 {
+                    buttonAudio = .none
+                }
+            }
+
+            if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
+                videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
+                videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
+            } else {
+                let isVideoActive = self.callState?.isMyVideoActive ?? false
+                videoButtonContent = .video(isActive: isVideoActive)
+                if isVideoActive {
+                    videoControlButtonContent = .rotateCamera
+                } else {
+                    videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
+                }
+            }
+
+            var displayVideoControlButton = true
+            if areButtonsCollapsed {
+                displayVideoControlButton = false
+            } else if let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
+                displayVideoControlButton = false
+            }
+
+            let videoControlButtonSize = self.videoControlButton.update(
+                transition: transition,
+                component: AnyComponent(PlainButtonComponent(
+                    content: AnyComponent(VideoChatActionButtonComponent(
+                        strings: environment.strings,
+                        content: videoControlButtonContent,
+                        microphoneState: actionButtonMicrophoneState,
+                        isCollapsed: true
+                    )),
+                    effectAlignment: .center,
+                    action: { [weak self] in
+                        guard let self else {
+                            return
+                        }
+                        if let state = self.callState, state.isMyVideoActive {
+                            if case let .group(groupCall) = self.currentCall {
+                                groupCall.switchVideoCamera()
+                            }
+                        } else {
+                            self.onAudioRoutePressed()
+                        }
+                    },
+                    animateAlpha: false
+                )),
+                environment: {},
+                containerSize: CGSize(width: 32.0, height: 32.0)
+            )
+
             let _ = self.videoButton.update(
                 transition: transition,
                 component: AnyComponent(PlainButtonComponent(
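Note (not part of the commit): a condensed sketch of the content selection performed by the hunk above. The enum is reduced to the cases that matter here, and the function stands in for the inline logic; in the real code the audio case also carries the route and enabled flag:

    enum ActionButtonContentSketch: Equatable {
        case audio // audio-route picker; associated route data omitted
        case video(isActive: Bool)
        case rotateCamera
    }

    // Which content the main video button and the small 32x32 control button receive.
    func buttonContents(canUnmute: Bool, isMyVideoActive: Bool) -> (video: ActionButtonContentSketch, control: ActionButtonContentSketch) {
        if !canUnmute {
            // Muted without permission to unmute: both slots fall back to the audio-route button.
            return (video: .audio, control: .audio)
        }
        let video: ActionButtonContentSketch = .video(isActive: isMyVideoActive)
        // The control button switches role: camera flip while our own video is live,
        // audio-route picker otherwise, mirroring the tap action in the hunk above.
        let control: ActionButtonContentSketch = isMyVideoActive ? .rotateCamera : .audio
        return (video: video, control: control)
    }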
@@ -2714,12 +2762,33 @@ final class VideoChatScreenComponent: Component {
                 environment: {},
                 containerSize: CGSize(width: actionButtonDiameter, height: actionButtonDiameter)
             )
+
+            let videoControlButtonSpacing: CGFloat = 8.0
+
+            var videoButtonFrame = leftActionButtonFrame
+            if displayVideoControlButton {
+                let totalVideoButtonsHeight = actionButtonDiameter + videoControlButtonSpacing + videoControlButtonSize.height
+                videoButtonFrame.origin.y = videoButtonFrame.minY + floor((videoButtonFrame.height - totalVideoButtonsHeight) / 2.0) + videoControlButtonSpacing + videoControlButtonSize.height
+            }
+
+            let videoControlButtonFrame = CGRect(origin: CGPoint(x: videoButtonFrame.minX + floor((videoButtonFrame.width - videoControlButtonSize.width) / 2.0), y: videoButtonFrame.minY - videoControlButtonSpacing - videoControlButtonSize.height), size: videoControlButtonSize)
+
+            if let videoControlButtonView = self.videoControlButton.view {
+                if videoControlButtonView.superview == nil {
+                    self.containerView.addSubview(videoControlButtonView)
+                }
+                transition.setPosition(view: videoControlButtonView, position: videoControlButtonFrame.center)
+                transition.setBounds(view: videoControlButtonView, bounds: CGRect(origin: CGPoint(), size: videoControlButtonFrame.size))
+                alphaTransition.setAlpha(view: videoControlButtonView, alpha: displayVideoControlButton ? 1.0 : 0.0)
+                transition.setScale(view: videoControlButtonView, scale: displayVideoControlButton ? 1.0 : 0.001)
+            }
+
             if let videoButtonView = self.videoButton.view {
                 if videoButtonView.superview == nil {
                     self.containerView.addSubview(videoButtonView)
                 }
-                transition.setPosition(view: videoButtonView, position: leftActionButtonFrame.center)
-                transition.setBounds(view: videoButtonView, bounds: CGRect(origin: CGPoint(), size: leftActionButtonFrame.size))
+                transition.setPosition(view: videoButtonView, position: videoButtonFrame.center)
+                transition.setBounds(view: videoButtonView, bounds: CGRect(origin: CGPoint(), size: videoButtonFrame.size))
             }
 
             let _ = self.leaveButton.update(
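Note (not part of the commit): the layout hunk above centres the stacked group (control button, 8-point spacing, main video button) on the original left-button slot, pushing the video button down and placing the control button directly above it. A standalone sketch of those frame computations, assuming the slot height equals the action button diameter as in the hunk:

    import CoreGraphics

    // Sketch: compute the main video button frame and the small control button frame
    // stacked above it, mirroring the arithmetic in the hunk above.
    func stackedButtonFrames(slot: CGRect, controlSize: CGSize, spacing: CGFloat = 8.0) -> (video: CGRect, control: CGRect) {
        let totalHeight = slot.height + spacing + controlSize.height
        var video = slot
        video.origin.y = slot.minY + ((slot.height - totalHeight) / 2.0).rounded(.down) + spacing + controlSize.height
        let control = CGRect(
            origin: CGPoint(
                x: video.minX + ((video.width - controlSize.width) / 2.0).rounded(.down),
                y: video.minY - spacing - controlSize.height
            ),
            size: controlSize
        )
        return (video: video, control: control)
    }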