Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin 2025-04-08 20:07:15 +04:00
commit 55953feeb1
5 changed files with 148 additions and 43 deletions

View File

@ -224,6 +224,7 @@ public struct PresentationGroupCallState: Equatable {
public var subscribedToScheduled: Bool
public var isVideoEnabled: Bool
public var isVideoWatchersLimitReached: Bool
public var isMyVideoActive: Bool
public init(
myPeerId: EnginePeer.Id,
@ -238,7 +239,8 @@ public struct PresentationGroupCallState: Equatable {
scheduleTimestamp: Int32?,
subscribedToScheduled: Bool,
isVideoEnabled: Bool,
isVideoWatchersLimitReached: Bool
isVideoWatchersLimitReached: Bool,
isMyVideoActive: Bool
) {
self.myPeerId = myPeerId
self.networkState = networkState
@ -253,6 +255,7 @@ public struct PresentationGroupCallState: Equatable {
self.subscribedToScheduled = subscribedToScheduled
self.isVideoEnabled = isVideoEnabled
self.isVideoWatchersLimitReached = isVideoWatchersLimitReached
self.isMyVideoActive = isMyVideoActive
}
}
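
Note: a hedged sketch of how a consumer can react to the new flag — the VideoChatScreenComponent change later in this commit does essentially this when picking the secondary action button. The types are the ones touched below; the function itself is hypothetical and for illustration only.

    // Hypothetical consumer; mirrors the button-selection logic added to
    // VideoChatScreenComponent further down in this commit.
    func secondaryControl(for state: PresentationGroupCallState) -> VideoChatActionButtonComponent.Content {
        if state.isMyVideoActive {
            // The local camera is streaming: offer a camera-flip control.
            return .rotateCamera
        } else {
            // Otherwise the slot is used for audio output routing.
            return .audio(audio: .speaker, isEnabled: true)
        }
    }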

View File

@ -1048,14 +1048,10 @@ private func selectivePrivacySettingsControllerEntries(presentationData: Present
entries.append(.settingInfo(presentationData.theme, settingInfoText, settingInfoLink))
}
if case .phoneNumber = kind, state.setting == .nobody {
if state.phoneDiscoveryEnabled == false || phoneNumber.hasPrefix("888") {
entries.append(.phoneDiscoveryHeader(presentationData.theme, presentationData.strings.PrivacyPhoneNumberSettings_DiscoveryHeader))
entries.append(.phoneDiscoveryEverybody(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenEverybody, state.phoneDiscoveryEnabled != false))
entries.append(.phoneDiscoveryMyContacts(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenContacts, state.phoneDiscoveryEnabled == false))
entries.append(.phoneDiscoveryInfo(presentationData.theme, state.phoneDiscoveryEnabled != false ? presentationData.strings.PrivacyPhoneNumberSettings_CustomPublicLink("+\(phoneNumber)").string : presentationData.strings.PrivacyPhoneNumberSettings_CustomDisabledHelp, phoneLink))
}
}
entries.append(.phoneDiscoveryHeader(presentationData.theme, presentationData.strings.PrivacyPhoneNumberSettings_DiscoveryHeader))
entries.append(.phoneDiscoveryEverybody(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenEverybody, state.phoneDiscoveryEnabled != false))
entries.append(.phoneDiscoveryMyContacts(presentationData.theme, presentationData.strings.PrivacySettings_LastSeenContacts, state.phoneDiscoveryEnabled == false))
entries.append(.phoneDiscoveryInfo(presentationData.theme, state.phoneDiscoveryEnabled != false ? presentationData.strings.PrivacyPhoneNumberSettings_CustomPublicLink("+\(phoneNumber)").string : presentationData.strings.PrivacyPhoneNumberSettings_CustomDisabledHelp, phoneLink))
if case .voiceMessages = kind, !isPremium {

View File

@ -34,7 +34,8 @@ private extension PresentationGroupCallState {
scheduleTimestamp: scheduleTimestamp,
subscribedToScheduled: subscribedToScheduled,
isVideoEnabled: false,
isVideoWatchersLimitReached: false
isVideoWatchersLimitReached: false,
isMyVideoActive: false
)
}
}
@ -1708,6 +1709,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
var prioritizeVP8 = false
#if DEBUG
prioritizeVP8 = "".isEmpty
#endif
if let data = self.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_calls_prioritize_vp8"] as? Double {
prioritizeVP8 = value != 0.0
}
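
Note: a hedged reading of the hunk above — "".isEmpty always evaluates to true, so DEBUG builds default to prioritizing VP8, and the ios_calls_prioritize_vp8 app-configuration value, when present, overrides that default in any build. An equivalent, more explicit spelling, for illustration only:

    // Equivalent to the DEBUG branch above: the expression is a constant true.
    #if DEBUG
    prioritizeVP8 = true
    #endif
    // A non-zero "ios_calls_prioritize_vp8" value in the server app config
    // then turns the flag on; zero turns it off.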
@ -3130,6 +3134,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if let videoCapturer = self.videoCapturer {
self.requestVideo(capturer: videoCapturer)
var stateValue = self.stateValue
stateValue.isMyVideoActive = true
self.stateValue = stateValue
}
}
@ -3152,6 +3160,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}))
self.updateLocalVideoState()
var stateValue = self.stateValue
stateValue.isMyVideoActive = true
self.stateValue = stateValue
}
}
@ -3165,6 +3177,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.isVideoMuted = true
self.updateLocalVideoState()
var stateValue = self.stateValue
stateValue.isMyVideoActive = false
self.stateValue = stateValue
}
}
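
Note: the three hunks above flip the new flag alongside the existing video plumbing (request video, enable video, disable video). A minimal sketch of the copy-mutate-assign pattern they repeat, assuming stateValue is a struct-typed property of PresentationGroupCallImpl whose setter publishes the new state to observers; the helper name is hypothetical.

    // Hypothetical helper condensing the repeated pattern above.
    private func setMyVideoActive(_ active: Bool) {
        var state = self.stateValue        // PresentationGroupCallState is a struct, so this is a copy
        state.isMyVideoActive = active     // mutate the copy
        self.stateValue = state            // writing back publishes the updated state
    }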

View File

@ -33,11 +33,13 @@ final class VideoChatActionButtonComponent: Component {
case audio(audio: Audio)
case video
case rotateCamera
case leave
}
case audio(audio: Audio, isEnabled: Bool)
case video(isActive: Bool)
case rotateCamera
case leave
fileprivate var iconType: IconType {
@ -55,6 +57,8 @@ final class VideoChatActionButtonComponent: Component {
return .audio(audio: mappedAudio)
case .video:
return .video
case .rotateCamera:
return .rotateCamera
case .leave:
return .leave
}
@ -176,6 +180,19 @@ final class VideoChatActionButtonComponent: Component {
backgroundColor = UIColor(rgb: 0x3252EF)
}
iconDiameter = 60.0
case .rotateCamera:
titleText = ""
switch component.microphoneState {
case .connecting:
backgroundColor = UIColor(white: 0.1, alpha: 1.0)
case .muted:
backgroundColor = UIColor(rgb: 0x027FFF)
case .unmuted:
backgroundColor = UIColor(rgb: 0x34C659)
case .raiseHand, .scheduled:
backgroundColor = UIColor(rgb: 0x3252EF)
}
iconDiameter = 60.0
case .leave:
titleText = component.strings.VoiceChat_Leave
backgroundColor = UIColor(rgb: 0x47191E)
@ -206,6 +223,8 @@ final class VideoChatActionButtonComponent: Component {
self.contentImage = UIImage(bundleImageName: iconName)?.precomposed().withRenderingMode(.alwaysTemplate)
case .video:
self.contentImage = UIImage(bundleImageName: "Call/CallCameraButton")?.precomposed().withRenderingMode(.alwaysTemplate)
case .rotateCamera:
self.contentImage = UIImage(bundleImageName: "Call/CallSwitchCameraButton")?.precomposed().withRenderingMode(.alwaysTemplate)
case .leave:
self.contentImage = generateImage(CGSize(width: 28.0, height: 28.0), opaque: false, rotatedContext: { size, context in
let bounds = CGRect(origin: CGPoint(), size: size)
@ -277,8 +296,10 @@ final class VideoChatActionButtonComponent: Component {
if iconView.superview == nil {
self.addSubview(iconView)
}
transition.setFrame(view: iconView, frame: iconFrame)
transition.setPosition(view: iconView, position: iconFrame.center)
transition.setBounds(view: iconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
transition.setAlpha(view: iconView, alpha: isEnabled ? 1.0 : 0.6)
transition.setScale(view: iconView, scale: availableSize.height / 56.0)
}
self.isEnabled = isEnabled

View File

@ -227,6 +227,7 @@ final class VideoChatScreenComponent: Component {
var isEncryptionKeyExpanded: Bool = false
let videoButton = ComponentView<Empty>()
let videoControlButton = ComponentView<Empty>()
let leaveButton = ComponentView<Empty>()
let microphoneButton = ComponentView<Empty>()
@ -1130,7 +1131,8 @@ final class VideoChatScreenComponent: Component {
scheduleTimestamp: nil,
subscribedToScheduled: false,
isVideoEnabled: true,
isVideoWatchersLimitReached: false
isVideoWatchersLimitReached: false,
isMyVideoActive: false
)
return .single((callState, invitedPeers.compactMap({ peer -> VideoChatScreenComponent.InvitedPeer? in
@ -2657,38 +2659,84 @@ final class VideoChatScreenComponent: Component {
}
let videoButtonContent: VideoChatActionButtonComponent.Content
if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
var buttonIsEnabled = false
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
buttonIsEnabled = availableOutputs.count > 1
switch currentOutput {
case .builtin:
buttonAudio = .builtin
case .speaker:
buttonAudio = .speaker
case .headphones:
buttonAudio = .headphones
case let .port(port):
var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
let portName = port.name.lowercased()
if portName.contains("airpods max") {
type = .airpodsMax
} else if portName.contains("airpods pro") {
type = .airpodsPro
} else if portName.contains("airpods") {
type = .airpods
}
buttonAudio = .bluetooth(type)
}
if availableOutputs.count <= 1 {
buttonAudio = .none
let videoControlButtonContent: VideoChatActionButtonComponent.Content
var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
var buttonIsEnabled = false
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
buttonIsEnabled = availableOutputs.count > 1
switch currentOutput {
case .builtin:
buttonAudio = .builtin
case .speaker:
buttonAudio = .speaker
case .headphones:
buttonAudio = .headphones
case let .port(port):
var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
let portName = port.name.lowercased()
if portName.contains("airpods max") {
type = .airpodsMax
} else if portName.contains("airpods pro") {
type = .airpodsPro
} else if portName.contains("airpods") {
type = .airpods
}
buttonAudio = .bluetooth(type)
}
if availableOutputs.count <= 1 {
buttonAudio = .none
}
videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
} else {
videoButtonContent = .video(isActive: false)
}
if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
} else {
let isVideoActive = self.callState?.isMyVideoActive ?? false
videoButtonContent = .video(isActive: isVideoActive)
if isVideoActive {
videoControlButtonContent = .rotateCamera
} else {
videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
}
}
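
Note: one detail in the audio-output mapping above — the lowercased port name is matched from most to least specific, so "airpods max" and "airpods pro" are checked before the bare "airpods" substring. A self-contained sketch of that mapping, with a local enum standing in for the component's BluetoothType:

    // Standalone version of the Bluetooth-type mapping used above. Checking the
    // more specific names first matters: "AirPods Pro" also contains "airpods",
    // so the generic match must come last.
    enum ExampleBluetoothType { case generic, airpods, airpodsPro, airpodsMax }

    func exampleBluetoothType(forPortName name: String) -> ExampleBluetoothType {
        let portName = name.lowercased()
        if portName.contains("airpods max") {
            return .airpodsMax
        } else if portName.contains("airpods pro") {
            return .airpodsPro
        } else if portName.contains("airpods") {
            return .airpods
        }
        return .generic
    }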
var displayVideoControlButton = true
if areButtonsCollapsed {
displayVideoControlButton = false
} else if let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
displayVideoControlButton = false
}
let videoControlButtonSize = self.videoControlButton.update(
transition: transition,
component: AnyComponent(PlainButtonComponent(
content: AnyComponent(VideoChatActionButtonComponent(
strings: environment.strings,
content: videoControlButtonContent,
microphoneState: actionButtonMicrophoneState,
isCollapsed: true
)),
effectAlignment: .center,
action: { [weak self] in
guard let self else {
return
}
if let state = self.callState, state.isMyVideoActive {
if case let .group(groupCall) = self.currentCall {
groupCall.switchVideoCamera()
}
} else {
self.onAudioRoutePressed()
}
},
animateAlpha: false
)),
environment: {},
containerSize: CGSize(width: 32.0, height: 32.0)
)
let _ = self.videoButton.update(
transition: transition,
component: AnyComponent(PlainButtonComponent(
@ -2714,12 +2762,33 @@ final class VideoChatScreenComponent: Component {
environment: {},
containerSize: CGSize(width: actionButtonDiameter, height: actionButtonDiameter)
)
let videoControlButtonSpacing: CGFloat = 8.0
var videoButtonFrame = leftActionButtonFrame
if displayVideoControlButton {
let totalVideoButtonsHeight = actionButtonDiameter + videoControlButtonSpacing + videoControlButtonSize.height
videoButtonFrame.origin.y = videoButtonFrame.minY + floor((videoButtonFrame.height - totalVideoButtonsHeight) / 2.0) + videoControlButtonSpacing + videoControlButtonSize.height
}
let videoControlButtonFrame = CGRect(origin: CGPoint(x: videoButtonFrame.minX + floor((videoButtonFrame.width - videoControlButtonSize.width) / 2.0), y: videoButtonFrame.minY - videoControlButtonSpacing - videoControlButtonSize.height), size: videoControlButtonSize)
if let videoControlButtonView = self.videoControlButton.view {
if videoControlButtonView.superview == nil {
self.containerView.addSubview(videoControlButtonView)
}
transition.setPosition(view: videoControlButtonView, position: videoControlButtonFrame.center)
transition.setBounds(view: videoControlButtonView, bounds: CGRect(origin: CGPoint(), size: videoControlButtonFrame.size))
alphaTransition.setAlpha(view: videoControlButtonView, alpha: displayVideoControlButton ? 1.0 : 0.0)
transition.setScale(view: videoControlButtonView, scale: displayVideoControlButton ? 1.0 : 0.001)
}
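
Note: for intuition on the frame math above, a worked example assuming a 56-point action button (matching the 56.0 divisor used for icon scaling in this commit) and the 32×32 control button laid out above; both values are assumptions for illustration.

    import CoreGraphics

    // Worked example of the layout computation under the assumptions stated above.
    let actionButtonDiameter: CGFloat = 56.0
    let controlButtonHeight: CGFloat = 32.0
    let videoControlButtonSpacing: CGFloat = 8.0

    // Height of the stacked pair: control button + spacing + main video button.
    let totalVideoButtonsHeight = actionButtonDiameter + videoControlButtonSpacing + controlButtonHeight   // 96
    // The video button shifts down by floor((56 - 96) / 2) + 8 + 32 = 20 points,
    // and the control button sits 40 points above it (20 points above the
    // original slot's top), so the pair stays centred on the original
    // left action button slot.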
if let videoButtonView = self.videoButton.view {
if videoButtonView.superview == nil {
self.containerView.addSubview(videoButtonView)
}
transition.setPosition(view: videoButtonView, position: leftActionButtonFrame.center)
transition.setBounds(view: videoButtonView, bounds: CGRect(origin: CGPoint(), size: leftActionButtonFrame.size))
transition.setPosition(view: videoButtonView, position: videoButtonFrame.center)
transition.setBounds(view: videoButtonView, bounds: CGRect(origin: CGPoint(), size: videoButtonFrame.size))
}
let _ = self.leaveButton.update(