Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-12-04 21:41:45 +00:00)
Various improvements

parent 6976839bfc
commit a91658373b
@@ -2034,7 +2034,7 @@ final class VideoChatParticipantsComponent: Component {
 } else {
 let step: CGFloat = CGFloat(i - firstStep) / CGFloat(numSteps - firstStep - 1)
 let value: CGFloat = 1.0 - Display.bezierPoint(0.42, 0.0, 0.58, 1.0, step)
-return UIColor(white: 0.0, alpha: baseGradientAlpha * value).multipliedWith(.red)
+return UIColor(white: 0.0, alpha: baseGradientAlpha * value)
 }
 }
 let locations = (0 ..< numSteps).map { i -> CGFloat in
@@ -2206,7 +2206,7 @@ final class VideoChatScreenComponent: Component {
 
 let landscapeControlsWidth: CGFloat = 104.0
 var landscapeControlsOffsetX: CGFloat = 0.0
-let landscapeControlsSpacing: CGFloat = 30.0
+let landscapeControlsSpacing: CGFloat = 20.0
 
 var leftInset: CGFloat = max(environment.safeInsets.left, 16.0)
 
@@ -2554,11 +2554,58 @@ final class VideoChatScreenComponent: Component {
 self.encryptionKeyBackground = nil
 }
 
+let videoButtonContent: VideoChatActionButtonComponent.Content?
+let videoControlButtonContent: VideoChatActionButtonComponent.Content
+
+var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
+var buttonIsEnabled = false
+if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
+buttonIsEnabled = availableOutputs.count > 1
+switch currentOutput {
+case .builtin:
+buttonAudio = .builtin
+case .speaker:
+buttonAudio = .speaker
+case .headphones:
+buttonAudio = .headphones
+case let .port(port):
+var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
+let portName = port.name.lowercased()
+if portName.contains("airpods max") {
+type = .airpodsMax
+} else if portName.contains("airpods pro") {
+type = .airpodsPro
+} else if portName.contains("airpods") {
+type = .airpods
+}
+buttonAudio = .bluetooth(type)
+}
+if availableOutputs.count <= 1 {
+buttonAudio = .none
+}
+}
+
+if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
+videoButtonContent = nil
+videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
+} else {
+let isVideoActive = self.callState?.isMyVideoActive ?? false
+videoButtonContent = .video(isActive: isVideoActive)
+if isVideoActive {
+videoControlButtonContent = .rotateCamera
+} else {
+videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
+}
+}
+
+
 let actionButtonDiameter: CGFloat = 56.0
 let expandedMicrophoneButtonDiameter: CGFloat = actionButtonDiameter
 let collapsedMicrophoneButtonDiameter: CGFloat = actionButtonDiameter // 116.0
 
-let buttonsWidth: CGFloat = actionButtonDiameter * 5.0
+let buttonsCount = videoButtonContent == nil ? 4 : 5
+
+let buttonsWidth: CGFloat = actionButtonDiameter * CGFloat(buttonsCount)
 let remainingButtonsSpace: CGFloat
 if isTwoColumnLayout {
 remainingButtonsSpace = mainColumnWidth - buttonsWidth
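For orientation, the block added in this hunk reduces to a small mapping: given the current audio route and the number of available outputs, pick the icon the audio control button should show, sniffing AirPods variants from the Bluetooth port name. The standalone sketch below restates that logic with simplified stand-in types; AudioRoute, BluetoothType, ButtonAudio and buttonAudio(for:availableOutputs:) are illustrative names only, not the component's real API.

    // Simplified stand-ins for VideoChatActionButtonComponent.Content.Audio and friends.
    enum BluetoothType: Equatable { case generic, airpods, airpodsPro, airpodsMax }

    enum AudioRoute {
        case builtin, speaker, headphones
        case port(name: String) // external output, e.g. a Bluetooth device's product name
    }

    enum ButtonAudio: Equatable {
        case none, builtin, speaker, headphones
        case bluetooth(BluetoothType)
    }

    // Mirrors the diff: derive the button state from the active route and the output count.
    func buttonAudio(for route: AudioRoute?, availableOutputs: Int) -> ButtonAudio {
        guard let route else { return .speaker }  // no route information yet: keep the default
        if availableOutputs <= 1 { return .none } // nothing to switch to, so hide the audio icon
        switch route {
        case .builtin: return .builtin
        case .speaker: return .speaker
        case .headphones: return .headphones
        case let .port(name):
            let lowered = name.lowercased()
            var type: BluetoothType = .generic
            if lowered.contains("airpods max") {
                type = .airpodsMax
            } else if lowered.contains("airpods pro") {
                type = .airpodsPro
            } else if lowered.contains("airpods") {
                type = .airpods
            }
            return .bluetooth(type)
        }
    }

For example, a port named "Anna's AirPods Pro" with two available outputs maps to .bluetooth(.airpodsPro), while a single-output state maps to .none.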
@@ -2566,7 +2613,7 @@ final class VideoChatScreenComponent: Component {
 remainingButtonsSpace = availableSize.width - 16.0 * 2.0 - buttonsWidth
 }
 
-let actionMicrophoneButtonSpacing = min(25.0, floor(remainingButtonsSpace * 0.25))
+let actionMicrophoneButtonSpacing = min(33.0, floor(remainingButtonsSpace / CGFloat(buttonsCount - 1)))
 
 var collapsedMicrophoneButtonFrame: CGRect = CGRect(origin: CGPoint(x: floor((availableSize.width - collapsedMicrophoneButtonDiameter) * 0.5), y: availableSize.height - 48.0 - environment.safeInsets.bottom - collapsedMicrophoneButtonDiameter), size: CGSize(width: collapsedMicrophoneButtonDiameter, height: collapsedMicrophoneButtonDiameter))
 if self.isAnimatedOutFromPrivateCall {
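Taken together, the button-count change in the previous hunk and the min(33.0, ...) cap above amount to: lay out n buttons of a fixed diameter, then split the leftover width evenly across the n - 1 gaps, never exceeding 33 points per gap. A toy calculation under those assumptions (a standalone helper for illustration, not the component's actual layout code):

    import CoreGraphics

    // Gap between action buttons, following the formulas in the hunks above.
    // availableWidth stands in for the single-column case (screen width minus 16pt side margins).
    func actionButtonSpacing(buttonsCount: Int, buttonDiameter: CGFloat, availableWidth: CGFloat) -> CGFloat {
        let buttonsWidth = buttonDiameter * CGFloat(buttonsCount)
        let remaining = availableWidth - 16.0 * 2.0 - buttonsWidth
        return min(33.0, (remaining / CGFloat(buttonsCount - 1)).rounded(.down))
    }

For example, 5 buttons of 56pt on a 390pt-wide phone leave 78pt of slack, giving 19pt gaps; with 4 buttons the slack grows to 134pt and the gap is capped at 33pt.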
@@ -2607,7 +2654,7 @@ final class VideoChatScreenComponent: Component {
 }
 }
 
-let microphoneButtonFrame: CGRect
+var microphoneButtonFrame: CGRect
 if areButtonsCollapsed {
 microphoneButtonFrame = expandedMicrophoneButtonFrame
 } else {
@@ -2631,25 +2678,40 @@ final class VideoChatScreenComponent: Component {
 } else {
 expandedParticipantsClippingY = expandedMicrophoneButtonFrame.minY - 24.0
 }
 
 
 let actionButtonSize = CGSize(width: actionButtonDiameter, height: actionButtonDiameter)
 var firstActionButtonFrame = CGRect(origin: CGPoint(x: microphoneButtonFrame.minX - (actionButtonDiameter + actionMicrophoneButtonSpacing) * 2.0, y: microphoneButtonFrame.minY + floor((microphoneButtonFrame.height - actionButtonDiameter) * 0.5)), size: actionButtonSize)
 var secondActionButtonFrame = CGRect(origin: CGPoint(x: firstActionButtonFrame.minX + actionMicrophoneButtonSpacing + actionButtonDiameter, y: microphoneButtonFrame.minY + floor((microphoneButtonFrame.height - actionButtonDiameter) * 0.5)), size: actionButtonSize)
 
 var fourthActionButtonFrame = CGRect(origin: CGPoint(x: microphoneButtonFrame.maxX + 2.0 * actionMicrophoneButtonSpacing + actionButtonDiameter, y: microphoneButtonFrame.minY + floor((microphoneButtonFrame.height - actionButtonDiameter) * 0.5)), size: actionButtonSize)
 var thirdActionButtonFrame = CGRect(origin: CGPoint(x: microphoneButtonFrame.maxX + actionMicrophoneButtonSpacing, y: microphoneButtonFrame.minY + floor((microphoneButtonFrame.height - actionButtonDiameter) * 0.5)), size: actionButtonSize)
 
-if buttonsOnTheSide {
-secondActionButtonFrame.origin.x = microphoneButtonFrame.minX
-secondActionButtonFrame.origin.y = microphoneButtonFrame.minY - landscapeControlsSpacing - actionButtonDiameter
-
+if buttonsCount == 4 {
+if buttonsOnTheSide {
+firstActionButtonFrame.origin.x = microphoneButtonFrame.minX
+secondActionButtonFrame.origin.x = microphoneButtonFrame.minX
+thirdActionButtonFrame.origin.x = microphoneButtonFrame.minX
+fourthActionButtonFrame.origin.x = microphoneButtonFrame.minX
+microphoneButtonFrame.origin.y = availableSize.height * 0.5 - landscapeControlsSpacing * 0.5 - actionButtonDiameter
+firstActionButtonFrame.origin.y = microphoneButtonFrame.minY - landscapeControlsSpacing - actionButtonDiameter
+thirdActionButtonFrame.origin.y = microphoneButtonFrame.maxY + landscapeControlsSpacing
+fourthActionButtonFrame.origin.y = microphoneButtonFrame.maxY + landscapeControlsSpacing + actionButtonDiameter + landscapeControlsSpacing
+} else {
+microphoneButtonFrame.origin.x = availableSize.width * 0.5 - actionMicrophoneButtonSpacing * 0.5 - actionButtonDiameter
+firstActionButtonFrame.origin.x = microphoneButtonFrame.minX - actionMicrophoneButtonSpacing - actionButtonDiameter
+thirdActionButtonFrame.origin.x = microphoneButtonFrame.maxX + actionMicrophoneButtonSpacing
+fourthActionButtonFrame.origin.x = microphoneButtonFrame.maxX + actionMicrophoneButtonSpacing + actionButtonDiameter + actionMicrophoneButtonSpacing
+}
+} else if buttonsOnTheSide {
 firstActionButtonFrame.origin.x = microphoneButtonFrame.minX
-firstActionButtonFrame.origin.y = secondActionButtonFrame.minY - landscapeControlsSpacing - actionButtonDiameter
-
+secondActionButtonFrame.origin.x = microphoneButtonFrame.minX
 thirdActionButtonFrame.origin.x = microphoneButtonFrame.minX
-thirdActionButtonFrame.origin.y = microphoneButtonFrame.maxY + landscapeControlsSpacing
-
 fourthActionButtonFrame.origin.x = microphoneButtonFrame.minX
+
+secondActionButtonFrame.origin.y = microphoneButtonFrame.minY - landscapeControlsSpacing - actionButtonDiameter
+firstActionButtonFrame.origin.y = secondActionButtonFrame.minY - landscapeControlsSpacing - actionButtonDiameter
+thirdActionButtonFrame.origin.y = microphoneButtonFrame.maxY + landscapeControlsSpacing
 fourthActionButtonFrame.origin.y = thirdActionButtonFrame.maxY + landscapeControlsSpacing
 }
+
@@ -3065,50 +3127,42 @@ final class VideoChatScreenComponent: Component {
 transition.setBounds(view: microphoneButtonView, bounds: CGRect(origin: CGPoint(), size: microphoneButtonFrame.size))
 }
 
-let videoButtonContent: VideoChatActionButtonComponent.Content?
-let videoControlButtonContent: VideoChatActionButtonComponent.Content
-
-var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
-var buttonIsEnabled = false
-if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
-buttonIsEnabled = availableOutputs.count > 1
-switch currentOutput {
-case .builtin:
-buttonAudio = .builtin
-case .speaker:
-buttonAudio = .speaker
-case .headphones:
-buttonAudio = .headphones
-case let .port(port):
-var type: VideoChatActionButtonComponent.Content.BluetoothType = .generic
-let portName = port.name.lowercased()
-if portName.contains("airpods max") {
-type = .airpodsMax
-} else if portName.contains("airpods pro") {
-type = .airpodsPro
-} else if portName.contains("airpods") {
-type = .airpods
-}
-buttonAudio = .bluetooth(type)
-}
-if availableOutputs.count <= 1 {
-buttonAudio = .none
+let _ = self.speakerButton.update(
+transition: transition,
+component: AnyComponent(PlainButtonComponent(
+content: AnyComponent(VideoChatActionButtonComponent(
+strings: environment.strings,
+content: videoControlButtonContent,
+microphoneState: actionButtonMicrophoneState,
+isCollapsed: areButtonsActuallyCollapsed || buttonsOnTheSide
+)),
+effectAlignment: .center,
+action: { [weak self] in
+guard let self else {
+return
+}
+if let state = self.callState, state.isMyVideoActive {
+if case let .group(groupCall) = self.currentCall {
+groupCall.switchVideoCamera()
+}
+} else {
+self.onAudioRoutePressed()
+}
+},
+animateAlpha: false
+)),
+environment: {},
+containerSize: CGSize(width: actionButtonDiameter, height: actionButtonDiameter)
+)
+if let speakerButtonView = self.speakerButton.view {
+if speakerButtonView.superview == nil {
+self.containerView.addSubview(speakerButtonView)
 }
+transition.setPosition(view: speakerButtonView, position: firstActionButtonFrame.center)
+transition.setBounds(view: speakerButtonView, bounds: CGRect(origin: CGPoint(), size: firstActionButtonFrame.size))
 }
 
-if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
-videoButtonContent = nil
-videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
-} else {
-let isVideoActive = self.callState?.isMyVideoActive ?? false
-videoButtonContent = .video(isActive: isVideoActive)
-if isVideoActive {
-videoControlButtonContent = .rotateCamera
-} else {
-videoControlButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
-}
-}
-
 if let videoButtonContent {
 let _ = self.videoButton.update(
 transition: transition,
@@ -3150,41 +3204,6 @@ final class VideoChatScreenComponent: Component {
 })
 }
 
-let _ = self.speakerButton.update(
-transition: transition,
-component: AnyComponent(PlainButtonComponent(
-content: AnyComponent(VideoChatActionButtonComponent(
-strings: environment.strings,
-content: videoControlButtonContent,
-microphoneState: actionButtonMicrophoneState,
-isCollapsed: areButtonsActuallyCollapsed || buttonsOnTheSide
-)),
-effectAlignment: .center,
-action: { [weak self] in
-guard let self else {
-return
-}
-if let state = self.callState, state.isMyVideoActive {
-if case let .group(groupCall) = self.currentCall {
-groupCall.switchVideoCamera()
-}
-} else {
-self.onAudioRoutePressed()
-}
-},
-animateAlpha: false
-)),
-environment: {},
-containerSize: CGSize(width: actionButtonDiameter, height: actionButtonDiameter)
-)
-if let speakerButtonView = self.speakerButton.view {
-if speakerButtonView.superview == nil {
-self.containerView.addSubview(speakerButtonView)
-}
-transition.setPosition(view: speakerButtonView, position: firstActionButtonFrame.center)
-transition.setBounds(view: speakerButtonView, bounds: CGRect(origin: CGPoint(), size: firstActionButtonFrame.size))
-}
-
 let _ = self.messageButton.update(
 transition: transition,
 component: AnyComponent(PlainButtonComponent(
@@ -3246,7 +3265,7 @@ final class VideoChatScreenComponent: Component {
 var inputPanelBottomInset: CGFloat = 0.0
 var inputPanelSize: CGSize = .zero
 if self.inputPanelIsActive {
-let inputPanelAvailableWidth = availableSize.width
+let inputPanelAvailableWidth = availableSize.width - environment.safeInsets.left - environment.safeInsets.right
 var inputPanelAvailableHeight = 103.0
 
 let keyboardWasHidden = self.inputPanelExternalState.isKeyboardHidden
@@ -3469,7 +3488,11 @@ final class VideoChatScreenComponent: Component {
 inputPanelBottomInset = inputHeight - environment.safeInsets.bottom
 } else {
 if self.inputPanelExternalState.isEditing {
-inputPanelBottomInset = availableSize.height - microphoneButtonFrame.minY
+if buttonsOnTheSide {
+inputPanelBottomInset = 16.0
+} else {
+inputPanelBottomInset = availableSize.height - microphoneButtonFrame.minY
+}
 } else {
 inputPanelBottomInset = -inputPanelSize.height - environment.safeInsets.bottom
 }
@@ -3765,7 +3788,8 @@ final class VideoChatScreenComponent: Component {
 }
 }
 
-let messagesBottomInset: CGFloat = max(inputPanelBottomInset + inputPanelSize.height + 31.0, availableSize.height - microphoneButtonFrame.minY + 16.0) + reactionsInset
+let normalMessagesBottomInset: CGFloat = buttonsOnTheSide ? 16.0 : availableSize.height - microphoneButtonFrame.minY + 16.0
+let messagesBottomInset: CGFloat = max(inputPanelBottomInset + inputPanelSize.height + 31.0, normalMessagesBottomInset) + reactionsInset
 let messagesListSize = self.messagesList.update(
 transition: transition,
 component: AnyComponent(MessageListComponent(
@@ -3775,9 +3799,9 @@ final class VideoChatScreenComponent: Component {
 sendActionTransition: sendActionTransition
 )),
 environment: {},
-containerSize: CGSize(width: availableSize.width, height: availableSize.height - messagesBottomInset)
+containerSize: CGSize(width: availableSize.width - environment.safeInsets.left - environment.safeInsets.right, height: availableSize.height - messagesBottomInset)
 )
-let messagesListFrame = CGRect(origin: CGPoint(x: 0.0, y: availableSize.height - messagesListSize.height - messagesBottomInset), size: messagesListSize)
+let messagesListFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - messagesListSize.width) / 2.0), y: availableSize.height - messagesListSize.height - messagesBottomInset), size: messagesListSize)
 if let messagesListView = self.messagesList.view {
 if messagesListView.superview == nil {
 messagesListView.isUserInteractionEnabled = false
@@ -1512,7 +1512,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
 }
 }
 }
-items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: presentationData.strings.PeerInfo_Notes, rightLabel: presentationData.strings.PeerInfo_NotesInfo, text: note.text, entities: note.entities, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: []), action: nil, linkItemAction: bioLinkAction, button: nil, contextAction: noteContextAction, requestLayout: { animated in
+items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: presentationData.strings.PeerInfo_Notes, rightLabel: presentationData.strings.PeerInfo_NotesInfo, text: note.text, entities: entities, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: []), action: nil, linkItemAction: bioLinkAction, button: nil, contextAction: noteContextAction, requestLayout: { animated in
 interaction.requestLayout(animated)
 }))
 }