Video chat improvements

Author: Isaac
Date: 2024-10-01 21:26:30 +08:00
parent 5eeb6088f7
commit fab8c09a37
17 changed files with 468 additions and 129 deletions

View File

@@ -67,7 +67,7 @@ private class AvatarNodeParameters: NSObject {
     }
 }
-private func calculateColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] {
+public func calculateAvatarColors(context: AccountContext?, explicitColorIndex: Int?, peerId: EnginePeer.Id?, nameColor: PeerNameColor?, icon: AvatarNodeIcon, theme: PresentationTheme?) -> [UIColor] {
     let colorIndex: Int
     if let explicitColorIndex = explicitColorIndex {
         colorIndex = explicitColorIndex
@@ -183,7 +183,7 @@ private func ==(lhs: AvatarNodeState, rhs: AvatarNodeState) -> Bool {
     }
 }
-private enum AvatarNodeIcon: Equatable {
+public enum AvatarNodeIcon: Equatable {
     case none
     case savedMessagesIcon
     case repliesIcon
@@ -577,7 +577,7 @@ public final class AvatarNode: ASDisplayNode {
                 self.editOverlayNode?.isHidden = true
             }
-            parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
+            parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer.id, colors: calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
         } else {
             self.imageReady.set(.single(true))
             self.displaySuspended = false
@@ -586,7 +586,7 @@ public final class AvatarNode: ASDisplayNode {
             }
             self.editOverlayNode?.isHidden = true
-            let colors = calculateColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
+            let colors = calculateAvatarColors(context: nil, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
             parameters = AvatarNodeParameters(theme: theme, accountPeerId: accountPeerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? [], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle)
             if let badgeView = self.badgeView {
@@ -754,7 +754,7 @@ public final class AvatarNode: ASDisplayNode {
                 self.editOverlayNode?.isHidden = true
             }
-            parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
+            parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer.id, colors: calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: icon, theme: theme), letters: peer.displayLetters, font: self.font, icon: icon, explicitColorIndex: nil, hasImage: true, clipStyle: clipStyle)
         } else {
             self.imageReady.set(.single(true))
             self.displaySuspended = false
@@ -763,7 +763,7 @@ public final class AvatarNode: ASDisplayNode {
             }
             self.editOverlayNode?.isHidden = true
-            let colors = calculateColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
+            let colors = calculateAvatarColors(context: genericContext, explicitColorIndex: nil, peerId: peer?.id ?? EnginePeer.Id(0), nameColor: peer?.nameColor, icon: icon, theme: theme)
             parameters = AvatarNodeParameters(theme: theme, accountPeerId: account.peerId, peerId: peer?.id ?? EnginePeer.Id(0), colors: colors, letters: peer?.displayLetters ?? [], font: self.font, icon: icon, explicitColorIndex: nil, hasImage: false, clipStyle: clipStyle)
             if let badgeView = self.badgeView {
@@ -800,9 +800,9 @@ public final class AvatarNode: ASDisplayNode {
         let parameters: AvatarNodeParameters
         if let icon = icon, case .phone = icon {
-            parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
+            parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .phoneIcon, theme: nil), letters: [], font: self.font, icon: .phoneIcon, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
         } else {
-            parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
+            parameters = AvatarNodeParameters(theme: nil, accountPeerId: nil, peerId: nil, colors: calculateAvatarColors(context: nil, explicitColorIndex: explicitIndex, peerId: nil, nameColor: nil, icon: .none, theme: nil), letters: letters, font: self.font, icon: .none, explicitColorIndex: explicitIndex, hasImage: false, clipStyle: .round)
         }
         self.displaySuspended = true
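
Note: promoting the private calculateColors to the public calculateAvatarColors is what lets the video chat tile hunks further down reuse the avatar palette for their background gradient. A minimal sketch of that usage, assuming the module names visible in this diff (AvatarNode for calculateAvatarColors, Display for generateGradientImage) and an AccountContext, EnginePeer and PresentationTheme already in scope; the helper itself is hypothetical and not part of the commit:

    import UIKit
    import Display
    import AvatarNode
    import AccountContext
    import TelegramCore
    import TelegramPresentationData

    // Hypothetical helper: build the same vertical gradient the participant
    // tile uses, derived from the peer's avatar color palette.
    func participantBackgroundImage(context: AccountContext, peer: EnginePeer, theme: PresentationTheme) -> UIImage? {
        let colors = calculateAvatarColors(context: context, explicitColorIndex: nil, peerId: peer.id, nameColor: peer.nameColor, icon: .none, theme: theme)
        // The commit passes the palette reversed; kept here for parity.
        return generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: colors.reversed(), locations: [0.0, 1.0], direction: .vertical)
    }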

View File

@@ -8,11 +8,15 @@ public final class BundleIconComponent: Component {
     public let name: String
     public let tintColor: UIColor?
     public let maxSize: CGSize?
+    public let shadowColor: UIColor?
+    public let shadowBlur: CGFloat
-    public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil) {
+    public init(name: String, tintColor: UIColor?, maxSize: CGSize? = nil, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) {
         self.name = name
         self.tintColor = tintColor
         self.maxSize = maxSize
+        self.shadowColor = shadowColor
+        self.shadowBlur = shadowBlur
     }
     public static func ==(lhs: BundleIconComponent, rhs: BundleIconComponent) -> Bool {
@@ -25,6 +29,12 @@ public final class BundleIconComponent: Component {
         if lhs.maxSize != rhs.maxSize {
             return false
         }
+        if lhs.shadowColor != rhs.shadowColor {
+            return false
+        }
+        if lhs.shadowBlur != rhs.shadowBlur {
+            return false
+        }
         return true
     }
@@ -40,12 +50,24 @@ public final class BundleIconComponent: Component {
         }
         func update(component: BundleIconComponent, availableSize: CGSize, transition: ComponentTransition) -> CGSize {
-            if self.component?.name != component.name || self.component?.tintColor != component.tintColor {
+            if self.component?.name != component.name || self.component?.tintColor != component.tintColor || self.component?.shadowColor != component.shadowColor || self.component?.shadowBlur != component.shadowBlur {
+                var image: UIImage?
                 if let tintColor = component.tintColor {
-                    self.image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil)
+                    image = generateTintedImage(image: UIImage(bundleImageName: component.name), color: tintColor, backgroundColor: nil)
                 } else {
-                    self.image = UIImage(bundleImageName: component.name)
+                    image = UIImage(bundleImageName: component.name)
                 }
+                if let imageValue = image, let shadowColor = component.shadowColor, component.shadowBlur != 0.0 {
+                    image = generateImage(CGSize(width: imageValue.size.width + component.shadowBlur * 2.0, height: imageValue.size.height + component.shadowBlur * 2.0), contextGenerator: { size, context in
+                        context.clear(CGRect(origin: CGPoint(), size: size))
+                        context.setShadow(offset: CGSize(), blur: component.shadowBlur, color: shadowColor.cgColor)
+                        if let cgImage = imageValue.cgImage {
+                            context.draw(cgImage, in: CGRect(origin: CGPoint(x: component.shadowBlur, y: component.shadowBlur), size: imageValue.size))
+                        }
+                    })
+                }
+                self.image = image
             }
             self.component = component
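
Note: the shadow is baked into the bitmap here (the canvas is padded by shadowBlur on every side) instead of being applied through CALayer shadow properties, which is what lets later hunks in this commit delete the layer.shadow* assignments. A standalone sketch of the same technique using plain UIKit's UIGraphicsImageRenderer rather than the Display module's generateImage; names are illustrative:

    import UIKit

    // Render `image` into a canvas grown by `blur` on each edge, with a
    // Core Graphics shadow baked into the pixels so no layer shadow is needed.
    func imageWithBakedShadow(_ image: UIImage, blur: CGFloat, color: UIColor) -> UIImage {
        let paddedSize = CGSize(width: image.size.width + blur * 2.0, height: image.size.height + blur * 2.0)
        return UIGraphicsImageRenderer(size: paddedSize).image { context in
            context.cgContext.setShadow(offset: .zero, blur: blur, color: color.cgColor)
            // Drawing the UIImage keeps UIKit's top-left origin and orientation.
            image.draw(in: CGRect(origin: CGPoint(x: blur, y: blur), size: image.size))
        }
    }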

View File

@@ -36,13 +36,13 @@ final class VideoChatActionButtonComponent: Component {
             case leave
         }
-        case audio(audio: Audio)
+        case audio(audio: Audio, isEnabled: Bool)
         case video(isActive: Bool)
         case leave
         fileprivate var iconType: IconType {
             switch self {
-            case let .audio(audio):
+            case let .audio(audio, _):
                 let mappedAudio: IconType.Audio
                 switch audio {
                 case .none, .builtin, .speaker:
@@ -136,14 +136,16 @@ final class VideoChatActionButtonComponent: Component {
             let titleText: String
             let backgroundColor: UIColor
             let iconDiameter: CGFloat
+            var isEnabled: Bool = true
             switch component.content {
-            case let .audio(audio):
+            case let .audio(audio, isEnabledValue):
                 var isActive = false
                 switch audio {
                 case .none, .builtin:
                     titleText = component.strings.Call_Speaker
                 case .speaker:
-                    isActive = true
+                    isEnabled = isEnabledValue
+                    isActive = isEnabledValue
                     titleText = component.strings.Call_Speaker
                 case .headphones:
                     titleText = component.strings.Call_Audio
@@ -276,8 +278,11 @@ final class VideoChatActionButtonComponent: Component {
                     self.addSubview(iconView)
                 }
                 transition.setFrame(view: iconView, frame: iconFrame)
+                transition.setAlpha(view: iconView, alpha: isEnabled ? 1.0 : 0.6)
             }
+            self.isEnabled = isEnabled
             return size
         }
     }
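
Note: .audio now carries an explicit isEnabled flag; when it is false the update pass dims the icon to 0.6 alpha and mirrors the flag into the control's isEnabled. The VideoChatScreenComponent hunk near the end of this commit derives the flag from the number of available audio outputs; a tiny illustrative helper (hypothetical, not part of the commit) expressing that rule:

    // The speaker toggle is only actionable when there is more than one
    // audio route to switch between.
    func speakerButtonContent(audio: VideoChatActionButtonComponent.Content.Audio, availableOutputCount: Int) -> VideoChatActionButtonComponent.Content {
        return .audio(audio: audio, isEnabled: availableOutputCount > 1)
    }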

View File

@@ -16,13 +16,19 @@ final class VideoChatMuteIconComponent: Component {
     let color: UIColor
     let content: Content
+    let shadowColor: UIColor?
+    let shadowBlur: CGFloat
     init(
         color: UIColor,
-        content: Content
+        content: Content,
+        shadowColor: UIColor? = nil,
+        shadowBlur: CGFloat = 0.0
     ) {
         self.color = color
         self.content = content
+        self.shadowColor = shadowColor
+        self.shadowBlur = shadowBlur
     }
     static func ==(lhs: VideoChatMuteIconComponent, rhs: VideoChatMuteIconComponent) -> Bool {
@@ -32,6 +38,12 @@ final class VideoChatMuteIconComponent: Component {
         if lhs.content != rhs.content {
             return false
         }
+        if lhs.shadowColor != rhs.shadowColor {
+            return false
+        }
+        if lhs.shadowBlur != rhs.shadowBlur {
+            return false
+        }
         return true
     }
@@ -75,9 +87,9 @@ final class VideoChatMuteIconComponent: Component {
             }
             let animationSize = availableSize
-            let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize))
+            let animationFrame = animationSize.centered(in: CGRect(origin: CGPoint(), size: availableSize)).insetBy(dx: -component.shadowBlur, dy: -component.shadowBlur)
             transition.setFrame(view: icon.view, frame: animationFrame)
-            icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color), animated: !transition.animation.isImmediate)
+            icon.update(state: VoiceChatMicrophoneNode.State(muted: isMuted, filled: isFilled, color: component.color, shadowColor: component.shadowColor, shadowBlur: component.shadowBlur), animated: !transition.animation.isImmediate)
         } else {
             if let icon = self.icon {
                 self.icon = nil
@@ -97,7 +109,9 @@ final class VideoChatMuteIconComponent: Component {
                 transition: transition,
                 component: AnyComponent(BundleIconComponent(
                     name: "Call/StatusScreen",
-                    tintColor: component.color
+                    tintColor: component.color,
+                    shadowColor: component.shadowColor,
+                    shadowBlur: component.shadowBlur
                 )),
                 environment: {},
                 containerSize: availableSize

View File

@@ -12,6 +12,9 @@ import AccountContext
 import SwiftSignalKit
 import DirectMediaImageCache
 import FastBlur
+import ContextUI
+import ComponentDisplayAdapters
+import AvatarNode
 private func blurredAvatarImage(_ dataImage: UIImage) -> UIImage? {
     let imageContextSize = CGSize(width: 64.0, height: 64.0)
@@ -35,6 +38,7 @@ private let activityBorderImage: UIImage = {
 }()
 final class VideoChatParticipantVideoComponent: Component {
+    let theme: PresentationTheme
     let strings: PresentationStrings
     let call: PresentationGroupCall
     let participant: GroupCallParticipantsContext.Participant
@@ -47,8 +51,12 @@ final class VideoChatParticipantVideoComponent: Component {
     let controlInsets: UIEdgeInsets
     let interfaceOrientation: UIInterfaceOrientation
     let action: (() -> Void)?
+    let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
+    let activatePinch: ((PinchSourceContainerNode) -> Void)?
+    let deactivatedPinch: (() -> Void)?
     init(
+        theme: PresentationTheme,
         strings: PresentationStrings,
         call: PresentationGroupCall,
         participant: GroupCallParticipantsContext.Participant,
@@ -60,8 +68,12 @@ final class VideoChatParticipantVideoComponent: Component {
         contentInsets: UIEdgeInsets,
         controlInsets: UIEdgeInsets,
         interfaceOrientation: UIInterfaceOrientation,
-        action: (() -> Void)?
+        action: (() -> Void)?,
+        contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?,
+        activatePinch: ((PinchSourceContainerNode) -> Void)?,
+        deactivatedPinch: (() -> Void)?
     ) {
+        self.theme = theme
         self.strings = strings
         self.call = call
         self.participant = participant
@@ -74,6 +86,9 @@ final class VideoChatParticipantVideoComponent: Component {
         self.controlInsets = controlInsets
         self.interfaceOrientation = interfaceOrientation
         self.action = action
+        self.contextAction = contextAction
+        self.activatePinch = activatePinch
+        self.deactivatedPinch = deactivatedPinch
     }
     static func ==(lhs: VideoChatParticipantVideoComponent, rhs: VideoChatParticipantVideoComponent) -> Bool {
@@ -107,6 +122,15 @@ final class VideoChatParticipantVideoComponent: Component {
         if (lhs.action == nil) != (rhs.action == nil) {
             return false
         }
+        if (lhs.contextAction == nil) != (rhs.contextAction == nil) {
+            return false
+        }
+        if (lhs.activatePinch == nil) != (rhs.activatePinch == nil) {
+            return false
+        }
+        if (lhs.deactivatedPinch == nil) != (rhs.deactivatedPinch == nil) {
+            return false
+        }
         return true
     }
@@ -144,7 +168,7 @@ final class VideoChatParticipantVideoComponent: Component {
         }
     }
-    final class View: HighlightTrackingButton {
+    final class View: ContextControllerSourceView {
         private var component: VideoChatParticipantVideoComponent?
         private weak var componentState: EmptyComponentState?
         private var isUpdating: Bool = false
@@ -158,6 +182,8 @@ final class VideoChatParticipantVideoComponent: Component {
         private var blurredAvatarDisposable: Disposable?
         private var blurredAvatarView: UIImageView?
+        private let pinchContainerNode: PinchSourceContainerNode
+        private let extractedContainerView: ContextExtractedContentContainingView
         private var videoSource: AdaptedCallVideoSource?
         private var videoDisposable: Disposable?
         private var videoBackgroundLayer: SimpleLayer?
@@ -173,16 +199,44 @@ final class VideoChatParticipantVideoComponent: Component {
         override init(frame: CGRect) {
             self.backgroundGradientView = UIImageView()
+            self.pinchContainerNode = PinchSourceContainerNode()
+            self.extractedContainerView = ContextExtractedContentContainingView()
             super.init(frame: frame)
-            self.addSubview(self.backgroundGradientView)
+            self.addSubview(self.extractedContainerView)
+            self.targetViewForActivationProgress = self.extractedContainerView
+            self.extractedContainerView.contentView.addSubview(self.pinchContainerNode.view)
+            self.pinchContainerNode.contentNode.view.addSubview(self.backgroundGradientView)
             //TODO:release optimize
-            self.clipsToBounds = true
-            self.layer.cornerRadius = 10.0
-            self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
+            self.pinchContainerNode.contentNode.view.layer.cornerRadius = 10.0
+            self.pinchContainerNode.contentNode.view.clipsToBounds = true
+            self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
+            self.pinchContainerNode.activate = { [weak self] sourceNode in
+                guard let self, let component = self.component else {
+                    return
+                }
+                component.activatePinch?(sourceNode)
+            }
+            self.pinchContainerNode.animatedOut = { [weak self] in
+                guard let self, let component = self.component else {
+                    return
+                }
+                component.deactivatedPinch?()
+            }
+            self.activated = { [weak self] gesture, _ in
+                guard let self, let component = self.component else {
+                    gesture.cancel()
+                    return
+                }
+                component.contextAction?(EnginePeer(component.participant.peer), self.extractedContainerView, gesture)
+            }
         }
         required init?(coder: NSCoder) {
@@ -194,11 +248,13 @@ final class VideoChatParticipantVideoComponent: Component {
             self.blurredAvatarDisposable?.dispose()
         }
-        @objc private func pressed() {
-            guard let component = self.component, let action = component.action else {
-                return
-            }
-            action()
-        }
+        @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
+            if case .ended = recognizer.state {
+                guard let component = self.component, let action = component.action else {
+                    return
+                }
+                action()
+            }
+        }
         func update(component: VideoChatParticipantVideoComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
@@ -211,6 +267,19 @@ final class VideoChatParticipantVideoComponent: Component {
             self.component = component
             self.componentState = state
+            self.isGestureEnabled = !component.isExpanded
+            self.pinchContainerNode.isPinchGestureEnabled = component.activatePinch != nil
+            transition.setPosition(view: self.pinchContainerNode.view, position: CGRect(origin: CGPoint(), size: availableSize).center)
+            transition.setBounds(view: self.pinchContainerNode.view, bounds: CGRect(origin: CGPoint(), size: availableSize))
+            self.pinchContainerNode.update(size: availableSize, transition: transition.containedViewLayoutTransition)
+            transition.setPosition(view: self.extractedContainerView, position: CGRect(origin: CGPoint(), size: availableSize).center)
+            transition.setBounds(view: self.extractedContainerView, bounds: CGRect(origin: CGPoint(), size: availableSize))
+            self.extractedContainerView.contentRect = CGRect(origin: CGPoint(), size: availableSize)
+            transition.setFrame(view: self.pinchContainerNode.contentNode.view, frame: CGRect(origin: CGPoint(), size: availableSize))
             transition.setFrame(view: self.backgroundGradientView, frame: CGRect(origin: CGPoint(), size: availableSize))
             let alphaTransition: ComponentTransition
@@ -229,14 +298,10 @@ final class VideoChatParticipantVideoComponent: Component {
             let controlsAlpha: CGFloat = component.isUIHidden ? 0.0 : 1.0
-            let nameColor = component.participant.peer.nameColor ?? .blue
-            let nameColors = component.call.accountContext.peerNameColors.get(nameColor, dark: true)
             if previousComponent == nil {
-                self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: [
-                    nameColors.main.withMultiplied(hue: 1.0, saturation: 1.1, brightness: 1.3),
-                    nameColors.main.withMultiplied(hue: 1.0, saturation: 1.2, brightness: 1.0)
-                ], locations: [0.0, 1.0], direction: .vertical)
+                let colors = calculateAvatarColors(context: component.call.accountContext, explicitColorIndex: nil, peerId: component.participant.peer.id, nameColor: component.participant.peer.nameColor, icon: .none, theme: component.theme)
+                self.backgroundGradientView.image = generateGradientImage(size: CGSize(width: 8.0, height: 32.0), colors: colors.reversed(), locations: [0.0, 1.0], direction: .vertical)
             }
             if let smallProfileImage = component.participant.peer.smallProfileImage {
@@ -249,7 +314,7 @@ final class VideoChatParticipantVideoComponent: Component {
                     blurredAvatarView = UIImageView()
                     blurredAvatarView.contentMode = .scaleAspectFill
                     self.blurredAvatarView = blurredAvatarView
-                    self.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView)
+                    self.pinchContainerNode.contentNode.view.insertSubview(blurredAvatarView, aboveSubview: self.backgroundGradientView)
                     blurredAvatarView.frame = CGRect(origin: CGPoint(), size: availableSize)
                 }
@@ -292,7 +357,9 @@ final class VideoChatParticipantVideoComponent: Component {
                 transition: transition,
                 component: AnyComponent(VideoChatMuteIconComponent(
                     color: .white,
-                    content: component.isPresentation ? .screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking)
+                    content: component.isPresentation ? .screenshare : .mute(isFilled: true, isMuted: component.participant.muteState != nil && !component.isSpeaking),
+                    shadowColor: UIColor(white: 0.0, alpha: 0.7),
+                    shadowBlur: 8.0
                 )),
                 environment: {},
                 containerSize: CGSize(width: 36.0, height: 36.0)
@@ -305,14 +372,8 @@ final class VideoChatParticipantVideoComponent: Component {
             }
             if let muteStatusView = self.muteStatus.view {
                 if muteStatusView.superview == nil {
-                    self.addSubview(muteStatusView)
+                    self.pinchContainerNode.contentNode.view.addSubview(muteStatusView)
                     muteStatusView.alpha = controlsAlpha
-                    //TODO:release
-                    muteStatusView.layer.shadowOpacity = 0.7
-                    muteStatusView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
-                    muteStatusView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
-                    muteStatusView.layer.shadowRadius = 8.0
                 }
                 transition.setPosition(view: muteStatusView, position: muteStatusFrame.center)
                 transition.setBounds(view: muteStatusView, bounds: CGRect(origin: CGPoint(), size: muteStatusFrame.size))
@@ -320,31 +381,29 @@ final class VideoChatParticipantVideoComponent: Component {
                 alphaTransition.setAlpha(view: muteStatusView, alpha: controlsAlpha)
             }
+            let titleInnerInsets = UIEdgeInsets(top: 8.0, left: 8.0, bottom: 8.0, right: 8.0)
             let titleSize = self.title.update(
                 transition: .immediate,
                 component: AnyComponent(MultilineTextComponent(
-                    text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white))
+                    text: .plain(NSAttributedString(string: component.participant.peer.debugDisplayTitle, font: Font.semibold(16.0), textColor: .white)),
+                    insets: titleInnerInsets,
+                    textShadowColor: UIColor(white: 0.0, alpha: 0.7),
+                    textShadowBlur: 8.0
                 )),
                 environment: {},
                 containerSize: CGSize(width: availableSize.width - 8.0 * 2.0 - 4.0, height: 100.0)
             )
             let titleFrame: CGRect
             if component.isExpanded {
-                titleFrame = CGRect(origin: CGPoint(x: 36.0, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height), size: titleSize)
+                titleFrame = CGRect(origin: CGPoint(x: 36.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 8.0 - titleSize.height + titleInnerInsets.top), size: titleSize)
             } else {
-                titleFrame = CGRect(origin: CGPoint(x: 29.0, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height), size: titleSize)
+                titleFrame = CGRect(origin: CGPoint(x: 29.0 - titleInnerInsets.left, y: availableSize.height - component.controlInsets.bottom - 4.0 - titleSize.height + titleInnerInsets.top + 1.0), size: titleSize)
             }
             if let titleView = self.title.view {
                 if titleView.superview == nil {
                     titleView.layer.anchorPoint = CGPoint()
-                    self.addSubview(titleView)
+                    self.pinchContainerNode.contentNode.view.addSubview(titleView)
                     titleView.alpha = controlsAlpha
-                    //TODO:release
-                    titleView.layer.shadowOpacity = 0.7
-                    titleView.layer.shadowColor = UIColor(white: 0.0, alpha: 1.0).cgColor
-                    titleView.layer.shadowOffset = CGSize(width: 0.0, height: 1.0)
-                    titleView.layer.shadowRadius = 8.0
                 }
                 transition.setPosition(view: titleView, position: titleFrame.origin)
                 titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
@@ -377,9 +436,9 @@ final class VideoChatParticipantVideoComponent: Component {
                 videoBackgroundLayer.opacity = 0.0
                 self.videoBackgroundLayer = videoBackgroundLayer
                 if let blurredAvatarView = self.blurredAvatarView {
-                    self.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer)
+                    self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: blurredAvatarView.layer)
                 } else {
-                    self.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer)
+                    self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoBackgroundLayer, above: self.backgroundGradientView.layer)
                 }
                 videoBackgroundLayer.isHidden = true
             }
@@ -391,8 +450,8 @@ final class VideoChatParticipantVideoComponent: Component {
                 videoLayer = PrivateCallVideoLayer()
                 self.videoLayer = videoLayer
                 videoLayer.opacity = 0.0
-                self.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
-                self.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
+                self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
+                self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
                 videoLayer.blurredLayer.opacity = 0.0
@@ -537,7 +596,7 @@ final class VideoChatParticipantVideoComponent: Component {
                     if videoStatusView.superview == nil {
                         videoStatusView.isUserInteractionEnabled = false
                         videoStatusView.alpha = 0.0
-                        self.addSubview(videoStatusView)
+                        self.pinchContainerNode.contentNode.view.addSubview(videoStatusView)
                     }
                     videoStatusTransition.setFrame(view: videoStatusView, frame: CGRect(origin: CGPoint(), size: availableSize))
                     videoAlphaTransition.setAlpha(view: videoStatusView, alpha: 1.0)
@@ -557,7 +616,7 @@ final class VideoChatParticipantVideoComponent: Component {
                 self.loadingEffectView = loadingEffectView
                 loadingEffectView.alpha = 0.0
                 loadingEffectView.isUserInteractionEnabled = false
-                self.addSubview(loadingEffectView)
+                self.pinchContainerNode.contentNode.view.addSubview(loadingEffectView)
                 if let referenceLocation = self.referenceLocation {
                     self.updateHorizontalReferenceLocation(containerWidth: referenceLocation.containerWidth, positionX: referenceLocation.positionX, transition: .immediate)
                 }
@@ -578,7 +637,7 @@ final class VideoChatParticipantVideoComponent: Component {
             } else {
                 activityBorderView = UIImageView()
                 self.activityBorderView = activityBorderView
-                self.addSubview(activityBorderView)
+                self.pinchContainerNode.contentNode.view.addSubview(activityBorderView)
                 activityBorderView.image = activityBorderImage
                 activityBorderView.tintColor = UIColor(rgb: 0x33C758)

View File

@@ -10,6 +10,7 @@ import SwiftSignalKit
 import MultilineTextComponent
 import TelegramPresentationData
 import PeerListItemComponent
+import ContextUI
 final class VideoChatParticipantsComponent: Component {
     struct Layout: Equatable {
@@ -645,6 +646,8 @@ final class VideoChatParticipantsComponent: Component {
         private var appliedGridIsEmpty: Bool = true
+        private var isPinchToZoomActive: Bool = false
         private var currentLoadMoreToken: String?
         private var mainScrollViewEventCycleState: EventCycleState?
@@ -986,7 +989,10 @@ final class VideoChatParticipantsComponent: Component {
                 var itemControlInsets: UIEdgeInsets
                 if isItemExpanded {
                     itemControlInsets = itemContentInsets
-                    itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0)
+                    if let expandedVideoState = component.expandedVideoState, expandedVideoState.isUIHidden {
+                    } else {
+                        itemControlInsets.bottom = max(itemControlInsets.bottom, 96.0)
+                    }
                 } else {
                     itemControlInsets = itemContentInsets
                 }
@@ -1003,6 +1009,7 @@ final class VideoChatParticipantsComponent: Component {
                 let _ = itemView.view.update(
                     transition: itemTransition,
                     component: AnyComponent(VideoChatParticipantVideoComponent(
+                        theme: component.theme,
                         strings: component.strings,
                         call: component.call,
                         participant: videoParticipant.participant,
@@ -1010,7 +1017,7 @@ final class VideoChatParticipantsComponent: Component {
                         isPresentation: videoParticipant.isPresentation,
                         isSpeaking: component.speakingParticipants.contains(videoParticipant.participant.peer.id),
                         isExpanded: isItemExpanded,
-                        isUIHidden: isItemUIHidden,
+                        isUIHidden: isItemUIHidden || self.isPinchToZoomActive,
                         contentInsets: itemContentInsets,
                         controlInsets: itemControlInsets,
                         interfaceOrientation: component.interfaceOrientation,
@@ -1032,7 +1039,31 @@ final class VideoChatParticipantsComponent: Component {
                                     component.updateMainParticipant(videoParticipantKey, nil)
                                 }
                             }
-                        }
+                        },
+                        contextAction: !isItemExpanded ? { [weak self] peer, sourceView, gesture in
+                            guard let self, let component = self.component else {
+                                return
+                            }
+                            component.openParticipantContextMenu(peer.id, sourceView, gesture)
+                        } : nil,
+                        activatePinch: isItemExpanded ? { [weak self] sourceNode in
+                            guard let self, let component = self.component else {
+                                return
+                            }
+                            self.isPinchToZoomActive = true
+                            self.state?.updated(transition: .immediate, isLocal: true)
+                            let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
+                                return UIScreen.main.bounds
+                            })
+                            component.call.accountContext.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
+                        } : nil,
+                        deactivatedPinch: isItemExpanded ? { [weak self] in
+                            guard let self else {
+                                return
+                            }
+                            self.isPinchToZoomActive = false
+                            self.state?.updated(transition: .spring(duration: 0.4), isLocal: true)
+                        } : nil
                     )),
                     environment: {},
                     containerSize: itemFrame.size
@@ -1158,7 +1189,7 @@ final class VideoChatParticipantsComponent: Component {
                 if participant.peer.id == component.call.accountContext.account.peerId {
                     subtitle = PeerListItemComponent.Subtitle(text: "this is you", color: .accent)
                 } else if component.speakingParticipants.contains(participant.peer.id) {
-                    if let volume = participant.volume, volume != 10000 {
+                    if let volume = participant.volume, volume / 100 != 100 {
                         subtitle = PeerListItemComponent.Subtitle(text: "\(volume / 100)% speaking", color: .constructive)
                     } else {
                         subtitle = PeerListItemComponent.Subtitle(text: "speaking", color: .constructive)
@@ -1322,17 +1353,8 @@ final class VideoChatParticipantsComponent: Component {
                 ))
             }*/
-            let expandedControlsAlpha: CGFloat = expandedVideoState.isUIHidden ? 0.0 : 1.0
+            let expandedControlsAlpha: CGFloat = (expandedVideoState.isUIHidden || self.isPinchToZoomActive) ? 0.0 : 1.0
             let expandedThumbnailsAlpha: CGFloat = expandedControlsAlpha
-            /*if itemLayout.layout.videoColumn == nil {
-                if expandedVideoState.isUIHidden {
-                    expandedThumbnailsAlpha = 0.0
-                } else {
-                    expandedThumbnailsAlpha = 1.0
-                }
-            } else {
-                expandedThumbnailsAlpha = 0.0
-            }*/
             var expandedThumbnailsTransition = transition
             let expandedThumbnailsView: ComponentView<Empty>
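
Note on the speaking subtitle change above: participant.volume appears to be stored in 1/100 of a percent (10000 = 100%). The old guard `volume != 10000` only skipped the exact value, while the new `volume / 100 != 100` also skips any value whose integer-divided percentage comes out as 100, so near-full volumes no longer render as "100% speaking". A quick check of that arithmetic:

    // Integer division by 100 converts the raw value to whole percent.
    for volume in [10000, 10050, 9990, 5000] {
        let percent = volume / 100            // 100, 100, 99, 50
        let showsPercent = percent != 100     // false, false, true, true
        print(volume, percent, showsPercent)
    }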

View File

@@ -22,6 +22,7 @@ import ShareController
 import AvatarNode
 import TelegramAudio
 import LegacyComponents
+import TooltipUI
 final class VideoChatScreenComponent: Component {
     typealias EnvironmentType = ViewControllerComponentContainer.Environment
@@ -83,6 +84,7 @@ final class VideoChatScreenComponent: Component {
         var scheduleInfo: ComponentView<Empty>?
         var reconnectedAsEventsDisposable: Disposable?
+        var memberEventsDisposable: Disposable?
         var peer: EnginePeer?
         var callState: PresentationGroupCallState?
@@ -144,6 +146,7 @@ final class VideoChatScreenComponent: Component {
             self.membersDisposable?.dispose()
             self.applicationStateDisposable?.dispose()
             self.reconnectedAsEventsDisposable?.dispose()
+            self.memberEventsDisposable?.dispose()
             self.displayAsPeersDisposable?.dispose()
             self.audioOutputStateDisposable?.dispose()
             self.inviteLinksDisposable?.dispose()
@@ -819,7 +822,7 @@ final class VideoChatScreenComponent: Component {
             self.members = members
-            if let members, let _ = self.expandedParticipantsVideoState {
+            if let members, let expandedParticipantsVideoState = self.expandedParticipantsVideoState, !expandedParticipantsVideoState.isUIHidden {
                 var videoCount = 0
                 for participant in members.participants {
                     if participant.presentationDescription != nil {
@@ -1008,6 +1011,31 @@ final class VideoChatScreenComponent: Component {
                     }
                     self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
                 })
+                self.memberEventsDisposable = (component.call.memberEvents
+                |> deliverOnMainQueue).start(next: { [weak self] event in
+                    guard let self, let members = self.members, let component = self.component, let environment = self.environment else {
+                        return
+                    }
+                    if event.joined {
+                        var displayEvent = false
+                        if case let .channel(channel) = self.peer, case .broadcast = channel.info {
+                            displayEvent = false
+                        }
+                        if members.totalCount < 250 {
+                            displayEvent = true
+                        } else if event.peer.isVerified {
+                            displayEvent = true
+                        } else if event.isContact || event.isInChatList {
+                            displayEvent = true
+                        }
+                        if displayEvent {
+                            let text = environment.strings.VoiceChat_PeerJoinedText(event.peer.displayTitle(strings: environment.strings, displayOrder: component.call.accountContext.sharedContext.currentPresentationData.with({ $0 }).nameDisplayOrder)).string
+                            self.presentUndoOverlay(content: .invitedToVoiceChat(context: component.call.accountContext, peer: event.peer, title: nil, text: text, action: nil, duration: 3), action: { _ in return false })
+                        }
+                    }
+                })
             }
             self.isPresentedValue.set(environment.isVisible)
@@ -1072,7 +1100,7 @@ final class VideoChatScreenComponent: Component {
                 } else {
                     containerOffset = verticalPanState.fraction * availableSize.height
                 }
-                self.containerView.layer.cornerRadius = environment.deviceMetrics.screenCornerRadius
+                self.containerView.layer.cornerRadius = containerOffset.isZero ? 0.0 : environment.deviceMetrics.screenCornerRadius
             }
             transition.setFrame(view: self.containerView, frame: CGRect(origin: CGPoint(x: 0.0, y: containerOffset), size: availableSize), completion: { [weak self] completed in
@@ -1249,13 +1277,49 @@ final class VideoChatScreenComponent: Component {
             } else {
                 idleTitleStatusText = " "
             }
+            let canManageCall = self.callState?.canManageCall ?? false
             let titleSize = self.title.update(
                 transition: transition,
                 component: AnyComponent(VideoChatTitleComponent(
                     title: self.callState?.title ?? self.peer?.debugDisplayTitle ?? " ",
                     status: idleTitleStatusText,
                     isRecording: self.callState?.recordingStartTimestamp != nil,
-                    strings: environment.strings
+                    strings: environment.strings,
+                    tapAction: self.callState?.recordingStartTimestamp != nil ? { [weak self] in
+                        guard let self, let component = self.component, let environment = self.environment else {
+                            return
+                        }
+                        guard let titleView = self.title.view as? VideoChatTitleComponent.View, let recordingIndicatorView = titleView.recordingIndicatorView else {
+                            return
+                        }
+                        var hasTooltipAlready = false
+                        environment.controller()?.forEachController { controller -> Bool in
+                            if controller is TooltipScreen {
+                                hasTooltipAlready = true
+                            }
+                            return true
+                        }
+                        if !hasTooltipAlready {
+                            let location = recordingIndicatorView.convert(recordingIndicatorView.bounds, to: self)
+                            let text: String
+                            if case let .channel(channel) = self.peer, case .broadcast = channel.info {
+                                text = environment.strings.LiveStream_RecordingInProgress
+                            } else {
+                                text = environment.strings.VoiceChat_RecordingInProgress
+                            }
+                            environment.controller()?.present(TooltipScreen(account: component.call.accountContext.account, sharedContext: component.call.accountContext.sharedContext, text: .plain(text: text), icon: nil, location: .point(location.offsetBy(dx: 1.0, dy: 0.0), .top), displayDuration: .custom(3.0), shouldDismissOnTouch: { _, _ in
+                                return .dismiss(consume: true)
+                            }), in: .current)
+                        }
+                    } : nil,
+                    longTapAction: canManageCall ? { [weak self] in
+                        guard let self else {
+                            return
+                        }
+                        self.openTitleEditing()
+                    } : nil
                 )),
                 environment: {},
                 containerSize: CGSize(width: availableSize.width - sideInset * 2.0 - navigationButtonAreaWidth * 2.0 - 4.0 * 2.0, height: 100.0)
@@ -1263,7 +1327,6 @@ final class VideoChatScreenComponent: Component {
             let titleFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - titleSize.width) * 0.5), y: topInset + floor((navigationBarHeight - titleSize.height) * 0.5)), size: titleSize)
             if let titleView = self.title.view {
                 if titleView.superview == nil {
-                    titleView.isUserInteractionEnabled = false
                     self.containerView.addSubview(titleView)
                 }
                 transition.setFrame(view: titleView, frame: titleFrame)
@@ -1436,7 +1499,7 @@ final class VideoChatScreenComponent: Component {
                 component: AnyComponent(VideoChatParticipantsComponent(
                     call: component.call,
                     participants: mappedParticipants,
-                    speakingParticipants: members?.speakingParticipants ?? Set(),
+                    speakingParticipants: self.members?.speakingParticipants ?? Set(),
                     expandedVideoState: self.expandedParticipantsVideoState,
                     theme: environment.theme,
                     strings: environment.strings,
@@ -1699,7 +1762,9 @@ final class VideoChatScreenComponent: Component {
             let videoButtonContent: VideoChatActionButtonComponent.Content
             if let callState = self.callState, let muteState = callState.muteState, !muteState.canUnmute {
                 var buttonAudio: VideoChatActionButtonComponent.Content.Audio = .speaker
+                var buttonIsEnabled = false
                 if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
+                    buttonIsEnabled = availableOutputs.count > 1
                    switch currentOutput {
                    case .builtin:
                        buttonAudio = .builtin
@@ -1723,7 +1788,7 @@ final class VideoChatScreenComponent: Component {
                        buttonAudio = .none
                    }
                }
-                videoButtonContent = .audio(audio: buttonAudio)
+                videoButtonContent = .audio(audio: buttonAudio, isEnabled: buttonIsEnabled)
             } else {
                 //TODO:release
                 videoButtonContent = .video(isActive: false)
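
Note on the memberEvents subscription above: the displayEvent flag gates the "peer joined" toast to small calls (fewer than 250 members), verified peers, contacts, and chats already in the chat list. As written in the diff, the broadcast-channel branch only pre-sets the flag to false and can be overridden by the later checks; a compact restatement of the apparent intent as a pure function, with illustrative parameter names and the assumption that broadcast live streams should suppress the toast entirely:

    // Hypothetical restatement of the gating rule; not part of the commit.
    func shouldShowJoinToast(isBroadcastChannel: Bool, memberCount: Int, peerIsVerified: Bool, isContact: Bool, isInChatList: Bool) -> Bool {
        if isBroadcastChannel {
            return false
        }
        if memberCount < 250 || peerIsVerified {
            return true
        }
        return isContact || isInChatList
    }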

View File

@@ -12,17 +12,23 @@ final class VideoChatTitleComponent: Component {
     let status: String
     let isRecording: Bool
     let strings: PresentationStrings
+    let tapAction: (() -> Void)?
+    let longTapAction: (() -> Void)?
     init(
         title: String,
         status: String,
         isRecording: Bool,
-        strings: PresentationStrings
+        strings: PresentationStrings,
+        tapAction: (() -> Void)?,
+        longTapAction: (() -> Void)?
     ) {
         self.title = title
         self.status = status
         self.isRecording = isRecording
         self.strings = strings
+        self.tapAction = tapAction
+        self.longTapAction = longTapAction
     }
     static func ==(lhs: VideoChatTitleComponent, rhs: VideoChatTitleComponent) -> Bool {
@@ -38,6 +44,12 @@ final class VideoChatTitleComponent: Component {
         if lhs.strings !== rhs.strings {
             return false
         }
+        if (lhs.tapAction == nil) != (rhs.tapAction == nil) {
+            return false
+        }
+        if (lhs.longTapAction == nil) != (rhs.longTapAction == nil) {
+            return false
+        }
         return true
     }
@@ -55,6 +67,12 @@ final class VideoChatTitleComponent: Component {
         private var currentActivityStatus: String?
         private var currentSize: CGSize?
+        private var tapRecognizer: TapLongTapOrDoubleTapGestureRecognizer?
+        public var recordingIndicatorView: UIView? {
+            return self.recordingImageView
+        }
         override init(frame: CGRect) {
             self.hierarchyTrackingLayer = HierarchyTrackingLayer()
@@ -67,12 +85,33 @@ final class VideoChatTitleComponent: Component {
                 }
                 self.updateAnimations()
             }
+            let tapRecognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
+            tapRecognizer.tapActionAtPoint = { _ in
+                return .waitForSingleTap
+            }
+            self.addGestureRecognizer(tapRecognizer)
         }
         required init?(coder: NSCoder) {
             fatalError("init(coder:) has not been implemented")
         }
+        @objc private func tapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) {
+            guard let component = self.component else {
+                return
+            }
+            if case .ended = recognizer.state {
+                if let (gesture, _) = recognizer.lastRecognizedGestureAndLocation {
+                    if case .tap = gesture {
+                        component.tapAction?()
+                    } else if case .longTap = gesture {
+                        component.longTapAction?()
+                    }
+                }
+            }
+        }
         private func updateAnimations() {
             if let recordingImageView = self.recordingImageView {
                 if recordingImageView.layer.animation(forKey: "blink") == nil {
@@ -153,15 +192,22 @@ final class VideoChatTitleComponent: Component {
             self.component = component
+            self.tapRecognizer?.isEnabled = component.longTapAction != nil || component.tapAction != nil
             let spacing: CGFloat = 1.0
+            var maxTitleWidth = availableSize.width
+            if component.isRecording {
+                maxTitleWidth -= 10.0
+            }
             let titleSize = self.title.update(
                 transition: .immediate,
                 component: AnyComponent(MultilineTextComponent(
                     text: .plain(NSAttributedString(string: component.title, font: Font.semibold(17.0), textColor: .white))
                 )),
                 environment: {},
-                containerSize: CGSize(width: availableSize.width, height: 100.0)
+                containerSize: CGSize(width: maxTitleWidth, height: 100.0)
             )
             let statusComponent: AnyComponent<Empty>
@@ -181,15 +227,18 @@ final class VideoChatTitleComponent: Component {
             let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: 0.0), size: titleSize)
             if let titleView = self.title.view {
                 if titleView.superview == nil {
+                    titleView.layer.anchorPoint = CGPoint()
+                    titleView.isUserInteractionEnabled = false
                     self.addSubview(titleView)
                 }
-                transition.setPosition(view: titleView, position: titleFrame.center)
+                transition.setPosition(view: titleView, position: titleFrame.origin)
                 titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
             }
             let statusFrame = CGRect(origin: CGPoint(x: floor((size.width - statusSize.width) * 0.5), y: titleFrame.maxY + spacing), size: statusSize)
             if let statusView = self.status.view {
                 if statusView.superview == nil {
+                    statusView.isUserInteractionEnabled = false
                     self.addSubview(statusView)
                 }
                 transition.setPosition(view: statusView, position: statusFrame.center)

View File

@ -5,12 +5,16 @@ import Display
private final class VoiceChatMicrophoneNodeDrawingState: NSObject { private final class VoiceChatMicrophoneNodeDrawingState: NSObject {
let color: UIColor let color: UIColor
let shadowColor: UIColor?
let shadowBlur: CGFloat
let filled: Bool let filled: Bool
let transition: CGFloat let transition: CGFloat
let reverse: Bool let reverse: Bool
init(color: UIColor, filled: Bool, transition: CGFloat, reverse: Bool) { init(color: UIColor, shadowColor: UIColor?, shadowBlur: CGFloat, filled: Bool, transition: CGFloat, reverse: Bool) {
self.color = color self.color = color
self.shadowColor = shadowColor
self.shadowBlur = shadowBlur
self.filled = filled self.filled = filled
self.transition = transition self.transition = transition
self.reverse = reverse self.reverse = reverse
@ -24,11 +28,15 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
let muted: Bool let muted: Bool
let color: UIColor let color: UIColor
let filled: Bool let filled: Bool
let shadowColor: UIColor?
let shadowBlur: CGFloat
init(muted: Bool, filled: Bool, color: UIColor) { init(muted: Bool, filled: Bool, color: UIColor, shadowColor: UIColor? = nil, shadowBlur: CGFloat = 0.0) {
self.muted = muted self.muted = muted
self.filled = filled self.filled = filled
self.color = color self.color = color
self.shadowColor = shadowColor
self.shadowBlur = shadowBlur
} }
static func ==(lhs: State, rhs: State) -> Bool { static func ==(lhs: State, rhs: State) -> Bool {
@ -41,6 +49,12 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
if lhs.filled != rhs.filled { if lhs.filled != rhs.filled {
return false return false
} }
if lhs.shadowColor != rhs.shadowColor {
return false
}
if lhs.shadowBlur != rhs.shadowBlur {
return false
}
return true return true
} }
} }
@ -122,6 +136,8 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? { override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0 var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0
var color = self.state.color var color = self.state.color
var shadowColor = self.state.shadowColor
var shadowBlur = self.state.shadowBlur
var reverse = false var reverse = false
if let transitionContext = self.transitionContext { if let transitionContext = self.transitionContext {
@ -138,9 +154,17 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
if transitionContext.previousState.color.rgb != color.rgb { if transitionContext.previousState.color.rgb != color.rgb {
color = transitionContext.previousState.color.interpolateTo(color, fraction: t)! color = transitionContext.previousState.color.interpolateTo(color, fraction: t)!
} }
if let previousShadowColor = transitionContext.previousState.shadowColor, let shadowColorValue = shadowColor, previousShadowColor.rgb != shadowColorValue.rgb {
shadowColor = previousShadowColor.interpolateTo(shadowColorValue, fraction: t)!
}
if transitionContext.previousState.shadowBlur != shadowBlur {
shadowBlur = transitionContext.previousState.shadowBlur * (1.0 - t) + shadowBlur * t
}
} }
return VoiceChatMicrophoneNodeDrawingState(color: color, filled: self.state.filled, transition: transitionFraction, reverse: reverse) return VoiceChatMicrophoneNodeDrawingState(color: color, shadowColor: shadowColor, shadowBlur: shadowBlur, filled: self.state.filled, transition: transitionFraction, reverse: reverse)
} }
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) { @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@@ -155,9 +179,18 @@ final class VoiceChatMicrophoneNode: ASDisplayNode {
guard let parameters = parameters as? VoiceChatMicrophoneNodeDrawingState else { guard let parameters = parameters as? VoiceChatMicrophoneNodeDrawingState else {
return return
} }
var bounds = bounds
bounds = bounds.insetBy(dx: parameters.shadowBlur, dy: parameters.shadowBlur)
context.translateBy(x: bounds.minX, y: bounds.minY)
context.setFillColor(parameters.color.cgColor) context.setFillColor(parameters.color.cgColor)
if let shadowColor = parameters.shadowColor, parameters.shadowBlur != 0.0 {
context.setShadow(offset: CGSize(), blur: parameters.shadowBlur, color: shadowColor.cgColor)
}
var clearLineWidth: CGFloat = 2.0 var clearLineWidth: CGFloat = 2.0
var lineWidth: CGFloat = 1.0 + UIScreenPixel var lineWidth: CGFloat = 1.0 + UIScreenPixel
if bounds.size.width > 36.0 { if bounds.size.width > 36.0 {
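For reference, a minimal standalone sketch of the Core Graphics pattern the new shadowColor/shadowBlur parameters feed into (the function name is illustrative, not the node's actual draw path): the drawing rect is inset by the blur radius and the shadow is applied before filling, so the blurred edge is not clipped at the layer bounds.

import UIKit

// Sketch only: mirrors the shadow handling added above, with a placeholder shape.
func drawGlyphWithOptionalShadow(context: CGContext, bounds: CGRect, color: UIColor, shadowColor: UIColor?, shadowBlur: CGFloat) {
    // Inset so the blur stays inside the layer instead of being clipped.
    let drawingRect = bounds.insetBy(dx: shadowBlur, dy: shadowBlur)
    context.translateBy(x: drawingRect.minX, y: drawingRect.minY)
    context.setFillColor(color.cgColor)
    if let shadowColor, shadowBlur != 0.0 {
        // Zero offset, blur only: the same parameters the new drawing state carries.
        context.setShadow(offset: CGSize(), blur: shadowBlur, color: shadowColor.cgColor)
    }
    context.fillEllipse(in: CGRect(origin: .zero, size: drawingRect.size))
}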

View File

@@ -343,15 +343,7 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
} }
if oldValue != self.visibility { if oldValue != self.visibility {
switch self.visibility { self.updateVisibility()
case .none:
self.textNode.visibilityRect = nil
case let .visible(_, subRect):
var subRect = subRect
subRect.origin.x = 0.0
subRect.size.width = 10000.0
self.textNode.visibilityRect = subRect
}
} }
} }
} }
@@ -594,6 +586,21 @@ public class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
let isPlaying = self.visibilityStatus == true && !self.forceStopAnimations let isPlaying = self.visibilityStatus == true && !self.forceStopAnimations
var effectiveVisibility = self.visibility
if !isPlaying {
effectiveVisibility = .none
}
switch effectiveVisibility {
case .none:
self.textNode.visibilityRect = nil
case let .visible(_, subRect):
var subRect = subRect
subRect.origin.x = 0.0
subRect.size.width = 10000.0
self.textNode.visibilityRect = subRect
}
var canPlayEffects = isPlaying var canPlayEffects = isPlaying
if !item.controllerInteraction.canReadHistory { if !item.controllerInteraction.canReadHistory {
canPlayEffects = false canPlayEffects = false
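Beyond routing visibility through updateVisibility(), the hunk above also widens the text visibility rect so horizontal clipping cannot hide the caption. A self-contained sketch of that gating, with a simplified visibility type (the real ListViewItemNodeVisibility carries more information):

import CoreGraphics

// Sketch: the text node only gets a visibility rect while playback is allowed,
// and the rect is stretched horizontally, matching the subRect adjustment above.
enum ItemVisibilitySketch: Equatable {
    case none
    case visible(CGRect)
}

func textVisibilityRect(for visibility: ItemVisibilitySketch, isPlaying: Bool) -> CGRect? {
    guard isPlaying, case let .visible(subRect) = visibility else {
        return nil
    }
    var rect = subRect
    rect.origin.x = 0.0
    rect.size.width = 10000.0
    return rect
}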

View File

@@ -680,22 +680,6 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
override public var visibility: ListViewItemNodeVisibility { override public var visibility: ListViewItemNodeVisibility {
didSet { didSet {
if self.visibility != oldValue { if self.visibility != oldValue {
for contentNode in self.contentNodes {
contentNode.visibility = mapVisibility(self.visibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)
}
if let threadInfoNode = self.threadInfoNode {
threadInfoNode.visibility = self.visibility != .none
}
if let replyInfoNode = self.replyInfoNode {
replyInfoNode.visibility = self.visibility != .none
}
if let unlockButtonNode = self.unlockButtonNode {
unlockButtonNode.visibility = self.visibility != .none
}
self.visibilityStatus = self.visibility != .none self.visibilityStatus = self.visibility != .none
self.updateVisibility() self.updateVisibility()
@@ -718,6 +702,8 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
} }
} }
private var forceStopAnimations: Bool = false
required public init(rotated: Bool) { required public init(rotated: Bool) {
self.mainContextSourceNode = ContextExtractedContentContainingNode() self.mainContextSourceNode = ContextExtractedContentContainingNode()
self.mainContainerNode = ContextControllerSourceNode() self.mainContainerNode = ContextControllerSourceNode()
@@ -6207,6 +6193,11 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
return false return false
} }
override public func updateStickerSettings(forceStopAnimations: Bool) {
self.forceStopAnimations = forceStopAnimations
self.updateVisibility()
}
private func updateVisibility() { private func updateVisibility() {
guard let item = self.item else { guard let item = self.item else {
return return
@@ -6223,11 +6214,35 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
if !item.controllerInteraction.canReadHistory { if !item.controllerInteraction.canReadHistory {
isPlaying = false isPlaying = false
} }
if self.forceStopAnimations {
isPlaying = false
}
if !isPlaying { if !isPlaying {
self.removeEffectAnimations() self.removeEffectAnimations()
} }
var effectiveVisibility = self.visibility
if !isPlaying {
effectiveVisibility = .none
}
for contentNode in self.contentNodes {
contentNode.visibility = mapVisibility(effectiveVisibility, boundsSize: self.bounds.size, insets: self.insets, to: contentNode)
}
if let threadInfoNode = self.threadInfoNode {
threadInfoNode.visibility = effectiveVisibility != .none
}
if let replyInfoNode = self.replyInfoNode {
replyInfoNode.visibility = effectiveVisibility != .none
}
if let unlockButtonNode = self.unlockButtonNode {
unlockButtonNode.visibility = effectiveVisibility != .none
}
if isPlaying { if isPlaying {
var alreadySeen = true var alreadySeen = true
if item.message.flags.contains(.Incoming) { if item.message.flags.contains(.Incoming) {
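A condensed sketch of the control flow this file moves to (class and property names are illustrative): the external stop flag and the list visibility both funnel into one updateVisibility() pass, which derives a single isPlaying value for all child nodes instead of setting them from the visibility didSet.

// Sketch only: a simplified stand-in, not the real ChatMessageBubbleItemNode API.
final class MessageItemNodeSketch {
    private var forceStopAnimations = false
    private var isOnScreen = false
    private var canReadHistory = true

    // Mirrors the new updateStickerSettings(forceStopAnimations:) override.
    func updateStickerSettings(forceStopAnimations: Bool) {
        self.forceStopAnimations = forceStopAnimations
        self.updateVisibility()
    }

    // Called when the list reports the node scrolling in or out of view.
    func visibilityUpdated(isOnScreen: Bool) {
        self.isOnScreen = isOnScreen
        self.updateVisibility()
    }

    private func updateVisibility() {
        var isPlaying = self.isOnScreen
        if !self.canReadHistory {
            isPlaying = false
        }
        if self.forceStopAnimations {
            isPlaying = false
        }
        // Content nodes, thread/reply info and the unlock button would all be driven
        // from this one derived "effective visibility" value.
        _ = isPlaying
    }
}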

View File

@@ -1425,6 +1425,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
if let strongSelf = self { if let strongSelf = self {
if file.isAnimated { if file.isAnimated {
strongSelf.fetchDisposable.set(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(message.id.peerId), userContentType: MediaResourceUserContentType(file: file), reference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes)).startStrict()) strongSelf.fetchDisposable.set(fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .peer(message.id.peerId), userContentType: MediaResourceUserContentType(file: file), reference: AnyMediaReference.message(message: MessageReference(message), media: file).resourceReference(file.resource), statsCategory: statsCategoryForFileWithAttributes(file.attributes)).startStrict())
} else if NativeVideoContent.isHLSVideo(file: file) {
strongSelf.fetchDisposable.set(nil)
} else { } else {
strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(context: context, message: message, file: file, userInitiated: manual, storeToDownloadsPeerId: storeToDownloadsPeerId).startStrict()) strongSelf.fetchDisposable.set(messageMediaFileInteractiveFetched(context: context, message: message, file: file, userInitiated: manual, storeToDownloadsPeerId: storeToDownloadsPeerId).startStrict())
} }
@@ -1659,16 +1661,12 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
let loopVideo = updatedVideoFile.isAnimated let loopVideo = updatedVideoFile.isAnimated
let videoContent: UniversalVideoContent let videoContent: UniversalVideoContent
if !"".isEmpty && NativeVideoContent.isHLSVideo(file: updatedVideoFile) { videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in
videoContent = HLSVideoContent(id: .message(message.id, message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: true, loopVideo: loopVideo) guard let context, let peerId else {
} else { return
videoContent = NativeVideoContent(id: .message(message.stableId, updatedVideoFile.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: updatedVideoFile), streamVideo: streamVideo ? .conservative : .none, loopVideo: loopVideo, enableSound: false, fetchAutomatically: false, onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false), continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo, placeholderColor: emptyColor, captureProtected: message.isCopyProtected() || isExtendedMedia, storeAfterDownload: { [weak context] in }
guard let context, let peerId else { let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
return })
}
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
})
}
let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded) let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false videoNode.isUserInteractionEnabled = false
videoNode.ownsContentNodeUpdated = { [weak self] owns in videoNode.ownsContentNodeUpdated = { [weak self] owns in
@@ -1850,7 +1848,32 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
} }
} }
if case .full = automaticDownload { if automaticDownload != .none, let file = media as? TelegramMediaFile, NativeVideoContent.isHLSVideo(file: file) {
let postbox = context.account.postbox
let fetchSignal = HLSVideoContent.minimizedHLSQualityPreloadData(postbox: context.account.postbox, file: .message(message: MessageReference(message), media: file), userLocation: .peer(message.id.peerId), prefixSeconds: 10, autofetchPlaylist: true)
|> mapToSignal { fileAndRange -> Signal<Never, NoError> in
guard let fileAndRange else {
return .complete()
}
return freeMediaFileResourceInteractiveFetched(postbox: postbox, userLocation: .peer(message.id.peerId), fileReference: fileAndRange.0, resource: fileAndRange.0.media.resource, range: (fileAndRange.1, .default))
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}
let visibilityAwareFetchSignal = strongSelf.visibilityPromise.get()
|> mapToSignal { visibility -> Signal<Void, NoError> in
if visibility {
return fetchSignal
|> mapToSignal { _ -> Signal<Void, NoError> in
}
} else {
return .complete()
}
}
strongSelf.fetchDisposable.set(visibilityAwareFetchSignal.startStrict())
} else if case .full = automaticDownload {
if let _ = media as? TelegramMediaImage { if let _ = media as? TelegramMediaImage {
updatedFetchControls.fetch(false) updatedFetchControls.fetch(false)
} else if let image = media as? TelegramMediaWebFile { } else if let image = media as? TelegramMediaWebFile {
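The block above preloads roughly the first 10 seconds of the minimized HLS quality, but only while the node is visible. A reduced sketch of that gating using the same SwiftSignalKit operators that appear in the hunk (the helper name is hypothetical; tearing the prefetch down when visibility drops assumes mapToSignal's switch-to-latest behavior):

import SwiftSignalKit

// Sketch: run an expensive prefetch signal only while the owning node is visible.
func visibilityGatedPrefetch(visibility: Signal<Bool, NoError>, prefetch: Signal<Never, NoError>) -> Signal<Never, NoError> {
    return visibility
    |> mapToSignal { isVisible -> Signal<Never, NoError> in
        if isVisible {
            return prefetch
        } else {
            return .complete()
        }
    }
}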

View File

@@ -24,6 +24,7 @@ swift_library(
"//submodules/TelegramUI/Components/Chat/ChatMessageBubbleContentNode", "//submodules/TelegramUI/Components/Chat/ChatMessageBubbleContentNode",
"//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon", "//submodules/TelegramUI/Components/Chat/ChatMessageItemCommon",
"//submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode", "//submodules/TelegramUI/Components/Chat/ChatMessageInteractiveMediaNode",
"//submodules/TelegramUniversalVideoContent",
], ],
visibility = [ visibility = [
"//visibility:public", "//visibility:public",

View File

@@ -16,6 +16,7 @@ import ChatMessageItemCommon
import ChatMessageInteractiveMediaNode import ChatMessageInteractiveMediaNode
import ChatControllerInteraction import ChatControllerInteraction
import InvisibleInkDustNode import InvisibleInkDustNode
import TelegramUniversalVideoContent
public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode { public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
override public var supportsMosaic: Bool { override public var supportsMosaic: Bool {
@@ -163,7 +164,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
} }
} else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo { } else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo {
if case .full = automaticDownload { if NativeVideoContent.isHLSVideo(file: telegramFile) {
automaticPlayback = true
} else if case .full = automaticDownload {
automaticPlayback = true automaticPlayback = true
} else { } else {
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
@@ -207,7 +210,9 @@ public class ChatMessageMediaBubbleContentNode: ChatMessageBubbleContentNode {
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
} }
} else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo { } else if (telegramFile.isVideo && !telegramFile.isAnimated) && item.context.sharedContext.energyUsageSettings.autoplayVideo {
if case .full = automaticDownload { if NativeVideoContent.isHLSVideo(file: telegramFile) {
automaticPlayback = true
} else if case .full = automaticDownload {
automaticPlayback = true automaticPlayback = true
} else { } else {
automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil automaticPlayback = item.context.account.postbox.mediaBox.completedResourcePath(telegramFile.resource) != nil
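A compact sketch of the resulting autoplay decision (free function and parameter names are illustrative): HLS videos stream from a playlist, so they no longer wait for a fully downloaded resource, while other videos keep the existing download-based conditions.

// Sketch only: not the content node's actual code path.
func shouldAutoplayVideo(autoplaySettingEnabled: Bool, isHLS: Bool, automaticDownloadIsFull: Bool, hasCompletedLocalResource: Bool) -> Bool {
    guard autoplaySettingEnabled else {
        return false
    }
    if isHLS {
        // Streaming playback does not need the file on disk.
        return true
    }
    if automaticDownloadIsFull {
        return true
    }
    return hasCompletedLocalResource
}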

View File

@@ -101,6 +101,8 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
} }
} }
private var forceStopAnimations: Bool = false
required public init(rotated: Bool) { required public init(rotated: Bool) {
self.contextSourceNode = ContextExtractedContentContainingNode() self.contextSourceNode = ContextExtractedContentContainingNode()
self.containerNode = ContextControllerSourceNode() self.containerNode = ContextControllerSourceNode()
@@ -2160,6 +2162,9 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
if !item.controllerInteraction.canReadHistory { if !item.controllerInteraction.canReadHistory {
isPlaying = false isPlaying = false
} }
if self.forceStopAnimations {
isPlaying = false
}
if !isPlaying { if !isPlaying {
self.removeEffectAnimations() self.removeEffectAnimations()
@@ -2191,6 +2196,11 @@ public class ChatMessageStickerItemNode: ChatMessageItemView {
} }
} }
override public func updateStickerSettings(forceStopAnimations: Bool) {
self.forceStopAnimations = forceStopAnimations
self.updateVisibility()
}
override public func messageEffectTargetView() -> UIView? { override public func messageEffectTargetView() -> UIView? {
if let result = self.dateAndStatusNode.messageEffectTargetView() { if let result = self.dateAndStatusNode.messageEffectTargetView() {
return result return result

View File

@@ -6946,12 +6946,21 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
} }
}) })
self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(), context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]), self.disableStickerAnimationsPromise.get()).startStrict(next: { [weak self] sharedData, disableStickerAnimations in self.stickerSettingsDisposable = combineLatest(queue: Queue.mainQueue(),
context.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.stickerSettings]),
self.disableStickerAnimationsPromise.get(),
context.sharedContext.hasGroupCallOnScreen
).startStrict(next: { [weak self] sharedData, disableStickerAnimations, hasGroupCallOnScreen in
var stickerSettings = StickerSettings.defaultSettings var stickerSettings = StickerSettings.defaultSettings
if let value = sharedData.entries[ApplicationSpecificSharedDataKeys.stickerSettings]?.get(StickerSettings.self) { if let value = sharedData.entries[ApplicationSpecificSharedDataKeys.stickerSettings]?.get(StickerSettings.self) {
stickerSettings = value stickerSettings = value
} }
var disableStickerAnimations = disableStickerAnimations
if hasGroupCallOnScreen {
disableStickerAnimations = true
}
let chatStickerSettings = ChatInterfaceStickerSettings(stickerSettings: stickerSettings) let chatStickerSettings = ChatInterfaceStickerSettings(stickerSettings: stickerSettings)
if let strongSelf = self, strongSelf.stickerSettings != chatStickerSettings || strongSelf.disableStickerAnimationsValue != disableStickerAnimations { if let strongSelf = self, strongSelf.stickerSettings != chatStickerSettings || strongSelf.disableStickerAnimationsValue != disableStickerAnimations {
strongSelf.stickerSettings = chatStickerSettings strongSelf.stickerSettings = chatStickerSettings
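The combined signal above effectively ORs the explicit disable flag with sharedContext.hasGroupCallOnScreen; a minimal sketch of that combination with placeholder signal names:

import SwiftSignalKit

// Sketch: sticker animations are force-disabled whenever a group call UI is on screen.
func effectiveDisableStickerAnimations(explicitlyDisabled: Signal<Bool, NoError>, hasGroupCallOnScreen: Signal<Bool, NoError>) -> Signal<Bool, NoError> {
    return combineLatest(explicitlyDisabled, hasGroupCallOnScreen)
    |> map { disabled, hasGroupCall -> Bool in
        return disabled || hasGroupCall
    }
}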

@@ -1 +1 @@ Subproject commit 846f7040480f52b8bc0382fb9e2e78e8ef60c633 Subproject commit b6e7349b98c5d3999f45e9468eee068aff86ee37
Subproject commit 846f7040480f52b8bc0382fb9e2e78e8ef60c633 Subproject commit b6e7349b98c5d3999f45e9468eee068aff86ee37