Pre-release fixes

This commit is contained in:
Ali 2021-05-27 00:53:13 +04:00
parent e0c9559a5c
commit ce23616a4c
18 changed files with 307 additions and 198 deletions

View File

@@ -436,7 +436,7 @@ public protocol PresentationGroupCall: class {
var inviteLinks: Signal<GroupCallInviteLinks?, NoError> { get }
func makeIncomingVideoView(endpointId: String, completion: @escaping (PresentationCallVideoView?) -> Void)
func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void)
func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void)
func loadMoreMembers(token: String)

View File

@@ -90,9 +90,10 @@ final class SettingsThemeWallpaperNode: ASDisplayNode {
gradientNode.isUserInteractionEnabled = false
self.gradientNode = gradientNode
gradientNode.updateColors(colors: colors.map { UIColor(rgb: $0) })
self.insertSubnode(gradientNode, aboveSubnode: self.backgroundNode)
self.insertSubnode(gradientNode, belowSubnode: self.imageNode)
}
self.imageNode.layer.compositingFilter = "softLightBlendMode"
self.backgroundNode.image = nil
} else {
if let gradientNode = self.gradientNode {
@@ -132,7 +133,7 @@ final class SettingsThemeWallpaperNode: ASDisplayNode {
apply()
case let .image(representations, _):
let convertedRepresentations: [ImageRepresentationWithReference] = representations.map({ ImageRepresentationWithReference(representation: $0, reference: .wallpaper(wallpaper: nil, resource: $0.resource)) })
self.imageNode.alpha = 10
self.imageNode.alpha = 1.0
self.imageNode.setSignal(wallpaperImage(account: context.account, accountManager: context.sharedContext.accountManager, representations: convertedRepresentations, thumbnail: true, autoFetchFullSize: true, synchronousLoad: synchronousLoad))
let apply = self.imageNode.asyncLayout()(TransformImageArguments(corners: corners, imageSize: largestImageRepresentation(representations)!.dimensions.cgSize.aspectFilled(size), boundingSize: size, intrinsicInsets: UIEdgeInsets()))
@@ -161,7 +162,7 @@ final class SettingsThemeWallpaperNode: ASDisplayNode {
self.imageNode.alpha = CGFloat(file.settings.intensity ?? 50) / 100.0
self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 0.3))
self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 1.0))
imageSignal = patternWallpaperImage(account: context.account, accountManager: context.sharedContext.accountManager, representations: convertedRepresentations, mode: .thumbnail, autoFetchFullSize: true)
} else {
self.imageNode.alpha = 1.0

View File

@@ -56,6 +56,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
private let instantChatBackgroundNode: WallpaperBackgroundNode
private let remoteChatBackgroundNode: TransformImageNode
private let blurredNode: BlurredImageNode
private let wallpaperNode: WallpaperBackgroundNode
private var dateHeaderNode: ListViewItemHeaderNode?
private var messageNodes: [ListViewItemNode]?
@@ -69,11 +70,15 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
private var fetchDisposable = MetaDisposable()
private var dismissed = false
private var wallpaper: TelegramWallpaper
init(context: AccountContext, previewTheme: PresentationTheme, initialWallpaper: TelegramWallpaper?, dismiss: @escaping () -> Void, apply: @escaping () -> Void, isPreview: Bool, ready: Promise<Bool>) {
self.context = context
self.previewTheme = previewTheme
self.isPreview = isPreview
self.wallpaper = initialWallpaper ?? previewTheme.chat.defaultWallpaper
self.ready = ready
@@ -105,8 +110,6 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
self.instantChatBackgroundNode = WallpaperBackgroundNode(context: context)
self.instantChatBackgroundNode.displaysAsynchronously = false
let wallpaper = initialWallpaper ?? previewTheme.chat.defaultWallpaper
self.ready.set(.single(true))
self.instantChatBackgroundNode.update(wallpaper: wallpaper)
@@ -117,6 +120,8 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
self.blurredNode = BlurredImageNode()
self.blurredNode.blurView.contentMode = .scaleAspectFill
self.wallpaperNode = WallpaperBackgroundNode(context: context)
self.toolbarNode = WallpaperGalleryToolbarNode(theme: self.previewTheme, strings: self.presentationData.strings, doneButtonType: .set)
@@ -143,7 +148,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
self.chatListBackgroundNode.backgroundColor = self.previewTheme.chatList.backgroundColor
self.maskNode.image = generateMaskImage(color: self.previewTheme.chatList.backgroundColor)
if case let .color(value) = self.previewTheme.chat.defaultWallpaper {
if case let .color(value) = self.wallpaper {
self.instantChatBackgroundNode.backgroundColor = UIColor(rgb: value)
}
@@ -178,11 +183,21 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
}
}
}
if case let .file(file) = self.previewTheme.chat.defaultWallpaper {
var gradientColors: [UInt32] = []
if case let .file(file) = self.wallpaper {
gradientColors = file.settings.colors
if file.settings.blur {
self.chatContainerNode.insertSubnode(self.blurredNode, belowSubnode: self.messagesContainerNode)
}
} else if case let .gradient(colors, _) = self.wallpaper {
gradientColors = colors
}
if gradientColors.count >= 3 {
self.chatContainerNode.insertSubnode(self.wallpaperNode, belowSubnode: self.messagesContainerNode)
self.wallpaperNode.update(wallpaper: self.wallpaper)
}
self.remoteChatBackgroundNode.imageUpdated = { [weak self] image in
@@ -309,7 +324,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
self.chatListBackgroundNode.backgroundColor = self.previewTheme.chatList.backgroundColor
self.maskNode.image = generateMaskImage(color: self.previewTheme.chatList.backgroundColor)
if case let .color(value) = self.previewTheme.chat.defaultWallpaper {
if case let .color(value) = self.wallpaper {
self.instantChatBackgroundNode.backgroundColor = UIColor(rgb: value)
}
@@ -483,7 +498,7 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
sampleMessages.append(message8)
items = sampleMessages.reversed().map { message in
self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.previewTheme.chat.defaultWallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil)
self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.previewTheme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: nil, clickThroughMessage: nil)
}
let width: CGFloat
@@ -590,6 +605,8 @@ final class ThemePreviewControllerNode: ASDisplayNode, UIScrollViewDelegate {
self.instantChatBackgroundNode.updateLayout(size: self.instantChatBackgroundNode.bounds.size, transition: .immediate)
self.remoteChatBackgroundNode.frame = self.chatContainerNode.bounds
self.blurredNode.frame = self.chatContainerNode.bounds
self.wallpaperNode.frame = self.chatContainerNode.bounds
self.wallpaperNode.updateLayout(size: self.wallpaperNode.bounds.size, transition: .immediate)
transition.updateFrame(node: self.toolbarNode, frame: CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - toolbarHeight), size: CGSize(width: layout.size.width, height: toolbarHeight)))
self.toolbarNode.updateLayout(size: CGSize(width: layout.size.width, height: 49.0), layout: layout, transition: transition)

View File

@@ -614,8 +614,10 @@ class ThemeSettingsThemeItemNode: ListViewItemNode, ItemListItemNode {
var entries: [ThemeSettingsThemeEntry] = []
var index: Int = 0
for var theme in item.themes {
if !item.displayUnsupported, case let .cloud(theme) = theme, theme.theme.file == nil {
continue
if case let .cloud(theme) = theme {
if !item.displayUnsupported && theme.theme.file == nil {
continue
}
}
let title = themeDisplayName(strings: item.strings, reference: theme)
var accentColor = item.themeSpecificAccentColors[theme.generalThemeReference.index]
@@ -625,8 +627,15 @@ class ThemeSettingsThemeItemNode: ListViewItemNode, ItemListItemNode {
}
accentColor = nil
}
var themeWallpaper: TelegramWallpaper?
if case let .cloud(theme) = theme {
themeWallpaper = theme.resolvedWallpaper ?? theme.theme.settings?.wallpaper
}
let customWallpaper = item.themeSpecificChatWallpapers[theme.generalThemeReference.index]
let wallpaper = accentColor?.wallpaper
let wallpaper = accentColor?.wallpaper ?? customWallpaper ?? themeWallpaper
entries.append(ThemeSettingsThemeEntry(index: index, themeReference: theme, title: title, accentColor: accentColor, selected: item.currentTheme.index == theme.index, theme: item.theme, wallpaper: wallpaper))
index += 1
}

View File

@@ -902,10 +902,12 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
} else if self.colorsButtonNode.isSelected {
additionalYOffset = -235.0
}*/
let buttonSpacing: CGFloat = 18.0
let leftButtonFrame = CGRect(origin: CGPoint(x: floor(layout.size.width / 2.0 - buttonSize.width - 10.0) + offset.x, y: layout.size.height - 49.0 - layout.intrinsicInsets.bottom - 54.0 + offset.y + additionalYOffset), size: buttonSize)
let leftButtonFrame = CGRect(origin: CGPoint(x: floor(layout.size.width / 2.0 - buttonSize.width - buttonSpacing) + offset.x, y: layout.size.height - 49.0 - layout.intrinsicInsets.bottom - 54.0 + offset.y + additionalYOffset), size: buttonSize)
let centerButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - buttonSize.width) / 2.0) + offset.x, y: layout.size.height - 49.0 - layout.intrinsicInsets.bottom - 54.0 + offset.y + additionalYOffset), size: buttonSize)
let rightButtonFrame = CGRect(origin: CGPoint(x: ceil(layout.size.width / 2.0 + 10.0) + offset.x, y: layout.size.height - 49.0 - layout.intrinsicInsets.bottom - 54.0 + offset.y + additionalYOffset), size: buttonSize)
let rightButtonFrame = CGRect(origin: CGPoint(x: ceil(layout.size.width / 2.0 + buttonSpacing) + offset.x, y: layout.size.height - 49.0 - layout.intrinsicInsets.bottom - 54.0 + offset.y + additionalYOffset), size: buttonSize)
var patternAlpha: CGFloat = 0.0
var patternFrame = centerButtonFrame

View File

@@ -24,7 +24,7 @@ final class PresentationCallToneRenderer {
private var toneRendererAudioSessionActivated = false
private let audioLevelPipe = ValuePipe<Float>()
init(tone: PresentationCallTone) {
init(tone: PresentationCallTone, completed: (() -> Void)? = nil) {
let queue = Queue.mainQueue()
self.queue = queue
@ -52,6 +52,7 @@ final class PresentationCallToneRenderer {
let toneDataOffset = Atomic<Int>(value: 0)
let toneData = Atomic<Data?>(value: nil)
let reportedCompletion = Atomic<Bool>(value: false)
self.toneRenderer.beginRequestingFrames(queue: DispatchQueue.global(), takeFrame: {
var data = toneData.with { $0 }
@@ -63,6 +64,9 @@
}
guard let toneData = data else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
@@ -83,6 +87,11 @@
if let takeOffset = takeOffset {
if let toneDataMaxOffset = toneDataMaxOffset, takeOffset >= toneDataMaxOffset {
if !reportedCompletion.swap(true) {
Queue.mainQueue().after(1.0, {
completed?()
})
}
return .finished
}
@@ -117,6 +126,9 @@
let status = CMBlockBufferCreateWithMemoryBlock(allocator: nil, memoryBlock: bytes, blockLength: frameSize, blockAllocator: nil, customBlockSource: nil, offsetToData: 0, dataLength: frameSize, flags: 0, blockBufferOut: &blockBuffer)
if status != noErr {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
@@ -127,15 +139,24 @@
var sampleBuffer: CMSampleBuffer?
var sampleSize = frameSize
guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
if let sampleBuffer = sampleBuffer {
return .frame(MediaTrackFrame(type: .audio, sampleBuffer: sampleBuffer, resetDecoder: false, decoded: true))
} else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
} else {
if !reportedCompletion.swap(true) {
completed?()
}
return .finished
}
})

View File

@@ -1566,9 +1566,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if (wasConnecting != isConnecting && strongSelf.didConnectOnce) {
if isConnecting {
let toneRenderer = PresentationCallToneRenderer(tone: .groupConnecting)
strongSelf.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(strongSelf.isAudioSessionActive)
strongSelf.beginTone(tone: .groupConnecting)
} else {
strongSelf.toneRenderer = nil
}
@@ -1583,9 +1581,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf.didConnectOnce = true
if !strongSelf.isScheduled {
let toneRenderer = PresentationCallToneRenderer(tone: .groupJoined)
strongSelf.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(strongSelf.isAudioSessionActive)
strongSelf.beginTone(tone: .groupJoined)
}
}
@@ -2143,6 +2139,24 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
private func beginTone(tone: PresentationCallTone) {
var completed: (() -> Void)?
let toneRenderer = PresentationCallToneRenderer(tone: tone, completed: {
completed?()
})
completed = { [weak self, weak toneRenderer] in
Queue.mainQueue().async {
guard let strongSelf = self, let toneRenderer = toneRenderer, toneRenderer === strongSelf.toneRenderer else {
return
}
strongSelf.toneRenderer = nil
}
}
self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
}
public func playTone(_ tone: PresentationGroupCallTone) {
let name: String
switch tone {
@@ -2152,9 +2166,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
name = "voip_group_recording_started.mp3"
}
let toneRenderer = PresentationCallToneRenderer(tone: .custom(name: name, loopCount: 1))
self.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(self.isAudioSessionActive)
self.beginTone(tone: .custom(name: name, loopCount: 1))
}
private func markAsCanBeRemoved() {
@@ -2182,10 +2194,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf.wasRemoved.set(.single(true))
return
}
let toneRenderer = PresentationCallToneRenderer(tone: .groupLeft)
strongSelf.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(strongSelf.isAudioSessionActive)
strongSelf.beginTone(tone: .groupLeft)
Queue.mainQueue().after(1.0, {
strongSelf.wasRemoved.set(.single(true))
@@ -2373,10 +2383,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return
}
strongSelf.updateSessionState(internalState: .active(callInfo), audioSessionControl: strongSelf.audioSessionControl)
let toneRenderer = PresentationCallToneRenderer(tone: .groupJoined)
strongSelf.toneRenderer = toneRenderer
toneRenderer.setAudioSessionActive(strongSelf.isAudioSessionActive)
strongSelf.beginTone(tone: .groupJoined)
}))
}
@@ -2910,8 +2918,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.participantsContext?.updateDefaultParticipantsAreMuted(isMuted: isMuted)
}
public func makeIncomingVideoView(endpointId: String, completion: @escaping (PresentationCallVideoView?) -> Void) {
self.genericCallContext?.makeIncomingVideoView(endpointId: endpointId, completion: { view in
public func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void) {
self.genericCallContext?.makeIncomingVideoView(endpointId: endpointId, requestClone: requestClone, completion: { view, _ in
if let view = view {
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
let setOnOrientationUpdated = view.setOnOrientationUpdated
@@ -2972,9 +2980,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
updateIsEnabled: { value in
updateIsEnabled(value)
}
))
), nil)
} else {
completion(nil)
completion(nil, nil)
}
})
}

View File

@@ -3844,6 +3844,9 @@ public final class VoiceChatController: ViewController {
self.actionButton.update(size: centralButtonSize, buttonSize: CGSize(width: 112.0, height: 112.0), state: actionButtonState, title: actionButtonTitle, subtitle: actionButtonSubtitle, dark: self.isFullscreen, small: smallButtons, animated: true)
var hasCameraButton = self.callState?.isVideoEnabled ?? false
#if DEBUG
hasCameraButton = true
#endif
switch actionButtonState {
case let .active(state):
@ -4514,52 +4517,46 @@ public final class VoiceChatController: ViewController {
if !self.requestedVideoSources.contains(channel.endpointId) {
self.requestedVideoSources.insert(channel.endpointId)
self.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] videoView in
Queue.mainQueue().async {
self?.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] backdropVideoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
self.call.makeIncomingVideoView(endpointId: channel.endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
guard let strongSelf = self, let videoView = videoView else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
strongSelf.readyVideoDisposables.set((videoNode.ready
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue
).start(next: { [weak self, weak videoNode] _ in
if let strongSelf = self, let videoNode = videoNode {
Queue.mainQueue().after(0.1) {
strongSelf.readyVideoNodes.insert(channel.endpointId)
if videoNode.aspectRatio <= 0.77 {
strongSelf.wideVideoNodes.insert(channel.endpointId)
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
strongSelf.readyVideoDisposables.set((videoNode.ready
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue
).start(next: { [weak self, weak videoNode] _ in
if let strongSelf = self, let videoNode = videoNode {
Queue.mainQueue().after(0.1) {
strongSelf.readyVideoNodes.insert(channel.endpointId)
if videoNode.aspectRatio <= 0.77 {
strongSelf.wideVideoNodes.insert(channel.endpointId)
}
strongSelf.updateMembers()
if let interaction = strongSelf.itemInteraction {
loop: for i in 0 ..< strongSelf.currentFullscreenEntries.count {
let entry = strongSelf.currentFullscreenEntries[i]
switch entry {
case let .peer(peerEntry, _):
if peerEntry.effectiveVideoEndpointId == channel.endpointId {
let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
strongSelf.fullscreenListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.fullscreenItem(context: strongSelf.context, presentationData: presentationData, interaction: interaction), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
break
}
}
strongSelf.updateMembers()
if let interaction = strongSelf.itemInteraction {
loop: for i in 0 ..< strongSelf.currentFullscreenEntries.count {
let entry = strongSelf.currentFullscreenEntries[i]
switch entry {
case let .peer(peerEntry, _):
if peerEntry.effectiveVideoEndpointId == channel.endpointId {
let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
strongSelf.fullscreenListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.fullscreenItem(context: strongSelf.context, presentationData: presentationData, interaction: interaction), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
break
}
}
}), forKey: channel.endpointId)
strongSelf.videoNodes[channel.endpointId] = videoNode
if let _ = strongSelf.validLayout {
strongSelf.updateMembers()
}
}
})
}
}), forKey: channel.endpointId)
strongSelf.videoNodes[channel.endpointId] = videoNode
if let _ = strongSelf.validLayout {
strongSelf.updateMembers()
}
})
}

View File

@@ -620,94 +620,90 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.setAvatarHidden(true)
}
self.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] videoView in
self.call.makeIncomingVideoView(endpointId: endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
Queue.mainQueue().async {
self?.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] backdropVideoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
videoNode.tapped = { [weak self] in
guard let strongSelf = self else {
return
guard let strongSelf = self, let videoView = videoView else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
videoNode.tapped = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.tap()
}
videoNode.sourceContainerNode.activate = { [weak self] sourceNode in
guard let strongSelf = self else {
return
}
strongSelf.setControlsHidden(true, animated: false)
strongSelf.controlsHidden?(true)
let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
return UIScreen.main.bounds
})
strongSelf.context.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
}
videoNode.sourceContainerNode.animatedOut = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.controlsHidden?(false)
strongSelf.setControlsHidden(false, animated: true)
}
videoNode.isUserInteractionEnabled = true
let previousVideoNode = strongSelf.currentVideoNode
strongSelf.currentVideoNode = videoNode
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode)
if delayTransition {
videoNode.alpha = 0.0
}
if waitForFullSize {
strongSelf.videoReadyDisposable.set((videoNode.ready
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
Queue.mainQueue().after(0.07) {
completion?()
if let strongSelf = self {
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
}
strongSelf.tap()
}
videoNode.sourceContainerNode.activate = { [weak self] sourceNode in
guard let strongSelf = self else {
return
}
strongSelf.setControlsHidden(true, animated: false)
strongSelf.controlsHidden?(true)
let pinchController = PinchController(sourceNode: sourceNode, getContentAreaInScreenSpace: {
return UIScreen.main.bounds
})
strongSelf.context.sharedContext.mainWindow?.presentInGlobalOverlay(pinchController)
}
videoNode.sourceContainerNode.animatedOut = { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.controlsHidden?(false)
strongSelf.setControlsHidden(false, animated: true)
}
videoNode.isUserInteractionEnabled = true
let previousVideoNode = strongSelf.currentVideoNode
strongSelf.currentVideoNode = videoNode
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode)
if delayTransition {
videoNode.alpha = 0.0
}
if waitForFullSize {
strongSelf.videoReadyDisposable.set((videoNode.ready
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
Queue.mainQueue().after(0.07) {
completion?()
if let strongSelf = self {
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
}
if delayTransition {
if let videoNode = strongSelf.currentVideoNode {
videoNode.alpha = 1.0
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.setAvatarHidden(true)
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
}
}
})
}
} else {
Queue.mainQueue().after(0.07) {
if delayTransition {
if let videoNode = strongSelf.currentVideoNode {
videoNode.alpha = 1.0
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.setAvatarHidden(true)
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
}
}
})
}
} else {
Queue.mainQueue().after(0.07) {
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
}
}
}))
} else {
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
}
strongSelf.videoReadyDisposable.set(nil)
completion?()
}
}))
} else {
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
}
})
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
}
strongSelf.videoReadyDisposable.set(nil)
completion?()
}
}
})
} else {

View File

@@ -1658,7 +1658,8 @@ final class PeerInfoHeaderNode: ASDisplayNode {
private let backgroundNode: NavigationBackgroundNode
private let expandedBackgroundNode: NavigationBackgroundNode
let separatorNode: ASDisplayNode
let navigationBackgroundNode: NavigationBackgroundNode
let navigationBackgroundNode: ASDisplayNode
let navigationBackgroundBackgroundNode: ASDisplayNode
var navigationTitle: String?
let navigationTitleNode: ImmediateTextNode
let navigationSeparatorNode: ASDisplayNode
@@ -1720,9 +1721,12 @@ final class PeerInfoHeaderNode: ASDisplayNode {
self.avatarOverlayNode = PeerInfoEditingAvatarOverlayNode(context: context)
self.avatarOverlayNode.isUserInteractionEnabled = false
self.navigationBackgroundNode = NavigationBackgroundNode(color: .clear)
self.navigationBackgroundNode = ASDisplayNode()
self.navigationBackgroundNode.isHidden = true
self.navigationBackgroundNode.isUserInteractionEnabled = false
self.navigationBackgroundBackgroundNode = ASDisplayNode()
self.navigationBackgroundBackgroundNode.isUserInteractionEnabled = false
self.navigationTitleNode = ImmediateTextNode()
@@ -1762,6 +1766,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
self.addSubnode(self.editingContentNode)
self.addSubnode(self.avatarOverlayNode)
self.addSubnode(self.navigationBackgroundNode)
self.navigationBackgroundNode.addSubnode(self.navigationBackgroundBackgroundNode)
self.navigationBackgroundNode.addSubnode(self.navigationTitleNode)
self.navigationBackgroundNode.addSubnode(self.navigationSeparatorNode)
self.addSubnode(self.navigationButtonContainer)
@@ -1963,7 +1968,7 @@ final class PeerInfoHeaderNode: ASDisplayNode {
} else {
let backgroundTransitionFraction: CGFloat = max(0.0, min(1.0, contentOffset / (112.0 + avatarSize)))
self.expandedBackgroundNode.updateColor(color: presentationData.theme.rootController.navigationBar.blurredBackgroundColor.mixedWith(presentationData.theme.list.itemBlocksBackgroundColor, alpha: 1.0 - backgroundTransitionFraction), forceKeepBlur: true, transition: transition)
self.expandedBackgroundNode.updateColor(color: presentationData.theme.rootController.navigationBar.opaqueBackgroundColor.mixedWith(presentationData.theme.list.itemBlocksBackgroundColor, alpha: 1.0 - backgroundTransitionFraction), forceKeepBlur: true, transition: transition)
}
self.avatarListNode.avatarContainerNode.updateTransitionFraction(transitionFraction, transition: transition)
@@ -1978,11 +1983,15 @@ final class PeerInfoHeaderNode: ASDisplayNode {
self.navigationTitleNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((width - navigationTitleSize.width) / 2.0), y: navigationHeight - 44.0 + floorToScreenPixels((44.0 - navigationTitleSize.height) / 2.0)), size: navigationTitleSize)
self.navigationBackgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: width, height: navigationHeight))
self.navigationBackgroundNode.update(size: self.navigationBackgroundNode.bounds.size, transition: .immediate)
self.navigationBackgroundBackgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: width, height: navigationHeight))
self.navigationSeparatorNode.frame = CGRect(origin: CGPoint(x: 0.0, y: navigationHeight), size: CGSize(width: width, height: UIScreenPixel))
self.navigationBackgroundNode.color = presentationData.theme.rootController.navigationBar.blurredBackgroundColor
self.navigationBackgroundBackgroundNode.backgroundColor = presentationData.theme.rootController.navigationBar.opaqueBackgroundColor
self.navigationSeparatorNode.backgroundColor = presentationData.theme.rootController.navigationBar.separatorColor
transition.updateAlpha(node: self.navigationBackgroundNode, alpha: state.isEditing && self.isSettings ? min(1.0, contentOffset / (navigationHeight * 0.5)) : 0.0)
let separatorAlpha: CGFloat = state.isEditing && self.isSettings ? min(1.0, contentOffset / (navigationHeight * 0.5)) : 0.0
transition.updateAlpha(node: self.navigationBackgroundBackgroundNode, alpha: 1.0 - separatorAlpha)
transition.updateAlpha(node: self.navigationSeparatorNode, alpha: separatorAlpha)
self.separatorNode.backgroundColor = presentationData.theme.list.itemBlocksSeparatorColor
let defaultButtonSize: CGFloat = 40.0

View File

@@ -467,43 +467,79 @@ public final class OngoingGroupCallContext {
self.context.switchAudioOutput(deviceId)
}
func makeIncomingVideoView(endpointId: String, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
self.context.makeIncomingVideoView(withEndpointId: endpointId, completion: { view in
if let view = view {
func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (OngoingCallContextPresentationCallVideoView?, OngoingCallContextPresentationCallVideoView?) -> Void) {
self.context.makeIncomingVideoView(withEndpointId: endpointId, requestClone: requestClone, completion: { mainView, cloneView in
if let mainView = mainView {
#if os(iOS)
completion(OngoingCallContextPresentationCallVideoView(
view: view,
setOnFirstFrameReceived: { [weak view] f in
view?.setOnFirstFrameReceived(f)
let mainVideoView = OngoingCallContextPresentationCallVideoView(
view: mainView,
setOnFirstFrameReceived: { [weak mainView] f in
mainView?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
return OngoingCallVideoOrientation(view.orientation)
getOrientation: { [weak mainView] in
if let mainView = mainView {
return OngoingCallVideoOrientation(mainView.orientation)
} else {
return .rotation0
}
},
getAspect: { [weak view] in
if let view = view {
return view.aspect
getAspect: { [weak mainView] in
if let mainView = mainView {
return mainView.aspect
} else {
return 0.0
}
},
setOnOrientationUpdated: { [weak view] f in
view?.setOnOrientationUpdated { value, aspect in
setOnOrientationUpdated: { [weak mainView] f in
mainView?.setOnOrientationUpdated { value, aspect in
f?(OngoingCallVideoOrientation(value), aspect)
}
},
setOnIsMirroredUpdated: { [weak view] f in
view?.setOnIsMirroredUpdated { value in
setOnIsMirroredUpdated: { [weak mainView] f in
mainView?.setOnIsMirroredUpdated { value in
f?(value)
}
},
updateIsEnabled: { [weak view] value in
view?.updateIsEnabled(value)
updateIsEnabled: { [weak mainView] value in
mainView?.updateIsEnabled(value)
}
))
)
let cloneVideoView = cloneView.flatMap { cloneView in
return OngoingCallContextPresentationCallVideoView(
view: cloneView,
setOnFirstFrameReceived: { [weak cloneView] f in
cloneView?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak cloneView] in
if let cloneView = cloneView {
return OngoingCallVideoOrientation(cloneView.orientation)
} else {
return .rotation0
}
},
getAspect: { [weak cloneView] in
if let cloneView = cloneView {
return cloneView.aspect
} else {
return 0.0
}
},
setOnOrientationUpdated: { [weak cloneView] f in
cloneView?.setOnOrientationUpdated { value, aspect in
f?(OngoingCallVideoOrientation(value), aspect)
}
},
setOnIsMirroredUpdated: { [weak cloneView] f in
cloneView?.setOnIsMirroredUpdated { value in
f?(value)
}
},
updateIsEnabled: { [weak cloneView] value in
cloneView?.updateIsEnabled(value)
}
)
}
completion(mainVideoView, cloneVideoView)
#else
completion(OngoingCallContextPresentationCallVideoView(
view: view,
@ -539,7 +575,7 @@ public final class OngoingGroupCallContext {
))
#endif
} else {
completion(nil)
completion(nil, nil)
}
})
}
@ -697,9 +733,9 @@ public final class OngoingGroupCallContext {
}
}
public func makeIncomingVideoView(endpointId: String, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
public func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (OngoingCallContextPresentationCallVideoView?, OngoingCallContextPresentationCallVideoView?) -> Void) {
self.impl.with { impl in
impl.makeIncomingVideoView(endpointId: endpointId, completion: completion)
impl.makeIncomingVideoView(endpointId: endpointId, requestClone: requestClone, completion: completion)
}
}
}

View File

@ -275,7 +275,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;
- (void)makeIncomingVideoViewWithEndpointId:(NSString * _Nonnull)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
- (void)makeIncomingVideoViewWithEndpointId:(NSString * _Nonnull)endpointId requestClone:(bool)requestClone completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable, UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
@end

View File

@ -1278,7 +1278,7 @@ private:
}
}
- (void)makeIncomingVideoViewWithEndpointId:(NSString *)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
- (void)makeIncomingVideoViewWithEndpointId:(NSString * _Nonnull)endpointId requestClone:(bool)requestClone completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable, UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
if (_instance) {
__weak GroupCallThreadLocalContext *weakSelf = self;
id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
@ -1296,7 +1296,7 @@ private:
}
}];
completion(remoteRenderer);
completion(remoteRenderer, nil);
} else if ([VideoMetalView isSupported]) {
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#if TARGET_OS_IPHONE
@ -1314,7 +1314,7 @@ private:
}
}];
completion(remoteRenderer);
completion(remoteRenderer, nil);
} else {
GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
// [remoteRenderer setVideoContentMode:kCAGravityResizeAspectFill];
@ -1327,7 +1327,7 @@ private:
}
}];
completion(remoteRenderer);
completion(remoteRenderer, nil);
}
});
}

@ -1 +1 @@
Subproject commit 16c65116bfd5f94b2c05d0733baf89c9adca2dfb
Subproject commit 721a34e3392f44f10669b9286ec69af3563660a9

View File

@ -7,7 +7,6 @@ swift_library(
"Sources/**/*.swift",
]),
copts = [
"-O",
],
deps = [
"//submodules/AsyncDisplayKit:AsyncDisplayKit",

View File

@ -173,8 +173,8 @@ public final class WallpaperBackgroundNode: ASDisplayNode {
var updated = true
if let previousWallpaper = previousWallpaper {
switch previousWallpaper {
case let .file(previousId, _, _, _, previousIsPattern, _, _, _, _):
if id == previousId && isPattern == previousIsPattern {
case let .file(previousId, _, _, _, previousIsPattern, _, _, previousFile, _):
if id == previousId && isPattern == previousIsPattern && file.id == previousFile.id {
updated = false
}
default:

View File

@ -455,6 +455,15 @@ public func patternWallpaperImageInternal(thumbnailData: Data?, fullSizeData: Da
c.setFillColor(color.cgColor)
c.fill(arguments.drawingRect)
}
} else if colors.count >= 3 {
let image = GradientBackgroundNode.generatePreview(size: CGSize(width: 60.0, height: 60.0), colors: colors)
c.translateBy(x: drawingRect.midX, y: drawingRect.midY)
c.scaleBy(x: 1.0, y: -1.0)
c.translateBy(x: -drawingRect.midX, y: -drawingRect.midY)
c.draw(image.cgImage!, in: drawingRect)
c.translateBy(x: drawingRect.midX, y: drawingRect.midY)
c.scaleBy(x: 1.0, y: -1.0)
c.translateBy(x: -drawingRect.midX, y: -drawingRect.midY)
} else {
let gradientColors = colors.map { $0.cgColor } as CFArray
let delta: CGFloat = 1.0 / (CGFloat(colors.count) - 1.0)
@ -487,12 +496,17 @@ public func patternWallpaperImageInternal(thumbnailData: Data?, fullSizeData: Da
fittedSize = fittedSize.aspectFilled(arguments.drawingRect.size)
let fittedRect = CGRect(origin: CGPoint(x: drawingRect.origin.x + (drawingRect.size.width - fittedSize.width) / 2.0, y: drawingRect.origin.y + (drawingRect.size.height - fittedSize.height) / 2.0), size: fittedSize)
c.setBlendMode(.normal)
c.interpolationQuality = customArguments.preview ? .low : .medium
c.clip(to: fittedRect, mask: image)
if colors.count == 1 {
c.setBlendMode(.normal)
if colors.count >= 3 && customArguments.customPatternColor == nil {
c.setBlendMode(.softLight)
c.setFillColor(UIColor(white: 0.0, alpha: 0.5).cgColor)
c.fill(arguments.drawingRect)
} else if colors.count == 1 {
c.setFillColor(customArguments.customPatternColor?.cgColor ?? patternColor(for: color, intensity: intensity, prominent: prominent).cgColor)
c.fill(arguments.drawingRect)
} else {

@ -1 +1 @@
Subproject commit 66c1ff6850fd53bcf5c17247569bea1d700d6247
Subproject commit 76a4e44563bcf0ec8917d9bf805d28040d371de4