Merge commit '1f23769a5b2fea8ba82050b3c4c01bb014204a9d'

This commit is contained in:
Ali 2020-12-26 15:50:51 +04:00
commit 2b326b70ea
47 changed files with 287 additions and 194 deletions

View File

@ -130,6 +130,7 @@ public func peerMessagesMediaPlaylistAndItemId(_ message: Message, isRecentActio
/// The kind of media currently driven by the shared media manager.
/// Used to route playback state to the matching overlay panel.
public enum MediaManagerPlayerType {
    case voice, music, file
}
public protocol MediaManager: class {
@ -139,7 +140,7 @@ public protocol MediaManager: class {
var overlayMediaManager: OverlayMediaManager { get }
var globalMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError> { get }
var musicMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading)?, NoError> { get }
var musicMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError> { get }
var activeGlobalMediaPlayerAccountId: Signal<(AccountRecordId, Bool)?, NoError> { get }
func setPlaylist(_ playlist: (Account, SharedMediaPlaylist)?, type: MediaManagerPlayerType, control: SharedMediaPlayerControlAction)

View File

@ -740,6 +740,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
self.shimmerNode = ChatListSearchShimmerNode(key: key)
self.shimmerNode.isUserInteractionEnabled = false
self.shimmerNode.allowsGroupOpacity = true
self.listNode = ListView()
self.listNode.verticalScrollIndicatorColor = self.presentationData.theme.list.scrollIndicatorColor
@ -1528,7 +1529,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
loadMore()
}
if tagMask == .music || tagMask == .voiceOrInstantVideo {
if [.file, .music, .voiceOrInstantVideo].contains(tagMask) {
self.mediaStatusDisposable = (context.sharedContext.mediaManager.globalMediaPlayerState
|> mapToSignal { playlistStateAndType -> Signal<(Account, SharedMediaPlayerItemPlaybackState, MediaManagerPlayerType)?, NoError> in
if let (account, state, type) = playlistStateAndType {
@ -1544,6 +1545,10 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
if tagMask != .music {
return .single(nil) |> delay(0.2, queue: .mainQueue())
}
case .file:
if tagMask != .file {
return .single(nil) |> delay(0.2, queue: .mainQueue())
}
}
return .single((account, state, type))
} else {
@ -1887,8 +1892,8 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
let emptyTitleSize = self.emptyResultsTitleNode.updateLayout(CGSize(width: size.width - sideInset * 2.0 - padding * 2.0, height: CGFloat.greatestFiniteMagnitude))
let emptyTextSize = self.emptyResultsTextNode.updateLayout(CGSize(width: size.width - sideInset * 2.0 - padding * 2.0, height: CGFloat.greatestFiniteMagnitude))
var emptyAnimationHeight = self.animationSize.height
var emptyAnimationSpacing: CGFloat = 8.0
let emptyAnimationHeight = self.animationSize.height
let emptyAnimationSpacing: CGFloat = 8.0
// if case .landscape = layout.orientation, case .compact = layout.metrics.widthClass {
// emptyAnimationHeight = 0.0
// emptyAnimationSpacing = 0.0
@ -2069,10 +2074,14 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
strongSelf.emptyResultsAnimationNode.visibility = emptyResults
let displayPlaceholder = transition.isLoading && (strongSelf.key != .chats || (strongSelf.currentEntries?.isEmpty ?? true))
ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut).updateAlpha(node: strongSelf.shimmerNode, alpha: displayPlaceholder ? 1.0 : 0.0)
let targetAlpha: CGFloat = displayPlaceholder ? 1.0 : 0.0
if strongSelf.shimmerNode.alpha != targetAlpha {
let transition: ContainedViewLayoutTransition = displayPlaceholder ? .immediate : .animated(duration: 0.2, curve: .linear)
transition.updateAlpha(node: strongSelf.shimmerNode, alpha: targetAlpha, delay: 0.1)
}
strongSelf.recentListNode.isHidden = displayingResults || strongSelf.peersFilter.contains(.excludeRecent)
// strongSelf.dimNode.isHidden = displayingResults
strongSelf.backgroundColor = !displayingResults && strongSelf.peersFilter.contains(.excludeRecent) ? nil : strongSelf.presentationData.theme.chatList.backgroundColor
if !strongSelf.didSetReady && strongSelf.recentListNode.isHidden {
@ -2174,7 +2183,7 @@ private final class ShimmerEffectNode: ASDisplayNode {
}
func update(backgroundColor: UIColor, foregroundColor: UIColor) {
if let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor) {
if let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.argb == backgroundColor.argb, let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.argb == foregroundColor.argb {
return
}
self.currentBackgroundColor = backgroundColor

View File

@ -636,9 +636,7 @@ final class ChatListSearchMediaNode: ASDisplayNode, UIScrollViewDelegate {
/// Signal that fires `true` once the node has produced its first layout
/// and can be displayed; backed by the `ready` promise.
/// NOTE(review): assumes `self.ready` is completed elsewhere in this class — confirm.
var isReady: Signal<Bool, NoError> {
return self.ready.get()
}
let shouldReceiveExpandProgressUpdates: Bool = false
private let listDisposable = MetaDisposable()
private var hiddenMediaDisposable: Disposable?
private var mediaItems: [VisualMediaItem] = []

View File

@ -422,7 +422,7 @@ open class ListViewItemNode: ASDisplayNode, AccessibilityFocusableNode {
}
public func addInsetsAnimationToValue(_ value: UIEdgeInsets, duration: Double, beginAt: Double) {
let animation = ListViewAnimation(from: self.insets, to: value, duration: duration, curve: listViewAnimationCurveSystem, beginAt: beginAt, update: { [weak self] _, currentValue in
let animation = ListViewAnimation(from: self.insets, to: value, duration: duration, curve: self.preferredAnimationCurve, beginAt: beginAt, update: { [weak self] _, currentValue in
if let strongSelf = self {
strongSelf.insets = currentValue
}
@ -483,7 +483,7 @@ open class ListViewItemNode: ASDisplayNode, AccessibilityFocusableNode {
duration = 0.0
}
let animation = ListViewAnimation(from: self.apparentHeight, to: value, duration: duration, curve: listViewAnimationCurveSystem, beginAt: beginAt, update: { [weak self] _, currentValue in
let animation = ListViewAnimation(from: self.apparentHeight, to: value, duration: duration, curve: self.preferredAnimationCurve, beginAt: beginAt, update: { [weak self] _, currentValue in
if let strongSelf = self {
strongSelf.apparentHeight = currentValue
}

View File

@ -38,6 +38,7 @@ typedef enum
@property (nonatomic, readonly) NSString *uniformTypeIdentifier;
@property (nonatomic, readonly) NSString *fileName;
@property (nonatomic, readonly) NSInteger fileSize;
@property (nonatomic, readonly) bool isFavorite;
@property (nonatomic, readonly) TGMediaAssetType type;
@property (nonatomic, readonly) TGMediaAssetSubtype subtypes;

View File

@ -9,6 +9,8 @@
@property (nonatomic, readonly) TGImageView *imageView;
@property (nonatomic, readonly) TGCheckButtonView *checkButton;
@property (nonatomic, readonly) UIImageView *typeIconView;
- (void)setHidden:(bool)hidden animated:(bool)animated;
@property (nonatomic, strong) TGMediaSelectionContext *selectionContext;

View File

@ -102,6 +102,11 @@
return [self.uniformTypeIdentifier isEqualToString:(NSString *)kUTTypeGIF];
}
// Whether the underlying PHAsset is marked as a Favorite in the user's
// photo library; forwarded directly from the backing asset.
- (bool)isFavorite
{
return _backingAsset.isFavorite;
}
- (TGMediaAssetType)type
{
if (_cachedType == nil)

View File

@ -1,5 +1,7 @@
#import "TGMediaAssetsPhotoCell.h"
#import <LegacyComponents/TGMediaAsset.h>
#import "LegacyComponentsInternal.h"
NSString *const TGMediaAssetsPhotoCellKind = @"TGMediaAssetsPhotoCellKind";
@ -14,4 +16,15 @@ NSString *const TGMediaAssetsPhotoCellKind = @"TGMediaAssetsPhotoCellKind";
return self;
}
// Binds a media asset to this cell. After the superclass has installed the
// thumbnail signal, shows the "favorite" badge icon when the asset is a
// library favorite, and clears it otherwise.
- (void)setItem:(NSObject *)item signal:(SSignal *)signal
{
[super setItem:item signal:signal];
// Only TGMediaAsset instances carry favorite state; ignore anything else.
TGMediaAsset *asset = (TGMediaAsset *)item;
if (![asset isKindOfClass:[TGMediaAsset class]])
return;
self.typeIconView.image = asset.isFavorite ? TGComponentsImageNamed(@"MediaGroupFavorites") : nil;
}
@end

View File

@ -15,7 +15,6 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
@interface TGMediaAssetsVideoCell ()
{
UIImageView *_shadowView;
UIImageView *_iconView;
UILabel *_durationLabel;
SMetaDisposable *_adjustmentsDisposable;
@ -64,10 +63,7 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
_shadowView = [[UIImageView alloc] initWithFrame:CGRectMake(0, frame.size.height - 20, frame.size.width, 20)];
_shadowView.image = shadowImage;
[self addSubview:_shadowView];
_iconView = [[UIImageView alloc] init];
_iconView.contentMode = UIViewContentModeCenter;
_durationLabel = [[UILabel alloc] init];
_durationLabel.textColor = [UIColor whiteColor];
_durationLabel.backgroundColor = [UIColor clearColor];
@ -81,7 +77,6 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
if (iosMajorVersion() >= 11)
{
_shadowView.accessibilityIgnoresInvertColors = true;
_iconView.accessibilityIgnoresInvertColors = true;
_durationLabel.accessibilityIgnoresInvertColors = true;
}
@ -103,7 +98,6 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
if (![asset isKindOfClass:[TGMediaAsset class]])
return;
NSString *durationString = nil;
int duration = (int)ceil(asset.videoDuration);
if (duration >= 3600)
@ -114,12 +108,7 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
_durationLabel.text = durationString;
[_durationLabel sizeToFit];
if (asset.subtypes & TGMediaAssetSubtypeVideoTimelapse)
_iconView.image = TGComponentsImageNamed(@"ModernMediaItemTimelapseIcon");
else if (asset.subtypes & TGMediaAssetSubtypeVideoHighFrameRate)
_iconView.image = TGComponentsImageNamed(@"ModernMediaItemSloMoIcon");
else
_iconView.image = TGComponentsImageNamed(@"ModernMediaItemVideoIcon");
self.typeIconView.image = asset.isFavorite ? TGComponentsImageNamed(@"MediaGroupFavorites") : nil;
SSignal *adjustmentsSignal = [self.editingContext adjustmentsSignalForItem:(id<TGMediaEditableItem>)self.item];
@ -234,7 +223,6 @@ NSString *const TGMediaAssetsVideoCellKind = @"TGMediaAssetsVideoCellKind";
{
self.checkButton.frame = (CGRect){ { self.frame.size.width - self.checkButton.frame.size.width - 2, 2 }, self.checkButton.frame.size };
_shadowView.frame = (CGRect){ { 0, self.frame.size.height - _shadowView.frame.size.height }, {self.frame.size.width, _shadowView.frame.size.height } };
_iconView.frame = CGRectMake(0, self.frame.size.height - 19, 19, 19);
CGSize durationSize = _durationLabel.frame.size;
_durationLabel.frame = CGRectMake(self.frame.size.width - floor(durationSize.width) - 5.0, self.frame.size.height - floor(durationSize.height) - 4.0, durationSize.width, durationSize.height);

View File

@ -34,6 +34,10 @@
if (iosMajorVersion() >= 11)
_imageView.accessibilityIgnoresInvertColors = true;
_typeIconView = [[UIImageView alloc] init];
_typeIconView.contentMode = UIViewContentModeCenter;
[self addSubview:_typeIconView];
self.isAccessibilityElement = true;
}
return self;
@ -212,6 +216,8 @@
_imageView.frame = self.bounds;
_imageView.transform = transform;
_typeIconView.frame = CGRectMake(2.0, self.frame.size.height - 19 - 2, 19, 19);
_checkButton.frame = (CGRect){ { self.frame.size.width - _checkButton.frame.size.width - 2, 2 }, _checkButton.frame.size };
}

View File

@ -1072,7 +1072,7 @@ static id<LegacyComponentsContext> _defaultContext = nil;
+ (UIEdgeInsets)safeAreaInsetForOrientation:(UIInterfaceOrientation)orientation hasOnScreenNavigation:(bool)hasOnScreenNavigation
{
int height = (int)TGScreenSize().height;
if (!TGIsPad() && (height != 812 && height != 896 && height != 780 && height != 844 && height != 926))
if (!TGIsPad() && (height != 812 && height != 896 && height != 780 && height != 844 && height != 926) && !hasOnScreenNavigation)
return UIEdgeInsetsZero;
if (TGIsPad()) {

View File

@ -123,6 +123,7 @@ private final class MediaPlayerContext {
fileprivate let videoRenderer: VideoPlayerProxy
private var tickTimer: SwiftSignalKit.Timer?
private var fadeTimer: SwiftSignalKit.Timer?
private var lastStatusUpdateTimestamp: Double?
private let playerStatus: Promise<MediaPlayerStatus>
@ -224,6 +225,7 @@ private final class MediaPlayerContext {
deinit {
assert(self.queue.isCurrent())
self.fadeTimer?.invalidate()
self.tickTimer?.invalidate()
if case let .seeking(_, _, _, disposable, _, _) = self.state {
@ -383,7 +385,7 @@ private final class MediaPlayerContext {
if strongSelf.continuePlayingWithoutSoundOnLostAudioSession {
strongSelf.continuePlayingWithoutSound()
} else {
strongSelf.pause(lostAudioSession: true)
strongSelf.pause(lostAudioSession: true, faded: false)
}
} else {
strongSelf.seek(timestamp: 0.0, action: .play)
@ -440,7 +442,7 @@ private final class MediaPlayerContext {
}
}
fileprivate func play() {
fileprivate func play(faded: Bool = false) {
assert(self.queue.isCurrent())
switch self.state {
@ -461,7 +463,7 @@ private final class MediaPlayerContext {
if strongSelf.continuePlayingWithoutSoundOnLostAudioSession {
strongSelf.continuePlayingWithoutSound()
} else {
strongSelf.pause(lostAudioSession: true)
strongSelf.pause(lostAudioSession: true, faded: false)
}
} else {
strongSelf.seek(timestamp: 0.0, action: .play)
@ -477,6 +479,26 @@ private final class MediaPlayerContext {
self.state = .seeking(frameSource: frameSource, timestamp: timestamp, seekState: seekState, disposable: disposable, action: .play, enableSound: enableSound)
self.lastStatusUpdateTimestamp = nil
case let .paused(loadedState):
if faded {
self.fadeTimer?.invalidate()
var volume: Double = 0.0
let fadeTimer = SwiftSignalKit.Timer(timeout: 0.025, repeat: true, completion: { [weak self] in
if let strongSelf = self {
volume += 0.1
if volume < 1.0 {
strongSelf.audioRenderer?.renderer.setVolume(volume)
} else {
strongSelf.audioRenderer?.renderer.setVolume(1.0)
strongSelf.fadeTimer?.invalidate()
strongSelf.fadeTimer = nil
}
}
}, queue: self.queue)
self.fadeTimer = fadeTimer
fadeTimer.start()
}
if loadedState.lostAudioSession {
let timestamp = CMTimeGetSeconds(CMTimebaseGetTime(loadedState.controlTimebase.timebase))
self.seek(timestamp: timestamp, action: .play)
@ -632,7 +654,7 @@ private final class MediaPlayerContext {
}
}
fileprivate func pause(lostAudioSession: Bool) {
fileprivate func pause(lostAudioSession: Bool, faded: Bool = false) {
assert(self.queue.isCurrent())
switch self.state {
@ -651,31 +673,52 @@ private final class MediaPlayerContext {
}
self.state = .paused(loadedState)
self.lastStatusUpdateTimestamp = nil
if faded {
self.fadeTimer?.invalidate()
var volume: Double = 1.0
let fadeTimer = SwiftSignalKit.Timer(timeout: 0.025, repeat: true, completion: { [weak self] in
if let strongSelf = self {
volume -= 0.1
if volume > 0 {
strongSelf.audioRenderer?.renderer.setVolume(volume)
} else {
strongSelf.fadeTimer?.invalidate()
strongSelf.fadeTimer = nil
strongSelf.tick()
}
}
}, queue: self.queue)
self.fadeTimer = fadeTimer
fadeTimer.start()
}
self.tick()
}
}
fileprivate func togglePlayPause() {
fileprivate func togglePlayPause(faded: Bool) {
assert(self.queue.isCurrent())
switch self.state {
case .empty:
self.play()
self.play(faded: false)
case let .seeking(_, _, _, _, action, _):
switch action {
case .play:
self.pause(lostAudioSession: false)
self.pause(lostAudioSession: false, faded: faded)
case .pause:
self.play()
self.play(faded: faded)
}
case .paused:
if !self.enableSound {
self.playOnceWithSound(playAndRecord: false, seek: .none)
} else {
self.play()
self.play(faded: faded)
}
case .playing:
self.pause(lostAudioSession: false)
self.pause(lostAudioSession: false, faded: faded)
}
}
@ -787,7 +830,13 @@ private final class MediaPlayerContext {
var bufferingProgress: Float?
if let worstStatus = worstStatus, case let .full(fullUntil) = worstStatus, fullUntil.isFinite {
var playing = false
if case .playing = self.state {
playing = true
} else if self.fadeTimer != nil {
playing = true
}
if playing {
rate = self.baseRate
let nextTickDelay = max(0.0, fullUntil - timestamp) / self.baseRate
@ -805,7 +854,13 @@ private final class MediaPlayerContext {
rate = 0.0
performActionAtEndNow = true
} else {
var playing = false
if case .playing = self.state {
playing = true
} else if self.fadeTimer != nil {
playing = true
}
if playing {
rate = self.baseRate
let tickTimer = SwiftSignalKit.Timer(timeout: nextTickDelay, repeat: false, completion: { [weak self] in
@ -871,13 +926,18 @@ private final class MediaPlayerContext {
var statusTimestamp = CACurrentMediaTime()
let playbackStatus: MediaPlayerPlaybackStatus
var isPlaying = false
var isPaused = false
if case .playing = self.state {
isPlaying = true
} else if case .paused = self.state {
isPaused = true
}
if let bufferingProgress = bufferingProgress {
playbackStatus = .buffering(initial: false, whilePlaying: isPlaying, progress: Float(bufferingProgress), display: true)
} else if !rate.isZero {
if reportRate.isZero {
if isPaused && self.fadeTimer != nil {
playbackStatus = .paused
} else if reportRate.isZero {
//playbackStatus = .buffering(initial: false, whilePlaying: true)
playbackStatus = .playing
statusTimestamp = 0.0
@ -1076,10 +1136,10 @@ public final class MediaPlayer {
}
}
public func togglePlayPause() {
/// Toggles between playing and paused on the player's internal queue.
/// - Parameter faded: When `true`, the underlying context applies a short
///   volume fade while transitioning (defaults to `false` for callers that
///   predate the fade support).
public func togglePlayPause(faded: Bool = false) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.togglePlayPause(faded: faded)
}
}
}

View File

@ -230,6 +230,7 @@ private final class AudioPlayerRendererContext {
var audioGraph: AUGraph?
var timePitchAudioUnit: AudioComponentInstance?
var mixerAudioUnit: AudioComponentInstance?
var outputAudioUnit: AudioComponentInstance?
var bufferContextId: Int32!
@ -314,6 +315,12 @@ private final class AudioPlayerRendererContext {
}
}
/// Sets the output gain on the multichannel mixer unit, if the audio graph
/// has been built. Values are expected in the 0.0–1.0 range.
fileprivate func setVolume(_ volume: Double) {
    guard let mixerAudioUnit = self.mixerAudioUnit else {
        return
    }
    AudioUnitSetParameter(mixerAudioUnit, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, Float32(volume), 0)
}
fileprivate func setRate(_ rate: Double) {
assert(audioPlayerRendererQueue.isCurrent())
@ -406,6 +413,15 @@ private final class AudioPlayerRendererContext {
return
}
var mixerNode: AUNode = 0
var mixerDescription = AudioComponentDescription()
mixerDescription.componentType = kAudioUnitType_Mixer
mixerDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer
mixerDescription.componentManufacturer = kAudioUnitManufacturer_Apple
guard AUGraphAddNode(audioGraph, &mixerDescription, &mixerNode) == noErr else {
return
}
var outputNode: AUNode = 0
var outputDesc = AudioComponentDescription()
outputDesc.componentType = kAudioUnitType_Output
@ -429,7 +445,11 @@ private final class AudioPlayerRendererContext {
return
}
guard AUGraphConnectNodeInput(audioGraph, timePitchNode, 0, outputNode, 0) == noErr else {
guard AUGraphConnectNodeInput(audioGraph, timePitchNode, 0, mixerNode, 0) == noErr else {
return
}
guard AUGraphConnectNodeInput(audioGraph, mixerNode, 0, outputNode, 0) == noErr else {
return
}
@ -444,6 +464,11 @@ private final class AudioPlayerRendererContext {
}
AudioUnitSetParameter(timePitchAudioUnit, kTimePitchParam_Rate, kAudioUnitScope_Global, 0, Float32(self.baseRate), 0)
var maybeMixerAudioUnit: AudioComponentInstance?
guard AUGraphNodeInfo(audioGraph, mixerNode, &mixerDescription, &maybeMixerAudioUnit) == noErr, let mixerAudioUnit = maybeMixerAudioUnit else {
return
}
var maybeOutputAudioUnit: AudioComponentInstance?
guard AUGraphNodeInfo(audioGraph, outputNode, &outputDesc, &maybeOutputAudioUnit) == noErr, let outputAudioUnit = maybeOutputAudioUnit else {
return
@ -456,7 +481,7 @@ private final class AudioPlayerRendererContext {
var streamFormat = AudioStreamBasicDescription()
AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &streamFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
var callbackStruct = AURenderCallbackStruct()
callbackStruct.inputProc = rendererInputProc
@ -474,8 +499,9 @@ private final class AudioPlayerRendererContext {
var maximumFramesPerSlice: UInt32 = 4096
AudioUnitSetProperty(converterAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
AudioUnitSetProperty(timePitchAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
AudioUnitSetProperty(mixerAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
AudioUnitSetProperty(outputAudioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFramesPerSlice, 4)
guard AUGraphInitialize(audioGraph) == noErr else {
return
}
@ -484,6 +510,7 @@ private final class AudioPlayerRendererContext {
self.audioGraph = audioGraph
self.timePitchAudioUnit = timePitchAudioUnit
self.mixerAudioUnit = mixerAudioUnit
self.outputAudioUnit = outputAudioUnit
}
@ -820,6 +847,15 @@ public final class MediaPlayerAudioRenderer {
}
}
/// Asynchronously forwards a volume change to the renderer context on the
/// shared audio renderer queue. No-op if the context has been released.
public func setVolume(_ volume: Double) {
    audioPlayerRendererQueue.async {
        guard let contextRef = self.contextRef else {
            return
        }
        contextRef.takeUnretainedValue().setVolume(volume)
    }
}
public func beginRequestingFrames(queue: DispatchQueue, takeFrame: @escaping () -> MediaTrackFrameResult) {
audioPlayerRendererQueue.async {
if let contextRef = self.contextRef {

View File

@ -25,8 +25,8 @@ public final class CachedUserData: CachedPeerData {
self.pinnedMessageId = nil
self.isBlocked = false
self.commonGroupCount = 0
self.voiceCallsAvailable = false
self.videoCallsAvailable = false
self.voiceCallsAvailable = true
self.videoCallsAvailable = true
self.callsPrivate = false
self.canPinMessages = false
self.hasScheduledMessages = false

View File

@ -22,6 +22,10 @@ public struct TelegramChatAdminRightsFlags: OptionSet {
public static let canBeAnonymous = TelegramChatAdminRightsFlags(rawValue: 1 << 10)
public static let canManageCalls = TelegramChatAdminRightsFlags(rawValue: 1 << 11)
/// The union of every defined admin right, including the recently added
/// `canBeAnonymous` and `canManageCalls` flags.
public static var all: TelegramChatAdminRightsFlags {
    let rights: TelegramChatAdminRightsFlags = [
        .canChangeInfo,
        .canPostMessages,
        .canEditMessages,
        .canDeleteMessages,
        .canBanUsers,
        .canInviteUsers,
        .canPinMessages,
        .canAddAdmins,
        .canBeAnonymous,
        .canManageCalls
    ]
    return rights
}
public static var groupSpecific: TelegramChatAdminRightsFlags = [
.canChangeInfo,
.canDeleteMessages,

View File

@ -257,8 +257,4 @@ class VoiceChatActionItemNode: ListViewItemNode {
override public func header() -> ListViewItemHeader? {
return nil
}
override var preferredAnimationCurve: (CGFloat) -> CGFloat {
return listViewAnimationCurveEaseInOut
}
}

View File

@ -942,9 +942,9 @@ public final class VoiceChatController: ViewController {
self.call.members,
invitedPeers
)
|> mapToSignal { values in
|> mapToThrottled { values in
return .single(values)
|> delay(0.0, queue: .mainQueue())
|> then(.complete() |> delay(0.1, queue: Queue.mainQueue()))
}).start(next: { [weak self] state, callMembers, invitedPeers in
guard let strongSelf = self else {
return
@ -1850,8 +1850,6 @@ public final class VoiceChatController: ViewController {
}
let topPanelFrame = self.topPanelNode.view.convert(self.topPanelNode.bounds, to: self.view)
let offset: CGFloat = self.contentContainer.bounds.minY
self.contentContainer.layer.animateBoundsOriginYAdditive(from: self.contentContainer.bounds.origin.y, to: -(layout.size.height - topPanelFrame.minY) - 44.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { _ in
offsetCompleted = true
internalCompletion()
@ -1885,7 +1883,7 @@ public final class VoiceChatController: ViewController {
if transition.crossFade {
options.insert(.AnimateCrossfade)
}
if transition.animated {
if transition.animated && self.animation == nil {
options.insert(.AnimateInsertion)
}
}
@ -1921,7 +1919,7 @@ public final class VoiceChatController: ViewController {
self.listNode.frame = frame
} else if !self.isExpanded {
if self.listNode.frame.minY != targetY && !self.animatingExpansion && self.panGestureArguments == nil {
self.animation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveEaseInOut, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in
self.animation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveSystem, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in
if let strongSelf = self {
var frame = strongSelf.listNode.frame
frame.origin.y = currentValue
@ -2079,6 +2077,7 @@ public final class VoiceChatController: ViewController {
private var panGestureArguments: (topInset: CGFloat, offset: CGFloat)?
@objc func panGesture(_ recognizer: UIPanGestureRecognizer) {
let contentOffset = self.listNode.visibleContentOffset()
switch recognizer.state {
case .began:
let topInset: CGFloat
@ -2096,7 +2095,8 @@ public final class VoiceChatController: ViewController {
if let (currentTopInset, currentPanOffset) = self.panGestureArguments {
topInset = currentTopInset
if case let .known(value) = self.listNode.visibleContentOffset(), value > 0 {
if case let .known(value) = contentOffset, value <= 0.5 {
} else {
translation = currentPanOffset
if self.isExpanded {
recognizer.setTranslation(CGPoint(), in: self.contentContainer.view)
@ -2114,7 +2114,6 @@ public final class VoiceChatController: ViewController {
}
if self.isExpanded {
} else {
if currentOffset > 0.0 {
self.listNode.scroller.panGestureRecognizer.setTranslation(CGPoint(), in: self.listNode.scroller)
@ -2136,7 +2135,9 @@ public final class VoiceChatController: ViewController {
let translation = recognizer.translation(in: self.contentContainer.view)
var velocity = recognizer.velocity(in: self.contentContainer.view)
if case let .known(value) = self.listNode.visibleContentOffset(), value > 0 {
if case let .known(value) = contentOffset, value > 0.0 {
velocity = CGPoint()
} else if case .unknown = contentOffset {
velocity = CGPoint()
}
@ -2164,6 +2165,7 @@ public final class VoiceChatController: ViewController {
self.isExpanded = false
self.updateIsFullscreen(false)
self.animatingExpansion = true
self.listNode.scroller.setContentOffset(CGPoint(), animated: false)
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
@ -2189,6 +2191,12 @@ public final class VoiceChatController: ViewController {
self.controller?.dismiss(closing: false, manual: true)
dismissing = true
} else if velocity.y < -300.0 || offset < topInset / 2.0 {
if velocity.y > -1500.0 && !self.isFullscreen {
DispatchQueue.main.async {
self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: ListViewScrollToItem(index: 0, position: .top(0.0), animated: true, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
}
}
self.isExpanded = true
self.updateIsFullscreen(true)
self.animatingExpansion = true
@ -2202,6 +2210,7 @@ public final class VoiceChatController: ViewController {
} else {
self.updateIsFullscreen(false)
self.animatingExpansion = true
self.listNode.scroller.setContentOffset(CGPoint(), animated: false)
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut))

View File

@ -787,8 +787,4 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.setRevealOptionsOpened(false, animated: true)
self.revealOptionsInteractivelyClosed()
}
override var preferredAnimationCurve: (CGFloat) -> CGFloat {
return listViewAnimationCurveEaseInOut
}
}

View File

@ -258,7 +258,11 @@ private final class PollResultsOptionContext {
}
|> mapToSignal { inputPeer -> Signal<([RenderedPeer], Int, String?), NoError> in
if let inputPeer = inputPeer {
let signal = account.network.request(Api.functions.messages.getPollVotes(flags: 1 << 0, peer: inputPeer, id: messageId.id, option: Buffer(data: opaqueIdentifier), offset: nextOffset, limit: nextOffset == nil ? 15 : 50))
var flags: Int32 = 1 << 0
if let _ = nextOffset {
flags |= (1 << 1)
}
let signal = account.network.request(Api.functions.messages.getPollVotes(flags: flags, peer: inputPeer, id: messageId.id, option: Buffer(data: opaqueIdentifier), offset: nextOffset, limit: nextOffset == nil ? 10 : 50))
|> map(Optional.init)
|> `catch` { _ -> Signal<Api.messages.VotesList?, NoError> in
return .single(nil)

View File

@ -82,7 +82,7 @@ final class ChatBotStartInputPanelNode: ChatInputPanelNode {
}
@objc func buttonPressed() {
guard let context = self.context, let presentationInterfaceState = self.presentationInterfaceState, let peer = presentationInterfaceState.renderedPeer?.peer else {
guard let _ = self.context, let presentationInterfaceState = self.presentationInterfaceState, let _ = presentationInterfaceState.renderedPeer?.peer else {
return
}
@ -91,7 +91,6 @@ final class ChatBotStartInputPanelNode: ChatInputPanelNode {
override func updateLayout(width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, additionalSideInsets: UIEdgeInsets, maxHeight: CGFloat, isSecondary: Bool, transition: ContainedViewLayoutTransition, interfaceState: ChatPresentationInterfaceState, metrics: LayoutMetrics) -> CGFloat {
if self.presentationInterfaceState != interfaceState {
let previousState = self.presentationInterfaceState
self.presentationInterfaceState = interfaceState
}

View File

@ -30,7 +30,7 @@ private func titleAndColorForAction(_ action: SubscriberAction, theme: Presentat
return (strings.Conversation_Mute, theme.chat.inputPanel.panelControlAccentColor)
case .unmuteNotifications:
return (strings.Conversation_Unmute, theme.chat.inputPanel.panelControlAccentColor)
case let .unpinMessages(count):
case .unpinMessages:
return (strings.Chat_PanelUnpinAllMessages, theme.chat.inputPanel.panelControlAccentColor)
case .hidePinnedMessages:
return (strings.Chat_PanelHidePinnedMessages, theme.chat.inputPanel.panelControlAccentColor)

View File

@ -643,17 +643,6 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
}
if data.canEdit && !isPinnedMessages {
var mediaReference: AnyMediaReference?
for media in message.media {
if let image = media as? TelegramMediaImage, let _ = largestImageRepresentation(image.representations) {
mediaReference = ImageMediaReference.standalone(media: image).abstract
break
} else if let file = media as? TelegramMediaFile {
mediaReference = FileMediaReference.standalone(media: file).abstract
break
}
}
actions.append(.action(ContextMenuActionItem(text: chatPresentationInterfaceState.strings.Conversation_MessageDialogEdit, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Edit"), color: theme.actionSheet.primaryTextColor)
}, action: { c, f in

View File

@ -28,7 +28,7 @@ enum ChatMediaInputGridEntryIndex: Equatable, Comparable {
return .peerSpecificSetup
case let .collectionIndex(index):
return .sticker(index.collectionId, index.itemIndex.id)
case let .trending(id, index):
case let .trending(id, _):
return .trending(id)
}
}

View File

@ -814,7 +814,7 @@ public class ChatMessageItemView: ListViewItemNode {
item.controllerInteraction.openCheckoutOrReceipt(item.message.id)
case let .urlAuth(url, buttonId):
item.controllerInteraction.requestMessageActionUrlAuth(url, item.message.id, buttonId)
case let .setupPoll(isQuiz):
case .setupPoll:
break
}
}

View File

@ -36,7 +36,7 @@ final class ChatMessageWebpageBubbleContentNode: ChatMessageBubbleContentNode {
self.contentNode.openMedia = { [weak self] mode in
if let strongSelf = self, let item = strongSelf.item {
if let webPage = strongSelf.webPage, case let .Loaded(content) = webPage.content {
if let image = content.image, let instantPage = content.instantPage {
if let _ = content.image, let _ = content.instantPage {
if instantPageType(of: content) != .album {
item.controllerInteraction.openInstantPage(item.message, item.associatedData)
return

View File

@ -209,23 +209,23 @@ private enum ChatRecentActionsFilterEntry: ItemListNodeEntry {
func item(presentationData: ItemListPresentationData, arguments: Any) -> ListViewItem {
let arguments = arguments as! ChatRecentActionsFilterControllerArguments
switch self {
case let .actionsTitle(theme, text):
case let .actionsTitle(_, text):
return ItemListSectionHeaderItem(presentationData: presentationData, text: text, sectionId: self.section)
case let .allActions(theme, text, value):
case let .allActions(_, text, value):
return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, enabled: true, sectionId: self.section, style: .blocks, updated: { value in
arguments.toggleAllActions(value)
})
case let .actionItem(theme, _, events, text, value):
case let .actionItem(_, _, events, text, value):
return ItemListCheckboxItem(presentationData: presentationData, title: text, style: .right, checked: value, zeroSeparatorInsets: false, sectionId: self.section, action: {
arguments.toggleAction(events)
})
case let .adminsTitle(theme, text):
case let .adminsTitle(_, text):
return ItemListSectionHeaderItem(presentationData: presentationData, text: text, sectionId: self.section)
case let .allAdmins(theme, text, value):
case let .allAdmins(_, text, value):
return ItemListSwitchItem(presentationData: presentationData, title: text, value: value, enabled: true, sectionId: self.section, style: .blocks, updated: { value in
arguments.toggleAllAdmins(value)
})
case let .adminPeerItem(theme, strings, dateTimeFormat, nameDisplayOrder, _, participant, checked):
case let .adminPeerItem(_, strings, dateTimeFormat, nameDisplayOrder, _, participant, checked):
let peerText: String
switch participant.participant {
case .creator:

View File

@ -39,7 +39,7 @@ final class ChatSearchInputPanelNode: ChatInputPanelNode {
strongSelf.displayActivity = value
strongSelf.activityIndicator.isHidden = !value
if let interfaceState = strongSelf.presentationInterfaceState, let validLayout = strongSelf.validLayout {
strongSelf.updateLayout(width: validLayout.0, leftInset: validLayout.1, rightInset: validLayout.2, additionalSideInsets: validLayout.3, maxHeight: validLayout.4, isSecondary: validLayout.6, transition: .immediate, interfaceState: interfaceState, metrics: validLayout.5)
let _ = strongSelf.updateLayout(width: validLayout.0, leftInset: validLayout.1, rightInset: validLayout.2, additionalSideInsets: validLayout.3, maxHeight: validLayout.4, isSecondary: validLayout.6, transition: .immediate, interfaceState: interfaceState, metrics: validLayout.5)
}
}
}))

View File

@ -323,7 +323,7 @@ class ChatSearchResultsControllerNode: ViewControllerTracingNode, UIScrollViewDe
options.insert(.PreferSynchronousDrawing)
options.insert(.PreferSynchronousResourceLoading)
self.listNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { [weak self] _ in
self.listNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { _ in
})
}
}
@ -332,8 +332,6 @@ class ChatSearchResultsControllerNode: ViewControllerTracingNode, UIScrollViewDe
let hadValidLayout = self.validLayout != nil
self.validLayout = (layout, navigationBarHeight)
let topInset = navigationBarHeight
let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition)
self.listNode.frame = CGRect(origin: CGPoint(), size: layout.size)

View File

@ -276,7 +276,6 @@ final class ChatSendMessageActionSheetControllerNode: ViewControllerTracingNode,
let outgoing: PresentationThemeBubbleColorComponents = self.presentationData.chatWallpaper.isEmpty ? self.presentationData.theme.chat.message.outgoing.bubble.withoutWallpaper : self.presentationData.theme.chat.message.outgoing.bubble.withWallpaper
let maxCornerRadius = self.presentationData.chatBubbleCorners.mainRadius
let minCornerRadius = self.presentationData.chatBubbleCorners.auxiliaryRadius
self.messageBackgroundNode.image = messageBubbleImage(maxCornerRadius: maxCornerRadius, minCornerRadius: maxCornerRadius, incoming: false, fillColor: outgoing.gradientFill, strokeColor: outgoing.fill == outgoing.gradientFill ? outgoing.stroke : .clear, neighbors: .none, theme: self.presentationData.theme.chat, wallpaper: self.presentationData.chatWallpaper, knockout: false)
self.view.addSubview(self.effectView)

View File

@ -67,7 +67,7 @@ final class ChatTitleView: UIView, NavigationBarTitleView {
var inputActivities: (PeerId, [(Peer, PeerInputActivity)])? {
didSet {
self.updateStatus()
let _ = self.updateStatus()
}
}
@ -79,7 +79,7 @@ final class ChatTitleView: UIView, NavigationBarTitleView {
didSet {
if self.networkState != oldValue {
updateNetworkStatusNode(networkState: self.networkState, layout: self.layout)
self.updateStatus()
let _ = self.updateStatus()
}
}
}
@ -532,7 +532,7 @@ final class ChatTitleView: UIView, NavigationBarTitleView {
self.addSubnode(self.button)
self.presenceManager = PeerPresenceStatusManager(update: { [weak self] in
self?.updateStatus()
let _ = self?.updateStatus()
})
self.button.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: [.touchUpInside])
@ -575,7 +575,7 @@ final class ChatTitleView: UIView, NavigationBarTitleView {
let titleContent = self.titleContent
self.titleContent = titleContent
self.updateStatus()
let _ = self.updateStatus()
if let (size, clearBounds) = self.validLayout {
self.updateLayout(size: size, clearBounds: clearBounds, transition: .immediate)

View File

@ -114,16 +114,17 @@ public final class MediaManagerImpl: NSObject, MediaManager {
didSet {
if self.musicMediaPlayer !== oldValue {
if let musicMediaPlayer = self.musicMediaPlayer {
let type = musicMediaPlayer.type
let account = musicMediaPlayer.account
self.musicMediaPlayerStateValue.set(musicMediaPlayer.playbackState
|> map { state -> (Account, SharedMediaPlayerItemPlaybackStateOrLoading)? in
|> map { state -> (Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)? in
guard let state = state else {
return nil
}
if case let .item(item) = state {
return (account, .state(item))
return (account, .state(item), type)
} else {
return (account, .loading)
return (account, .loading, type)
}
} |> deliverOnMainQueue)
} else {
@ -132,8 +133,8 @@ public final class MediaManagerImpl: NSObject, MediaManager {
}
}
}
private let musicMediaPlayerStateValue = Promise<(Account, SharedMediaPlayerItemPlaybackStateOrLoading)?>(nil)
public var musicMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading)?, NoError> {
private let musicMediaPlayerStateValue = Promise<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?>(nil)
public var musicMediaPlayerState: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError> {
return self.musicMediaPlayerStateValue.get()
}
@ -202,7 +203,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
if let voice = voice {
return (voice.0, voice.1, .voice)
} else if let music = music {
return (music.0, music.1, .music)
return (music.0, music.1, music.2)
} else {
return nil
}
@ -492,7 +493,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
strongSelf.musicMediaPlayer?.control(.playback(.pause))
strongSelf.voiceMediaPlayer?.stop()
if let (account, playlist, settings, _) = inputData {
let voiceMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: .reversed, initialLooping: .none, initialPlaybackRate: settings.voicePlaybackRate, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: true)
let voiceMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: .reversed, initialLooping: .none, initialPlaybackRate: settings.voicePlaybackRate, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: true, type: type)
strongSelf.voiceMediaPlayer = voiceMediaPlayer
voiceMediaPlayer.playedToEnd = { [weak voiceMediaPlayer] in
if let strongSelf = self, let voiceMediaPlayer = voiceMediaPlayer, voiceMediaPlayer === strongSelf.voiceMediaPlayer {
@ -510,11 +511,11 @@ public final class MediaManagerImpl: NSObject, MediaManager {
} else {
strongSelf.voiceMediaPlayer = nil
}
case .music:
case .music, .file:
strongSelf.musicMediaPlayer?.stop()
strongSelf.voiceMediaPlayer?.control(.playback(.pause))
if let (account, playlist, settings, storedState) = inputData {
let musicMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: settings.order, initialLooping: settings.looping, initialPlaybackRate: storedState?.playbackRate ?? .x1, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: false)
let musicMediaPlayer = SharedMediaPlayer(mediaManager: strongSelf, inForeground: strongSelf.inForeground, account: account, audioSession: strongSelf.audioSession, overlayMediaManager: strongSelf.overlayMediaManager, playlist: playlist, initialOrder: settings.order, initialLooping: settings.looping, initialPlaybackRate: storedState?.playbackRate ?? .x1, playerIndex: nextPlayerIndex, controlPlaybackWithProximity: false, type: type)
strongSelf.musicMediaPlayer = musicMediaPlayer
musicMediaPlayer.cancelled = { [weak musicMediaPlayer] in
if let strongSelf = self, let musicMediaPlayer = musicMediaPlayer, musicMediaPlayer === strongSelf.musicMediaPlayer {
@ -549,7 +550,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
switch selectedType {
case .voice:
self.voiceMediaPlayer?.control(control)
case .music:
case .music, .file:
if self.voiceMediaPlayer != nil {
switch control {
case .playback(.play), .playback(.togglePlayPause):
@ -567,8 +568,11 @@ public final class MediaManagerImpl: NSObject, MediaManager {
switch type {
case .voice:
signal = self.voiceMediaPlayerState
case .music:
case .music, .file:
signal = self.musicMediaPlayerState
|> map { value in
return value.flatMap { ($0.0, $0.1) }
}
}
return signal
|> map { stateOrLoading -> SharedMediaPlayerItemPlaybackState? in
@ -598,7 +602,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
return .never()
}
}
case .music:
case .music, .file:
return .never()
}
}

View File

@ -271,7 +271,6 @@ final class MentionChatInputPanelItemNode: ListViewItemNode {
if self.revealOptions == options {
return
}
let previousOptions = self.revealOptions
let wasEmpty = self.revealOptions.isEmpty
self.revealOptions = options
let isEmpty = options.isEmpty

View File

@ -69,7 +69,7 @@ final class NotificationContainerControllerNode: ASDisplayNode {
if item.groupingKey == key {
self.topItemAndNode = nil
self.displayingItemsUpdated?(false)
topItemNode.animateOut(completion: { [weak self, weak topItemNode] in
topItemNode.animateOut(completion: { [weak topItemNode] in
topItemNode?.removeFromSupernode()
})
}
@ -78,7 +78,7 @@ final class NotificationContainerControllerNode: ASDisplayNode {
func enqueue(_ item: NotificationItem) {
if let (_, topItemNode) = self.topItemAndNode {
topItemNode.animateOut(completion: { [weak self, weak topItemNode] in
topItemNode.animateOut(completion: { [weak topItemNode] in
topItemNode?.removeFromSupernode()
})
}

View File

@ -151,12 +151,14 @@ func openChatMessageImpl(_ params: OpenChatMessageParams) -> Bool {
}
playerType = .music
} else {
if params.standalone {
if let playlistLocation = params.playlistLocation {
location = playlistLocation
} else if params.standalone {
location = .recentActions(params.message)
} else {
location = .singleMessage(params.message.id)
}
playerType = (file.isVoice || file.isInstantVideo) ? .voice : .music
playerType = (file.isVoice || file.isInstantVideo) ? .voice : .file
}
params.context.sharedContext.mediaManager.setPlaylist((params.context.account, PeerMessagesMediaPlaylist(context: params.context, location: location, chatLocationContextHolder: params.chatLocationContextHolder)), type: playerType, control: control)
return true

View File

@ -296,7 +296,7 @@ func openResolvedUrlImpl(_ resolvedUrl: ResolvedUrl, context: AccountContext, ur
case let .color(color):
signal = .single(.color(color.argb))
case let .gradient(topColor, bottomColor, rotation):
signal = .single(.gradient(topColor.argb, bottomColor.argb, WallpaperSettings()))
signal = .single(.gradient(topColor.argb, bottomColor.argb, WallpaperSettings(rotation: rotation)))
}
let _ = (signal

View File

@ -166,6 +166,8 @@ final class OverlayAudioPlayerControllerNode: ViewControllerTracingNode, UIGestu
tagMask = .music
case .voice:
tagMask = .voiceOrInstantVideo
case .file:
tagMask = .file
}
let chatLocationContextHolder = Atomic<ChatLocationContextHolder?>(value: nil)
@ -511,6 +513,8 @@ final class OverlayAudioPlayerControllerNode: ViewControllerTracingNode, UIGestu
tagMask = .music
case .voice:
tagMask = .voiceOrInstantVideo
case .file:
tagMask = .file
}
let chatLocationContextHolder = Atomic<ChatLocationContextHolder?>(value: nil)

View File

@ -142,7 +142,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
private var validLayout: (width: CGFloat, leftInset: CGFloat, rightInset: CGFloat, maxHeight: CGFloat)?
init(account: Account, accountManager: AccountManager, presentationData: PresentationData, status: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading)?, NoError>) {
init(account: Account, accountManager: AccountManager, presentationData: PresentationData, status: Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError>) {
self.accountManager = accountManager
self.postbox = account.postbox
self.presentationData = presentationData
@ -232,7 +232,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
let accountId = account.id
let delayedStatus = status
|> mapToSignal { value -> Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading)?, NoError> in
|> mapToSignal { value -> Signal<(Account, SharedMediaPlayerItemPlaybackStateOrLoading, MediaManagerPlayerType)?, NoError> in
guard let value = value, value.0.id == accountId else {
return .single(nil)
}
@ -246,7 +246,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
}
let mappedStatus = combineLatest(delayedStatus, self.scrubberNode.scrubbingTimestamp) |> map { value, scrubbingTimestamp -> MediaPlayerStatus in
if let (_, valueOrLoading) = value, case let .state(value) = valueOrLoading {
if let (_, valueOrLoading, _) = value, case let .state(value) = valueOrLoading {
return MediaPlayerStatus(generationTimestamp: scrubbingTimestamp != nil ? 0 : value.status.generationTimestamp, duration: value.status.duration, dimensions: value.status.dimensions, timestamp: scrubbingTimestamp ?? value.status.timestamp, baseRate: value.status.baseRate, seekId: value.status.seekId, status: value.status.status, soundEnabled: value.status.soundEnabled)
} else {
return MediaPlayerStatus(generationTimestamp: 0.0, duration: 0.0, dimensions: CGSize(), timestamp: 0.0, baseRate: 1.0, seekId: 0, status: .paused, soundEnabled: true)
@ -275,7 +275,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
strongSelf.rightDurationLabelPushed = rightDurationLabelPushed
if let layout = strongSelf.validLayout {
strongSelf.updateLayout(width: layout.0, leftInset: layout.1, rightInset: layout.2, maxHeight: layout.3, transition: .animated(duration: 0.35, curve: .spring))
let _ = strongSelf.updateLayout(width: layout.0, leftInset: layout.1, rightInset: layout.2, maxHeight: layout.3, transition: .animated(duration: 0.35, curve: .spring))
}
}
})
@ -286,7 +286,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
return
}
var valueItemId: SharedMediaPlaylistItemId?
if let (_, value) = value, case let .state(state) = value {
if let (_, value, _) = value, case let .state(state) = value {
valueItemId = state.item.id
}
if !areSharedMediaPlaylistItemIdsEqual(valueItemId, strongSelf.currentItemId) {
@ -297,7 +297,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
var rateButtonIsHidden = true
strongSelf.shareNode.isHidden = false
var displayData: SharedMediaPlaybackDisplayData?
if let (_, valueOrLoading) = value, case let .state(value) = valueOrLoading {
if let (_, valueOrLoading, _) = value, case let .state(value) = valueOrLoading {
let isPaused: Bool
switch value.status.status {
case .playing:
@ -352,7 +352,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
if duration != strongSelf.currentDuration && !duration.isZero {
strongSelf.currentDuration = duration
if let layout = strongSelf.validLayout {
strongSelf.updateLayout(width: layout.0, leftInset: layout.1, rightInset: layout.2, maxHeight: layout.3, transition: .immediate)
let _ = strongSelf.updateLayout(width: layout.0, leftInset: layout.1, rightInset: layout.2, maxHeight: layout.3, transition: .immediate)
}
}
@ -368,7 +368,7 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
if strongSelf.displayData != displayData {
strongSelf.displayData = displayData
if let (_, valueOrLoading) = value, case let .state(value) = valueOrLoading, let source = value.item.playbackData?.source {
if let (_, valueOrLoading, _) = value, case let .state(value) = valueOrLoading, let source = value.item.playbackData?.source {
switch source {
case let .telegramFile(fileReference):
strongSelf.currentFileReference = fileReference
@ -635,10 +635,10 @@ final class OverlayPlayerControlsNode: ASDisplayNode {
transition.updateFrame(node: self.scrubberNode, frame: CGRect(origin: CGPoint(x: leftInset + sideInset, y: scrubberVerticalOrigin - 8.0), size: CGSize(width: width - sideInset * 2.0 - leftInset - rightInset, height: 10.0 + 8.0 * 2.0)))
var leftLabelVerticalOffset: CGFloat = self.leftDurationLabelPushed ? 6.0 : 0.0
let leftLabelVerticalOffset: CGFloat = self.leftDurationLabelPushed ? 6.0 : 0.0
transition.updateFrame(node: self.leftDurationLabel, frame: CGRect(origin: CGPoint(x: leftInset + sideInset, y: scrubberVerticalOrigin + 14.0 + leftLabelVerticalOffset), size: CGSize(width: 100.0, height: 20.0)))
var rightLabelVerticalOffset: CGFloat = self.rightDurationLabelPushed ? 6.0 : 0.0
let rightLabelVerticalOffset: CGFloat = self.rightDurationLabelPushed ? 6.0 : 0.0
transition.updateFrame(node: self.rightDurationLabel, frame: CGRect(origin: CGPoint(x: width - sideInset - rightInset - 100.0, y: scrubberVerticalOrigin + 14.0 + rightLabelVerticalOffset), size: CGSize(width: 100.0, height: 20.0)))
let rateRightOffset = timestampLabelWidthForDuration(self.currentDuration)

View File

@ -76,9 +76,7 @@ final class PeerInfoGroupsInCommonPaneNode: ASDisplayNode, PeerInfoPaneNode {
var isReady: Signal<Bool, NoError> {
return self.ready.get()
}
let shouldReceiveExpandProgressUpdates: Bool = false
private var disposable: Disposable?
init(context: AccountContext, peerId: PeerId, chatControllerInteraction: ChatControllerInteraction, openPeerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) -> Void, groupsInCommonContext: GroupsInCommonContext) {

View File

@ -30,8 +30,6 @@ final class PeerInfoListPaneNode: ASDisplayNode, PeerInfoPaneNode {
return self.ready.get()
}
let shouldReceiveExpandProgressUpdates: Bool
private let selectedMessagesPromise = Promise<Set<MessageId>?>(nil)
private var selectedMessages: Set<MessageId>? {
didSet {
@ -61,13 +59,8 @@ final class PeerInfoListPaneNode: ASDisplayNode, PeerInfoPaneNode {
let chatLocationContextHolder = Atomic<ChatLocationContextHolder?>(value: nil)
self.listNode = ChatHistoryListNode(context: context, chatLocation: .peer(peerId), chatLocationContextHolder: chatLocationContextHolder, tagMask: tagMask, subject: nil, controllerInteraction: chatControllerInteraction, selectedMessages: self.selectedMessagesPromise.get(), mode: .list(search: false, reversed: false, displayHeaders: .allButLast, hintLinks: tagMask == .webPage, isGlobalSearch: false))
self.listNode.defaultToSynchronousTransactionWhileScrolling = true
if tagMask == .music {
self.shouldReceiveExpandProgressUpdates = true
} else {
self.shouldReceiveExpandProgressUpdates = false
}
self.listNode.scroller.bounces = false
self.mediaAccessoryPanelContainer = PassthroughContainerNode()
self.mediaAccessoryPanelContainer.clipsToBounds = true
@ -81,7 +74,7 @@ final class PeerInfoListPaneNode: ASDisplayNode, PeerInfoPaneNode {
|> take(1)
|> map { _ -> Bool in true })
if tagMask == .music || tagMask == .voiceOrInstantVideo {
if [.file, .music, .voiceOrInstantVideo].contains(tagMask) {
self.mediaStatusDisposable = (context.sharedContext.mediaManager.globalMediaPlayerState
|> mapToSignal { playlistStateAndType -> Signal<(Account, SharedMediaPlayerItemPlaybackState, MediaManagerPlayerType)?, NoError> in
if let (account, state, type) = playlistStateAndType {
@ -97,6 +90,10 @@ final class PeerInfoListPaneNode: ASDisplayNode, PeerInfoPaneNode {
if tagMask != .music {
return .single(nil) |> delay(0.2, queue: .mainQueue())
}
case .file:
if tagMask != .file {
return .single(nil) |> delay(0.2, queue: .mainQueue())
}
}
return .single((account, state, type))
} else {

View File

@ -28,15 +28,16 @@ enum PeerMembersListAction {
}
private struct PeerMembersListEntry: Comparable, Identifiable {
var index: Int
var member: PeerInfoMember
let theme: PresentationTheme
let index: Int
let member: PeerInfoMember
var stableId: PeerId {
return self.member.id
}
static func ==(lhs: PeerMembersListEntry, rhs: PeerMembersListEntry) -> Bool {
return lhs.member == rhs.member
return lhs.theme === rhs.theme && lhs.member == rhs.member
}
static func <(lhs: PeerMembersListEntry, rhs: PeerMembersListEntry) -> Bool {
@ -110,16 +111,15 @@ final class PeerInfoMembersPaneNode: ASDisplayNode, PeerInfoPaneNode {
private var canLoadMore: Bool = false
private var enqueuedTransactions: [PeerMembersListTransaction] = []
private var currentParams: (size: CGSize, isScrollingLockedAtTop: Bool, presentationData: PresentationData)?
private var currentParams: (size: CGSize, isScrollingLockedAtTop: Bool)?
private let presentationDataPromise = Promise<PresentationData>()
private let ready = Promise<Bool>()
private var didSetReady: Bool = false
var isReady: Signal<Bool, NoError> {
return self.ready.get()
}
let shouldReceiveExpandProgressUpdates: Bool = false
private var disposable: Disposable?
init(context: AccountContext, peerId: PeerId, membersContext: PeerInfoMembersContext, action: @escaping (PeerInfoMember, PeerMembersListAction) -> Void) {
@ -136,18 +136,17 @@ final class PeerInfoMembersPaneNode: ASDisplayNode, PeerInfoPaneNode {
self.disposable = (combineLatest(queue: .mainQueue(),
membersContext.state,
self.presentationDataPromise.get(),
context.account.postbox.combinedView(keys: [.basicPeer(peerId)])
)
|> deliverOnMainQueue).start(next: { [weak self] state, combinedView in
|> deliverOnMainQueue).start(next: { [weak self] state, presentationData, combinedView in
guard let strongSelf = self, let basicPeerView = combinedView.views[.basicPeer(peerId)] as? BasicPeerView, let enclosingPeer = basicPeerView.peer else {
return
}
strongSelf.enclosingPeer = enclosingPeer
strongSelf.currentState = state
if let (_, _, presentationData) = strongSelf.currentParams {
strongSelf.updateState(enclosingPeer: enclosingPeer, state: state, presentationData: presentationData)
}
strongSelf.updateState(enclosingPeer: enclosingPeer, state: state, presentationData: presentationData)
})
self.listNode.visibleBottomContentOffsetChanged = { [weak self] offset in
@ -174,7 +173,8 @@ final class PeerInfoMembersPaneNode: ASDisplayNode, PeerInfoPaneNode {
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition) {
let isFirstLayout = self.currentParams == nil
self.currentParams = (size, isScrollingLockedAtTop, presentationData)
self.currentParams = (size, isScrollingLockedAtTop)
self.presentationDataPromise.set(.single(presentationData))
transition.updateFrame(node: self.listNode, frame: CGRect(origin: CGPoint(), size: size))
let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition)
@ -200,7 +200,7 @@ final class PeerInfoMembersPaneNode: ASDisplayNode, PeerInfoPaneNode {
private func updateState(enclosingPeer: Peer, state: PeerInfoMembersState, presentationData: PresentationData) {
var entries: [PeerMembersListEntry] = []
for member in state.members {
entries.append(PeerMembersListEntry(index: entries.count, member: member))
entries.append(PeerMembersListEntry(theme: presentationData.theme, index: entries.count, member: member))
}
let transaction = preparedTransition(from: self.currentEntries, to: entries, context: self.context, presentationData: presentationData, enclosingPeer: enclosingPeer, action: { [weak self] member, action in
self?.action(member, action)
@ -212,7 +212,7 @@ final class PeerInfoMembersPaneNode: ASDisplayNode, PeerInfoPaneNode {
}
private func dequeueTransaction() {
guard let (layout, _, _) = self.currentParams, let transaction = self.enqueuedTransactions.first else {
guard let _ = self.currentParams, let transaction = self.enqueuedTransactions.first else {
return
}

View File

@ -781,9 +781,7 @@ final class PeerInfoVisualMediaPaneNode: ASDisplayNode, PeerInfoPaneNode, UIScro
var isReady: Signal<Bool, NoError> {
return self.ready.get()
}
let shouldReceiveExpandProgressUpdates: Bool = false
private let listDisposable = MetaDisposable()
private var hiddenMediaDisposable: Disposable?
private var mediaItems: [VisualMediaItem] = []

View File

@ -916,8 +916,10 @@ func peerInfoHeaderButtons(peer: Peer?, cachedData: CachedPeerData?, isOpenedFro
if let cachedUserData = cachedData as? CachedUserData {
callsAvailable = cachedUserData.voiceCallsAvailable
videoCallsAvailable = cachedUserData.videoCallsAvailable
} else {
callsAvailable = true
videoCallsAvailable = true
}
callsAvailable = true
}
if callsAvailable {
result.append(.call)

View File

@ -12,7 +12,6 @@ import ContextUI
protocol PeerInfoPaneNode: ASDisplayNode {
var isReady: Signal<Bool, NoError> { get }
var shouldReceiveExpandProgressUpdates: Bool { get }
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, visibleHeight: CGFloat, isScrollingLockedAtTop: Bool, expandProgress: CGFloat, presentationData: PresentationData, synchronous: Bool, transition: ContainedViewLayoutTransition)
func scrollToTop() -> Bool

View File

@ -2192,7 +2192,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
}
var items: [ActionSheetItem] = []
items.append(ActionSheetButtonItem(title: strongSelf.presentationData.strings.Settings_CancelUpload, color: .destructive, action: { [weak self] in
items.append(ActionSheetButtonItem(title: strongSelf.presentationData.strings.Settings_CancelUpload, color: .destructive, action: {
dismissAction()
proceed()
}))
@ -2782,15 +2782,6 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
self?.view.endEditing(true)
}, contentContext: nil)
})
let disposable = self.resolveUrlDisposable
let resolvedUrl: Signal<ResolvedUrl, NoError>
if external {
resolvedUrl = .single(.externalUrl(url))
} else {
resolvedUrl = self.context.sharedContext.resolveUrl(account: self.context.account, url: url)
}
}
private func openPeer(peerId: PeerId, navigation: ChatControllerInteractionNavigateToPeer) {
@ -2901,7 +2892,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
actionSheet?.dismissAnimated()
}
var items: [ActionSheetItem] = []
if !peerInfoHeaderButtons(peer: peer, cachedData: data.cachedData, isOpenedFromChat: self.isOpenedFromChat, videoCallsEnabled: self.videoCallsEnabled).contains(.search) || self.headerNode.isAvatarExpanded {
if !peerInfoHeaderButtons(peer: peer, cachedData: data.cachedData, isOpenedFromChat: self.isOpenedFromChat, videoCallsEnabled: self.videoCallsEnabled).contains(.search) || (self.headerNode.isAvatarExpanded && self.peerId.namespace == Namespaces.Peer.CloudUser) {
items.append(ActionSheetButtonItem(title: presentationData.strings.ChatSearch_SearchPlaceholder, color: .accent, action: { [weak self] in
dismissAction()
self?.openChatWithMessageSearch()
@ -4610,16 +4601,14 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
case .recentCalls:
self.controller?.push(CallListController(context: context, mode: .navigation))
case .devices:
if let settings = self.data?.globalSettings {
let _ = (self.activeSessionsContextAndCount.get()
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] activeSessionsContextAndCount in
if let strongSelf = self, let activeSessionsContextAndCount = activeSessionsContextAndCount {
let (activeSessionsContext, count, webSessionsContext) = activeSessionsContextAndCount
strongSelf.controller?.push(recentSessionsController(context: strongSelf.context, activeSessionsContext: activeSessionsContext, webSessionsContext: webSessionsContext, websitesOnly: false))
}
})
}
let _ = (self.activeSessionsContextAndCount.get()
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] activeSessionsContextAndCount in
if let strongSelf = self, let activeSessionsContextAndCount = activeSessionsContextAndCount {
let (activeSessionsContext, _, webSessionsContext) = activeSessionsContextAndCount
strongSelf.controller?.push(recentSessionsController(context: strongSelf.context, activeSessionsContext: activeSessionsContext, webSessionsContext: webSessionsContext, websitesOnly: false))
}
})
case .chatFolders:
self.controller?.push(chatListFilterPresetListController(context: self.context, mode: .default))
case .notificationsAndSounds:

View File

@ -736,21 +736,6 @@ final class PeerMessagesMediaPlaylist: SharedMediaPlaylist {
viewIndex = .lowerBound
}
return .single((nil, messages.count, false))
// return self.postbox.aroundMessageHistoryViewForLocation(.peer(peerId), anchor: viewIndex, count: 10, fixedCombinedReadStates: nil, topTaggedMessageIdNamespaces: [], tagMask: tagMask, namespaces: namespaces, orderStatistics: [])
// |> mapToSignal { view -> Signal<(Message, [Message])?, NoError> in
// let position: NavigatedMessageFromViewPosition
// switch navigation {
// case .later, .random:
// position = .earlier
// case .earlier:
// position = .later
// }
// if let (message, aroundMessages, _) = navigatedMessageFromView(view.0, anchorIndex: MessageIndex.absoluteLowerBound(), position: position) {
// return .single((message, aroundMessages))
// } else {
// return .single(nil)
// }
// }
} else {
if hasMore {
return .single((nil, messages.count, true))

View File

@ -77,7 +77,7 @@ private enum SharedMediaPlaybackItem: Equatable {
func togglePlayPause() {
switch self {
case let .audio(player):
player.togglePlayPause()
player.togglePlayPause(faded: true)
case let .instantVideo(node):
node.togglePlayPause()
}
@ -178,7 +178,9 @@ final class SharedMediaPlayer {
private var currentPrefetchItems: (SharedMediaPlaybackDataSource, SharedMediaPlaybackDataSource)?
private let prefetchDisposable = MetaDisposable()
init(mediaManager: MediaManager, inForeground: Signal<Bool, NoError>, account: Account, audioSession: ManagedAudioSession, overlayMediaManager: OverlayMediaManager, playlist: SharedMediaPlaylist, initialOrder: MusicPlaybackSettingsOrder, initialLooping: MusicPlaybackSettingsLooping, initialPlaybackRate: AudioPlaybackRate, playerIndex: Int32, controlPlaybackWithProximity: Bool) {
let type: MediaManagerPlayerType
init(mediaManager: MediaManager, inForeground: Signal<Bool, NoError>, account: Account, audioSession: ManagedAudioSession, overlayMediaManager: OverlayMediaManager, playlist: SharedMediaPlaylist, initialOrder: MusicPlaybackSettingsOrder, initialLooping: MusicPlaybackSettingsLooping, initialPlaybackRate: AudioPlaybackRate, playerIndex: Int32, controlPlaybackWithProximity: Bool, type: MediaManagerPlayerType) {
self.mediaManager = mediaManager
self.account = account
self.audioSession = audioSession
@ -189,6 +191,7 @@ final class SharedMediaPlayer {
self.playerIndex = playerIndex
self.playbackRate = initialPlaybackRate
self.controlPlaybackWithProximity = controlPlaybackWithProximity
self.type = type
if controlPlaybackWithProximity {
self.forceAudioToSpeaker = !DeviceProximityManager.shared().currentValue()

@ -1 +1 @@
Subproject commit db98670d64d24e4c093f49f5882d175673fd59f0
Subproject commit 2e909582b78f63911497dad823e5b5f247ecf1d9