Various fixes

Ilya Laktyushin 2023-09-03 21:14:01 +04:00
parent d685a5ad34
commit 92f974de06
13 changed files with 400 additions and 131 deletions

View File

@@ -52,18 +52,21 @@ private final class LegacyICloudFileController: LegacyController, UIDocumentPick
public enum LegacyICloudFilePickerMode {
case `default`
case `import`
case `export`
var documentPickerMode: UIDocumentPickerMode {
switch self {
case .default:
return .open
case .import:
return .import
case .default:
return .open
case .import:
return .import
case .export:
return .exportToService
}
}
}
public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudFilePickerMode = .default, documentTypes: [String] = ["public.item"], forceDarkTheme: Bool = false, dismissed: @escaping () -> Void = {}, completion: @escaping ([URL]) -> Void) -> ViewController {
public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudFilePickerMode = .default, url: URL? = nil, documentTypes: [String] = ["public.item"], forceDarkTheme: Bool = false, dismissed: @escaping () -> Void = {}, completion: @escaping ([URL]) -> Void) -> ViewController {
var dismissImpl: (() -> Void)?
let legacyController = LegacyICloudFileController(presentation: .modal(animateIn: true), theme: theme, completion: { urls in
dismissImpl?()
@@ -71,7 +74,16 @@ public func legacyICloudFilePicker(theme: PresentationTheme, mode: LegacyICloudF
})
legacyController.statusBar.statusBarStyle = .Black
let controller = DocumentPickerViewController(documentTypes: documentTypes, in: mode.documentPickerMode)
let controller: DocumentPickerViewController
if case .export = mode, let url {
if #available(iOS 14.0, *) {
controller = DocumentPickerViewController(forExporting: [url], asCopy: true)
} else {
controller = DocumentPickerViewController(url: url, in: mode.documentPickerMode)
}
} else {
controller = DocumentPickerViewController(documentTypes: documentTypes, in: mode.documentPickerMode)
}
controller.forceDarkTheme = forceDarkTheme
controller.didDisappear = {
dismissImpl?()
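
Note: this file's hunks add an export mode to the picker, backed by the iOS 14 export initializer with a fallback to the deprecated init(url:in:) on older systems. A minimal self-contained sketch of that availability pattern (makeExportPicker is a hypothetical helper, not part of the commit):

import UIKit

// Sketch of the export fallback used above. iOS 14 introduced the
// non-deprecated initializer; earlier systems fall back to
// init(url:in:) with the .exportToService mode.
func makeExportPicker(for url: URL) -> UIDocumentPickerViewController {
    if #available(iOS 14.0, *) {
        // asCopy: true copies the file to the destination the user picks.
        return UIDocumentPickerViewController(forExporting: [url], asCopy: true)
    } else {
        return UIDocumentPickerViewController(url: url, in: .exportToService)
    }
}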

View File

@@ -167,6 +167,7 @@ public final class ChatControllerInteraction {
public let openWebView: (String, String, Bool, ChatOpenWebViewSource) -> Void
public let activateAdAction: (EngineMessage.Id) -> Void
public let openRequestedPeerSelection: (EngineMessage.Id, ReplyMarkupButtonRequestPeerType, Int32) -> Void
public let saveMediaToFiles: (EngineMessage.Id) -> Void
public let requestMessageUpdate: (MessageId, Bool) -> Void
public let cancelInteractiveKeyboardGestures: () -> Void
@@ -280,6 +281,7 @@ public final class ChatControllerInteraction {
openWebView: @escaping (String, String, Bool, ChatOpenWebViewSource) -> Void,
activateAdAction: @escaping (EngineMessage.Id) -> Void,
openRequestedPeerSelection: @escaping (EngineMessage.Id, ReplyMarkupButtonRequestPeerType, Int32) -> Void,
saveMediaToFiles: @escaping (EngineMessage.Id) -> Void,
requestMessageUpdate: @escaping (MessageId, Bool) -> Void,
cancelInteractiveKeyboardGestures: @escaping () -> Void,
dismissTextInput: @escaping () -> Void,
@@ -375,6 +377,7 @@ public final class ChatControllerInteraction {
self.openWebView = openWebView
self.activateAdAction = activateAdAction
self.openRequestedPeerSelection = openRequestedPeerSelection
self.saveMediaToFiles = saveMediaToFiles
self.requestMessageUpdate = requestMessageUpdate
self.cancelInteractiveKeyboardGestures = cancelInteractiveKeyboardGestures
self.dismissTextInput = dismissTextInput

View File

@@ -617,7 +617,12 @@ public final class MediaEditor {
if self.didPlayToEndTimeObserver == nil {
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: observedPlayer.currentItem, queue: nil, using: { [weak self] notification in
if let self {
let start = self.values.videoTrimRange?.lowerBound ?? 0.0
var start: Double
if self.sourceIsVideo {
start = self.values.videoTrimRange?.lowerBound ?? 0.0
} else {
start = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
}
let targetTime = CMTime(seconds: start, preferredTimescale: CMTimeScale(1000))
self.player?.seek(to: targetTime)
self.additionalPlayer?.seek(to: targetTime)
@@ -626,19 +631,23 @@ public final class MediaEditor {
self.player?.play()
self.additionalPlayer?.play()
let audioTime = self.audioTime(for: targetTime)
if let audioDelay = self.audioDelay(for: targetTime) {
self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
self?.audioPlayer?.seek(to: audioTime)
self?.audioPlayer?.play()
}, queue: Queue.mainQueue())
self.audioDelayTimer?.start()
if self.sourceIsVideo {
let audioTime = self.audioTime(for: targetTime)
if let audioDelay = self.audioDelay(for: targetTime) {
self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
self?.audioPlayer?.seek(to: audioTime)
self?.audioPlayer?.play()
}, queue: Queue.mainQueue())
self.audioDelayTimer?.start()
} else {
self.audioPlayer?.seek(to: audioTime)
self.audioPlayer?.play()
}
} else {
self.audioPlayer?.seek(to: audioTime)
self.audioPlayer?.seek(to: targetTime)
self.audioPlayer?.play()
}
Queue.mainQueue().justDispatch {
self.onPlaybackAction(.play)
}
@@ -761,15 +770,20 @@ public final class MediaEditor {
self.player?.play()
self.additionalPlayer?.play()
let audioTime = self.audioTime(for: targetPosition)
if let audioDelay = self.audioDelay(for: targetPosition) {
self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
self?.audioPlayer?.play()
}, queue: Queue.mainQueue())
self.audioDelayTimer?.start()
if self.sourceIsVideo {
let audioTime = self.audioTime(for: targetPosition)
if let audioDelay = self.audioDelay(for: targetPosition) {
self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
self?.audioPlayer?.play()
}, queue: Queue.mainQueue())
self.audioDelayTimer?.start()
} else {
self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.play()
}
} else {
self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.play()
}
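
Note: both hunks make loop restarts and seeks branch on sourceIsVideo. For video sources the external audio track keeps the existing delay-timer path; for audio-only editing the audio player simply follows the seek target. A self-contained sketch of the delayed-start idea, with DispatchQueue standing in for SwiftSignalKit.Timer and audioStartOffset modeling what audioDelay(for:)/audioTime(for:) derive (both substitutions are assumptions for illustration):

import Foundation
import AVFoundation

final class PlaybackPair {
    let video = AVPlayer()
    let audio = AVPlayer()
    // Point in the video timeline at which the audio track enters.
    var audioStartOffset: Double = 0.0

    func restart(at seconds: Double, sourceIsVideo: Bool) {
        let target = CMTime(seconds: seconds, preferredTimescale: 1000)
        video.seek(to: target)
        video.play()
        guard sourceIsVideo else {
            // Audio-only editing: the audio player follows the target directly.
            audio.seek(to: target)
            audio.play()
            return
        }
        let delay = audioStartOffset - seconds
        if delay > 0 {
            // The audio enters later than the restart point: wait, then start it.
            DispatchQueue.main.asyncAfter(deadline: .now() + delay) { [weak self] in
                self?.audio.seek(to: .zero)
                self?.audio.play()
            }
        } else {
            audio.seek(to: CMTime(seconds: -delay, preferredTimescale: 1000))
            audio.play()
        }
    }
}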

View File

@@ -3087,34 +3087,93 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}
let path = url.path
let fileName = "audio_\(url.lastPathComponent)"
let copyPath = fullDraftPath(peerId: self.context.account.peerId, path: fileName)
try? FileManager.default.copyItem(atPath: path, toPath: copyPath)
let isScopedResource = url.startAccessingSecurityScopedResource()
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: copyPath))
var artist: String?
var title: String?
for data in audioAsset.commonMetadata {
if data.commonKey == .commonKeyArtist {
artist = data.stringValue
let coordinator = NSFileCoordinator(filePresenter: nil)
var error: NSError?
coordinator.coordinate(readingItemAt: url, options: .forUploading, error: &error, byAccessor: { sourceUrl in
let path = sourceUrl.path
let fileName = "audio_\(sourceUrl.lastPathComponent)"
let copyPath = fullDraftPath(peerId: self.context.account.peerId, path: fileName)
try? FileManager.default.removeItem(atPath: copyPath)
do {
try FileManager.default.copyItem(atPath: path, toPath: copyPath)
} catch let e {
Logger.shared.log("MediaEditor", "copy file error \(e)")
if isScopedResource {
sourceUrl.stopAccessingSecurityScopedResource()
}
return
}
if data.commonKey == .commonKeyTitle {
title = data.stringValue
Queue.mainQueue().async {
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: copyPath))
func loadValues(asset: AVAsset, retryCount: Int, completion: @escaping () -> Void) {
asset.loadValuesAsynchronously(forKeys: ["tracks", "duration"], completionHandler: {
if asset.statusOfValue(forKey: "tracks", error: nil) == .loading {
if retryCount < 2 {
Queue.mainQueue().after(0.1, {
loadValues(asset: asset, retryCount: retryCount + 1, completion: completion)
})
} else {
completion()
}
} else {
completion()
}
})
}
loadValues(asset: audioAsset, retryCount: 0, completion: {
var audioDuration: Double = 0.0
guard let track = audioAsset.tracks(withMediaType: .audio).first else {
Logger.shared.log("MediaEditor", "track is nil")
if isScopedResource {
sourceUrl.stopAccessingSecurityScopedResource()
}
return
}
audioDuration = track.timeRange.duration.seconds
if audioDuration.isZero {
Logger.shared.log("MediaEditor", "duration is zero")
if isScopedResource {
sourceUrl.stopAccessingSecurityScopedResource()
}
return
}
var artist: String?
var title: String?
for data in audioAsset.commonMetadata {
if data.commonKey == .commonKeyArtist {
artist = data.stringValue
}
if data.commonKey == .commonKeyTitle {
title = data.stringValue
}
}
Queue.mainQueue().async {
mediaEditor.setAudioTrack(MediaAudioTrack(path: fileName, artist: artist, title: title, duration: audioDuration))
if mediaEditor.sourceIsVideo {
if let videoDuration = mediaEditor.duration {
mediaEditor.setAudioTrackTrimRange(0 ..< min(videoDuration, audioDuration), apply: true)
}
} else {
mediaEditor.setAudioTrackTrimRange(0 ..< min(15, audioDuration), apply: true)
}
self.requestUpdate(transition: .easeInOut(duration: 0.2))
if isScopedResource {
sourceUrl.stopAccessingSecurityScopedResource()
}
}
})
}
}
let audioDuration = audioAsset.duration.seconds
mediaEditor.setAudioTrack(MediaAudioTrack(path: fileName, artist: artist, title: title, duration: audioDuration))
if mediaEditor.sourceIsVideo {
if let videoDuration = mediaEditor.duration {
mediaEditor.setAudioTrackTrimRange(0 ..< min(videoDuration, audioDuration), apply: true)
}
} else {
mediaEditor.setAudioTrackTrimRange(0 ..< min(15, audioDuration), apply: true)
}
self.requestUpdate(transition: .easeInOut(duration: 0.2))
})
}), in: .window(.root))
}
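
Note: the import path now reads the picked file inside an NSFileCoordinator block under security-scoped access, copies it into the draft directory before touching it with AVFoundation, and briefly retries AVAsset key loading before giving up. A minimal sketch of the coordinated copy; unlike the hunk it balances start/stop on the same URL via defer, and the destination path is a caller-supplied placeholder (the commit builds it with fullDraftPath(peerId:path:)):

import Foundation

func importPickedFile(from url: URL, to destinationPath: String) -> Bool {
    let isScoped = url.startAccessingSecurityScopedResource()
    defer {
        if isScoped {
            url.stopAccessingSecurityScopedResource()
        }
    }
    var coordinationError: NSError?
    var copied = false
    NSFileCoordinator(filePresenter: nil).coordinate(readingItemAt: url, options: .forUploading, error: &coordinationError, byAccessor: { sourceUrl in
        // Replace any stale copy, then copy while access is coordinated.
        try? FileManager.default.removeItem(atPath: destinationPath)
        do {
            try FileManager.default.copyItem(atPath: sourceUrl.path, toPath: destinationPath)
            copied = true
        } catch {
            print("copy failed: \(error)")
        }
    })
    return copied && coordinationError == nil
}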

View File

@@ -442,13 +442,12 @@ final class VideoScrubberComponent: Component {
self.component = component
self.state = state
var trimDuration = component.duration
var animateAudioAppearance = false
if let previousComponent {
if previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
// if !component.audioOnly {
// self.isAudioSelected = true
// }
animateAudioAppearance = true
} else if previousComponent.audioData != nil, component.audioData == nil {
self.positionAnimation = nil
@@ -480,6 +479,8 @@ final class VideoScrubberComponent: Component {
var audioAlpha: CGFloat = 0.0
if let audioData = component.audioData {
if component.audioOnly {
trimDuration = min(30.0, audioData.duration)
audioScrubberHeight = scrubberHeight
audioAlpha = 1.0
} else {
@@ -493,11 +494,10 @@ final class VideoScrubberComponent: Component {
audioScrubberHeight = scrubberHeight
videoScrubberHeight = collapsedScrubberHeight
}
if component.duration > 0.0 {
let audioFraction = audioData.duration / component.duration
audioTotalWidth = ceil(totalWidth * audioFraction)
}
}
if trimDuration > 0.0 {
let audioFraction = audioData.duration / trimDuration
audioTotalWidth = ceil(totalWidth * audioFraction)
}
} else {
self.isAudioSelected = false
@@ -524,8 +524,8 @@ final class VideoScrubberComponent: Component {
}
}
if !self.isAudioSelected {
if let _ = component.audioData, !component.audioOnly {
if !self.isAudioSelected && !component.audioOnly {
if let _ = component.audioData {
audioClipOrigin = deselectedAudioClipOrigin
audioClipWidth = deselectedAudioClipWidth
} else {
@@ -538,7 +538,7 @@ final class VideoScrubberComponent: Component {
audioTransition.setFrame(view: self.audioClippingView, frame: audioClippingFrame)
audioTransition.setBounds(view: self.audioClippingView, bounds: audioClippingBounds)
self.audioScrollView.isUserInteractionEnabled = self.isAudioSelected
self.audioScrollView.isUserInteractionEnabled = self.isAudioSelected || component.audioOnly
audioTransition.setFrame(view: self.audioScrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: availableSize.width, height: audioScrubberHeight)))
self.audioScrollView.contentSize = CGSize(width: audioTotalWidth, height: audioScrubberHeight)
@@ -553,15 +553,13 @@ final class VideoScrubberComponent: Component {
audioTransition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioContainerFrame.size))
let containerFrame = CGRect(origin: .zero, size: CGSize(width: audioClipWidth, height: audioContainerFrame.height))
var contentContainerOrigin = deselectedAudioClipOrigin + self.audioScrollView.contentOffset.x
if self.isAudioSelected {
contentContainerOrigin -= 6.0
}
let contentContainerOrigin = deselectedAudioClipOrigin + self.audioScrollView.contentOffset.x
audioTransition.setFrame(view: self.audioContentContainerView, frame: containerFrame.offsetBy(dx: contentContainerOrigin, dy: 0.0))
audioTransition.setFrame(view: self.audioContentMaskView, frame: CGRect(origin: .zero, size: containerFrame.size))
if let audioData = component.audioData, !component.audioOnly {
var components: [String] = []
var components: [String] = []
var trackTitle = ""
if let audioData = component.audioData {
if let artist = audioData.artist {
components.append(artist)
}
@@ -571,53 +569,52 @@ final class VideoScrubberComponent: Component {
if components.isEmpty {
components.append("Audio")
}
let audioTitle = NSAttributedString(string: components.joined(separator: ""), font: Font.semibold(13.0), textColor: .white)
let audioTitleSize = self.audioTitle.update(
transition: transition,
component: AnyComponent(
MultilineTextComponent(
text: .plain(audioTitle)
)
),
environment: {},
containerSize: availableSize
)
let spacing: CGFloat = 4.0
let iconSize = CGSize(width: 14.0, height: 14.0)
let totalWidth = iconSize.width + audioTitleSize.width + spacing
audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((audioClipWidth - totalWidth) / 2.0)), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
if let view = self.audioTitle.view {
if view.superview == nil {
view.alpha = 0.0
view.isUserInteractionEnabled = false
self.audioContainerView.addSubview(self.audioContentContainerView)
self.audioContentContainerView.addSubview(self.audioIconView)
self.audioContentContainerView.addSubview(view)
}
audioTransition.setAlpha(view: view, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
audioTransition.setPosition(view: view, position: audioTitleFrame.center)
}
} else {
audioTransition.setAlpha(view: self.audioIconView, alpha: 0.0)
if let view = self.audioTitle.view {
audioTransition.setAlpha(view: view, alpha: 0.0)
}
trackTitle = components.joined(separator: "")
}
let audioTitle = NSAttributedString(string: trackTitle, font: Font.semibold(13.0), textColor: .white)
let audioTitleSize = self.audioTitle.update(
transition: transition,
component: AnyComponent(
MultilineTextComponent(
text: .plain(audioTitle)
)
),
environment: {},
containerSize: availableSize
)
let spacing: CGFloat = 4.0
let iconSize = CGSize(width: 14.0, height: 14.0)
let contentTotalWidth = iconSize.width + audioTitleSize.width + spacing
audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((deselectedAudioClipWidth - contentTotalWidth) / 2.0)), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
let trackTitleIsVisible = !self.isAudioSelected && !component.audioOnly && !trackTitle.isEmpty
if let view = self.audioTitle.view {
if view.superview == nil {
view.alpha = 0.0
view.isUserInteractionEnabled = false
self.audioContainerView.addSubview(self.audioContentContainerView)
self.audioContentContainerView.addSubview(self.audioIconView)
self.audioContentContainerView.addSubview(view)
}
audioTransition.setAlpha(view: view, alpha: trackTitleIsVisible ? 1.0 : 0.0)
let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
view.bounds = CGRect(origin: .zero, size: audioTitleFrame.size)
audioTransition.setPosition(view: view, position: audioTitleFrame.center)
}
audioTransition.setAlpha(view: self.audioIconView, alpha: trackTitleIsVisible ? 1.0 : 0.0)
if let audioData = component.audioData {
let samples = audioData.samples ?? Data()
if let view = self.audioWaveform.view, previousComponent?.audioData?.samples == nil && audioData.samples != nil, let snapshotView = view.snapshotView(afterScreenUpdates: false) {
if let view = self.audioWaveform.view, previousComponent?.audioData?.samples == nil && audioData.samples != nil, let snapshotView = view.snapshotContentTree() {
snapshotView.frame = view.frame
self.audioVibrancyContainer.addSubview(snapshotView)
@@ -653,7 +650,6 @@ final class VideoScrubberComponent: Component {
audioTransition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected || component.audioOnly ? 0.0 : 6.0), size: audioWaveformSize))
}
}
self.cursorView.isHidden = component.audioOnly
let bounds = CGRect(origin: .zero, size: scrubberSize)
@@ -688,7 +684,7 @@ final class VideoScrubberComponent: Component {
var startPosition = component.startPosition
var endPosition = component.endPosition
if self.isAudioSelected, let audioData = component.audioData {
if self.isAudioSelected || component.audioOnly, let audioData = component.audioData {
if let start = audioData.start {
startPosition = start
}
@@ -697,11 +693,11 @@ final class VideoScrubberComponent: Component {
}
}
self.trimView.isHollow = self.isAudioSelected
self.trimView.isHollow = self.isAudioSelected || component.audioOnly
let (leftHandleFrame, rightHandleFrame) = self.trimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
duration: component.duration,
duration: trimDuration,
startPosition: startPosition,
endPosition: endPosition,
position: component.position,
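
Note: in audio-only mode the scrubber now derives its geometry from a trim window capped at 30 seconds (trimDuration) instead of the video duration, keeps the audio scroll view interactive, and hides the track-title row. A small sketch of the width math, with hypothetical parameter names:

import CoreGraphics

// Hypothetical helper illustrating the change above: the waveform width
// is computed against the trim window, so audio-only editing shows at
// most a 30-second window regardless of track length.
func audioContentWidth(totalWidth: CGFloat, audioDuration: Double, videoDuration: Double, audioOnly: Bool) -> CGFloat {
    let trimDuration = audioOnly ? min(30.0, audioDuration) : videoDuration
    guard trimDuration > 0.0 else {
        return totalWidth
    }
    return ceil(totalWidth * CGFloat(audioDuration / trimDuration))
}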

View File

@@ -1027,7 +1027,11 @@ final class ShareWithPeersScreenComponent: Component {
if case .user = peer {
subtitle = environment.strings.VoiceChat_PersonalAccount
} else {
subtitle = environment.strings.Channel_Status
if let count = component.stateContext.stateValue?.participants[peer.id] {
subtitle = environment.strings.Conversation_StatusSubscribers(Int32(count))
} else {
subtitle = environment.strings.Channel_Status
}
}
var isStories = false
@@ -2657,20 +2661,47 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer {
switch subject {
case let .peers(peers, _):
let state = State(
sendAsPeers: peers,
peers: [],
peersMap: [:],
savedSelectedPeers: [:],
presences: [:],
participants: [:],
closeFriendsPeers: [],
grayListPeers: []
)
self.stateValue = state
self.stateSubject.set(.single(state))
self.stateDisposable = (.single(peers)
|> mapToSignal { peers -> Signal<([EnginePeer], [EnginePeer.Id: Optional<Int>]), NoError> in
return context.engine.data.subscribe(
EngineDataMap(peers.map(\.id).map(TelegramEngine.EngineData.Item.Peer.ParticipantCount.init))
)
|> map { participantCountMap -> ([EnginePeer], [EnginePeer.Id: Optional<Int>]) in
return (peers, participantCountMap)
}
}
|> deliverOnMainQueue).start(next: { [weak self] peers, participantCounts in
guard let self else {
return
}
var participants: [EnginePeer.Id: Int] = [:]
for (key, value) in participantCounts {
if let value {
participants[key] = value
}
}
let state = State(
sendAsPeers: peers,
peers: [],
peersMap: [:],
savedSelectedPeers: [:],
presences: [:],
participants: participants,
closeFriendsPeers: [],
grayListPeers: []
)
self.stateValue = state
self.stateSubject.set(.single(state))
for peer in peers {
if case let .channel(channel) = peer, participants[channel.id] == nil {
let _ = context.engine.peers.fetchAndUpdateCachedPeerData(peerId: channel.id).start()
}
}
self.readySubject.set(true)
self.readySubject.set(true)
})
case .stories:
let savedEveryoneExceptionPeers = peersListStoredState(engine: context.engine, base: .everyone)
let savedContactsExceptionPeers = peersListStoredState(engine: context.engine, base: .contacts)
@@ -2742,20 +2773,38 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer {
)
}
}
let adminedChannelsWithParticipants = adminedChannels
|> mapToSignal { peers -> Signal<([EnginePeer], [EnginePeer.Id: Optional<Int>]), NoError> in
return context.engine.data.subscribe(
EngineDataMap(peers.map(\.id).map(TelegramEngine.EngineData.Item.Peer.ParticipantCount.init))
)
|> map { participantCountMap -> ([EnginePeer], [EnginePeer.Id: Optional<Int>]) in
return (peers, participantCountMap)
}
}
self.stateDisposable = combineLatest(
queue: Queue.mainQueue(),
context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: context.account.peerId)),
adminedChannels,
adminedChannelsWithParticipants,
savedPeers,
closeFriends,
grayListPeers
)
.start(next: { [weak self] accountPeer, adminedChannels, savedPeers, closeFriends, grayListPeers in
.start(next: { [weak self] accountPeer, adminedChannelsWithParticipants, savedPeers, closeFriends, grayListPeers in
guard let self else {
return
}
let (adminedChannels, participantCounts) = adminedChannelsWithParticipants
var participants: [EnginePeer.Id: Int] = [:]
for (key, value) in participantCounts {
if let value {
participants[key] = value
}
}
var sendAsPeers: [EnginePeer] = []
if let accountPeer {
sendAsPeers.append(accountPeer)
@@ -2773,12 +2822,18 @@ public class ShareWithPeersScreen: ViewControllerComponentContainer {
peersMap: peersMap,
savedSelectedPeers: savedSelectedPeers,
presences: [:],
participants: [:],
participants: participants,
closeFriendsPeers: closeFriends,
grayListPeers: grayListPeers
)
self.stateValue = state
self.stateSubject.set(.single(state))
for peer in adminedChannels {
if case let .channel(channel) = peer, participants[channel.id] == nil {
let _ = context.engine.peers.fetchAndUpdateCachedPeerData(peerId: channel.id).start()
}
}
self.readySubject.set(true)
})
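
Note: both state paths now subscribe to TelegramEngine.EngineData.Item.Peer.ParticipantCount for the listed peers, compact the optional results into the participants map, and trigger fetchAndUpdateCachedPeerData for channels whose count is not cached yet. A self-contained sketch of the compaction step (the helper is illustrative, not from the commit):

// EngineDataMap yields Optional counts (nil when nothing is cached);
// non-nil values go into the state, and the ids that stay nil are
// reported so a cache refresh can be kicked off for them.
func compactCounts<Id: Hashable>(_ raw: [Id: Int?]) -> (counts: [Id: Int], missing: [Id]) {
    var counts: [Id: Int] = [:]
    var missing: [Id] = []
    for (id, value) in raw {
        if let value {
            counts[id] = value
        } else {
            missing.append(id)
        }
    }
    return (counts, missing)
}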

View File

@@ -269,7 +269,7 @@ private final class VolumeSliderContextItemNode: ASDisplayNode, ContextMenuCusto
@objc private func tapGesture(_ gestureRecognizer: UITapGestureRecognizer) {
let location = gestureRecognizer.location(in: gestureRecognizer.view)
self.value = max(self.minValue, min(2.0, location.x / self.bounds.width * 2.0))
self.value = max(self.minValue, min(1.0, location.x / self.bounds.width))
self.valueChanged(self.value, true)
}
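
Note: this corrects the tap-to-value mapping so a tap at fraction f of the slider width yields f clamped to [minValue, 1.0], instead of the previous 2f clamped to 2.0. As a standalone sketch (the function name is hypothetical):

import CoreGraphics

// Corrected mapping: tap position scales linearly into [minValue, 1].
func sliderValue(tapX: CGFloat, width: CGFloat, minValue: CGFloat) -> CGFloat {
    return max(minValue, min(1.0, tapX / width))
}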

View File

@@ -101,6 +101,7 @@ import ChatAvatarNavigationNode
import ChatContextQuery
import PeerReportScreen
import PeerSelectionController
import SaveToCameraRoll
#if DEBUG
import os.signpost
@@ -317,6 +318,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
private var sendAsPeersDisposable: Disposable?
private var preloadAttachBotIconsDisposables: DisposableSet?
private var keepMessageCountersSyncrhonizedDisposable: Disposable?
private var saveMediaDisposable: MetaDisposable?
private let editingMessage = ValuePromise<Float?>(nil, ignoreRepeated: true)
private let startingBot = ValuePromise<Bool>(false, ignoreRepeated: true)
@@ -4510,6 +4512,113 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
}
}
self.push(controller)
}, saveMediaToFiles: { [weak self] messageId in
let _ = (context.engine.data.get(TelegramEngine.EngineData.Item.Messages.Message(id: messageId))
|> deliverOnMainQueue).start(next: { message in
guard let self, let message else {
return
}
var file: TelegramMediaFile?
for media in message.media {
if let mediaFile = media as? TelegramMediaFile, mediaFile.isMusic {
file = mediaFile
}
}
guard let file else {
return
}
var signal = fetchMediaData(context: context, postbox: context.account.postbox, userLocation: .other, mediaReference: .message(message: MessageReference(message._asMessage()), media: file))
let disposable: MetaDisposable
if let current = self.saveMediaDisposable {
disposable = current
} else {
disposable = MetaDisposable()
self.saveMediaDisposable = disposable
}
var cancelImpl: (() -> Void)?
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
let progressSignal = Signal<Never, NoError> { [weak self] subscriber in
guard let self else {
return EmptyDisposable
}
let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
cancelImpl?()
}))
self.present(controller, in: .window(.root), with: ViewControllerPresentationArguments(presentationAnimation: .modalSheet))
return ActionDisposable { [weak controller] in
Queue.mainQueue().async() {
controller?.dismiss()
}
}
}
|> runOn(Queue.mainQueue())
|> delay(0.15, queue: Queue.mainQueue())
let progressDisposable = progressSignal.start()
signal = signal
|> afterDisposed {
Queue.mainQueue().async {
progressDisposable.dispose()
}
}
cancelImpl = { [weak disposable] in
disposable?.set(nil)
}
disposable.set((signal
|> deliverOnMainQueue).start(next: { [weak self] state, _ in
guard let self else {
return
}
switch state {
case .progress:
break
case let .data(data):
if data.complete {
var symlinkPath = data.path + ".mp3"
if fileSize(symlinkPath) != nil {
try? FileManager.default.removeItem(atPath: symlinkPath)
}
let _ = try? FileManager.default.linkItem(atPath: data.path, toPath: symlinkPath)
let audioUrl = URL(fileURLWithPath: symlinkPath)
let audioAsset = AVURLAsset(url: audioUrl)
var nameComponents: [String] = []
var artist: String?
var title: String?
for data in audioAsset.commonMetadata {
if data.commonKey == .commonKeyArtist {
artist = data.stringValue
}
if data.commonKey == .commonKeyTitle {
title = data.stringValue
}
}
if let artist, !artist.isEmpty {
nameComponents.append(artist)
}
if let title, !title.isEmpty {
nameComponents.append(title)
}
if !nameComponents.isEmpty {
try? FileManager.default.removeItem(atPath: symlinkPath)
let filename = "\(nameComponents.joined(separator: " ")).mp3"
symlinkPath = symlinkPath.replacingOccurrences(of: audioUrl.lastPathComponent, with: filename)
let _ = try? FileManager.default.linkItem(atPath: data.path, toPath: symlinkPath)
}
let url = URL(fileURLWithPath: symlinkPath)
let controller = legacyICloudFilePicker(theme: self.presentationData.theme, mode: .export, url: url, documentTypes: [], forceDarkTheme: false, dismissed: {}, completion: { _ in
})
self.present(controller, in: .window(.root))
}
}
}))
})
}, requestMessageUpdate: { [weak self] id, scroll in
if let self {
self.chatDisplayNode.historyNode.requestMessageUpdate(id, andScrollToItem: scroll)
@@ -6385,6 +6494,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
self.translationStateDisposable?.dispose()
self.premiumGiftSuggestionDisposable?.dispose()
self.powerSavingMonitoringDisposable?.dispose()
self.saveMediaDisposable?.dispose()
}
deallocate()
}
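
Note: saveMediaToFiles fetches the music file's data, then exposes the opaque cache file under a readable name before handing it to the export picker. Because the cache path carries no extension or title, the code creates a hard link named from the asset's artist/title metadata, falling back to an .mp3-suffixed link. A condensed sketch of that naming step (helper name and fallback behavior are illustrative):

import Foundation

// linkItem creates a second directory entry for the same data, so no
// bytes are copied; any stale link is removed first.
func exportableURL(forCachedFile path: String, artist: String?, title: String?) -> URL {
    let components = [artist, title].compactMap { $0 }.filter { !$0.isEmpty }
    let baseName = components.isEmpty ? (path as NSString).lastPathComponent : components.joined(separator: " ")
    let directory = (path as NSString).deletingLastPathComponent as NSString
    let linkPath = directory.appendingPathComponent(baseName + ".mp3")
    try? FileManager.default.removeItem(atPath: linkPath)
    try? FileManager.default.linkItem(atPath: path, toPath: linkPath)
    return URL(fileURLWithPath: linkPath)
}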

View File

@@ -1171,6 +1171,20 @@ func contextMenuForChatPresentationInterfaceState(chatPresentationInterfaceState
}
}
for media in message.media {
if let file = media as? TelegramMediaFile {
if file.isMusic {
actions.append(.action(ContextMenuActionItem(text: "Save to Files", icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Save"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
controllerInteraction.saveMediaToFiles(message.id)
f(.default)
})))
}
break
}
}
if (loggingSettings.logToFile || loggingSettings.logToConsole) && !downloadableMediaResourceInfos.isEmpty {
actions.append(.action(ContextMenuActionItem(text: "Send Logs", icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Message"), color: theme.actionSheet.primaryTextColor)

View File

@@ -556,6 +556,7 @@ final class ChatRecentActionsControllerNode: ViewControllerTracingNode {
}, openWebView: { _, _, _, _ in
}, activateAdAction: { _ in
}, openRequestedPeerSelection: { _, _, _ in
}, saveMediaToFiles: { _ in
}, requestMessageUpdate: { _, _ in
}, cancelInteractiveKeyboardGestures: {
}, dismissTextInput: {
@@ -1065,6 +1066,8 @@ final class ChatRecentActionsControllerNode: ViewControllerTracingNode {
break
case .startAttach:
break
case .boost:
break
}
}
}))

View File

@@ -165,6 +165,7 @@ final class OverlayAudioPlayerControllerNode: ViewControllerTracingNode, UIGestu
}, openWebView: { _, _, _, _ in
}, activateAdAction: { _ in
}, openRequestedPeerSelection: { _, _, _ in
}, saveMediaToFiles: { _ in
}, requestMessageUpdate: { _, _ in
}, cancelInteractiveKeyboardGestures: {
}, dismissTextInput: {

View File

@@ -2854,6 +2854,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
}, openWebView: { _, _, _, _ in
}, activateAdAction: { _ in
}, openRequestedPeerSelection: { _, _, _ in
}, saveMediaToFiles: { _ in
}, requestMessageUpdate: { _, _ in
}, cancelInteractiveKeyboardGestures: {
}, dismissTextInput: {

View File

@@ -1537,6 +1537,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
}, openWebView: { _, _, _, _ in
}, activateAdAction: { _ in
}, openRequestedPeerSelection: { _, _, _ in
}, saveMediaToFiles: { _ in
}, requestMessageUpdate: { _, _ in
}, cancelInteractiveKeyboardGestures: {
}, dismissTextInput: {