Merge branch 'experiments/metal-background'

# Conflicts:
#	submodules/TelegramCore/Sources/ApiUtils/TelegramMediaAction.swift
#	submodules/TelegramUI/BUILD
commit e96d8557dd
Author: Ali
Date:   2022-05-14 00:10:29 +04:00

41 changed files with 1364 additions and 83 deletions

View File

@@ -1798,6 +1798,8 @@ plist_fragment(
 	<string>We need this so that you can share photos and videos from your photo library.</string>
 	<key>NSSiriUsageDescription</key>
 	<string>You can use Siri to send messages.</string>
+	<key>NSSpeechRecognitionUsageDescription</key>
+	<string>We need this to transcribe audio messages on your request.</string>
 	<key>NSUserActivityTypes</key>
 	<array>
 	<string>INSendMessageIntent</string>

Binary file not shown. (Before: 2.3 KiB, After: 9.8 KiB)

Binary file not shown. (Before: 4.1 KiB, After: 12 KiB)

View File

@@ -661,7 +661,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
                 "Arrow1.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
                 "Arrow2.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
             ],
-            loop: true
+            mode: .animating(loop: true)
         )
         progressValue = progress
@@ -682,7 +682,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
                 "Arrow1.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
                 "Arrow2.Union.Fill 1": strongSelf.presentationData.theme.rootController.navigationSearchBar.inputFillColor.blitOver(strongSelf.presentationData.theme.rootController.navigationBar.opaqueBackgroundColor, alpha: 1.0),
             ],
-            loop: false
+            mode: .animating(loop: false)
         )
         progressValue = 1.0

View File

@@ -20,7 +20,7 @@ public final class BundleIconComponent: Component {
         if lhs.tintColor != rhs.tintColor {
             return false
         }
-        return false
+        return true
     }

     public final class View: UIImageView {

View File

@@ -6,16 +6,20 @@ import HierarchyTrackingLayer
 public final class LottieAnimationComponent: Component {
     public struct Animation: Equatable {
+        public enum Mode: Equatable {
+            case still
+            case animating(loop: Bool)
+            case animateTransitionFromPrevious
+        }
+
         public var name: String
-        public var loop: Bool
-        public var isAnimating: Bool
+        public var mode: Mode
         public var colors: [String: UIColor]

-        public init(name: String, colors: [String: UIColor], loop: Bool, isAnimating: Bool = true) {
+        public init(name: String, colors: [String: UIColor], mode: Mode) {
             self.name = name
             self.colors = colors
-            self.loop = loop
-            self.isAnimating = isAnimating
+            self.mode = mode
         }
     }
@@ -55,6 +59,7 @@ public final class LottieAnimationComponent: Component {
         private var colorCallbacks: [LOTColorValueCallback] = []
         private var animationView: LOTAnimationView?
+        private var didPlayToCompletion: Bool = false

         private let hierarchyTrackingLayer: HierarchyTrackingLayer
@@ -100,12 +105,22 @@ public final class LottieAnimationComponent: Component {
         }

         func update(component: LottieAnimationComponent, availableSize: CGSize, transition: Transition) -> CGSize {
+            var updatePlayback = false
+
             if self.component?.animation != component.animation {
+                if let animationView = self.animationView {
+                    if case .animateTransitionFromPrevious = component.animation.mode, !animationView.isAnimationPlaying, !self.didPlayToCompletion {
+                        animationView.play { _ in
+                        }
+                    }
+                }
+
                 if let animationView = self.animationView, animationView.isAnimationPlaying {
                     animationView.completionBlock = { [weak self] _ in
                         guard let strongSelf = self else {
                             return
                         }
+                        strongSelf.didPlayToCompletion = true
                         let _ = strongSelf.update(component: component, availableSize: availableSize, transition: transition)
                     }
                     animationView.loopAnimation = false
@@ -113,14 +128,22 @@ public final class LottieAnimationComponent: Component {
                 self.component = component

                 self.animationView?.removeFromSuperview()
+                self.didPlayToCompletion = false

                 if let url = getAppBundle().url(forResource: component.animation.name, withExtension: "json"), let composition = LOTComposition(filePath: url.path) {
                     let view = LOTAnimationView(model: composition, in: getAppBundle())
-                    view.loopAnimation = component.animation.loop
+                    switch component.animation.mode {
+                    case .still, .animateTransitionFromPrevious:
+                        view.loopAnimation = false
+                    case let .animating(loop):
+                        view.loopAnimation = loop
+                    }
                     view.animationSpeed = 1.0
                     view.backgroundColor = .clear
                     view.isOpaque = false

+                    //view.logHierarchyKeypaths()
+
                     for (key, value) in component.animation.colors {
                         let colorCallback = LOTColorValueCallback(color: value.cgColor)
                         self.colorCallbacks.append(colorCallback)
@@ -129,6 +152,8 @@ public final class LottieAnimationComponent: Component {
                     self.animationView = view
                     self.addSubview(view)
+
+                    updatePlayback = true
                 }
             }

@@ -146,7 +171,8 @@ public final class LottieAnimationComponent: Component {
             if let animationView = self.animationView {
                 animationView.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.height - animationSize.height) / 2.0)), size: animationSize)

-                if component.animation.isAnimating {
+                if updatePlayback {
+                    if case .animating = component.animation.mode {
                         if !animationView.isAnimationPlaying {
                             animationView.play { _ in
                             }
@@ -157,6 +183,7 @@ public final class LottieAnimationComponent: Component {
                         }
                     }
                 }
+            }

             return size
         }
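
A minimal migration sketch (not part of the diff) of how a call site moves from the old loop:/isAnimating: arguments to the new Animation.mode value; the animation name and color key below are placeholders:

    // Before: Animation(name: ..., colors: ..., loop: true, isAnimating: true)
    // After: playback behavior is one explicit mode value.
    let animation = LottieAnimationComponent.Animation(
        name: "anim_example", // hypothetical asset name
        colors: ["Arrow.Union.Fill 1": .white],
        mode: .animating(loop: true)
    )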

View File

@@ -8,7 +8,15 @@ public final class NullActionClass: NSObject, CAAction {
 public let nullAction = NullActionClass()

 open class SimpleLayer: CALayer {
+    public var didEnterHierarchy: (() -> Void)?
+    public var didExitHierarchy: (() -> Void)?
+
     override open func action(forKey event: String) -> CAAction? {
+        if event == kCAOnOrderIn {
+            self.didEnterHierarchy?()
+        } else if event == kCAOnOrderOut {
+            self.didExitHierarchy?()
+        }
         return nullAction
     }
@@ -26,7 +34,15 @@ open class SimpleLayer: CALayer {
 }

 open class SimpleShapeLayer: CAShapeLayer {
+    public var didEnterHierarchy: (() -> Void)?
+    public var didExitHierarchy: (() -> Void)?
+
     override open func action(forKey event: String) -> CAAction? {
+        if event == kCAOnOrderIn {
+            self.didEnterHierarchy?()
+        } else if event == kCAOnOrderOut {
+            self.didExitHierarchy?()
+        }
         return nullAction
     }
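
These callbacks piggyback on CALayer's action(forKey:) hook: kCAOnOrderIn/kCAOnOrderOut fire when the layer enters or leaves a layer tree. A usage sketch (not part of the diff), mirroring how the transcription button later in this commit restarts its progress stroke:

    let progressLayer = SimpleShapeLayer()
    progressLayer.didEnterHierarchy = { [weak progressLayer] in
        // Re-add the repeating animation each time the layer becomes visible,
        // since Core Animation drops animations when a layer leaves the tree.
        let strokeAnimation = CABasicAnimation(keyPath: "strokeEnd")
        strokeAnimation.fromValue = 0.0 as NSNumber
        strokeAnimation.toValue = 1.0 as NSNumber
        strokeAnimation.duration = 1.25
        strokeAnimation.repeatCount = .infinity
        progressLayer?.add(strokeAnimation, forKey: "strokeEnd")
    }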

View File

@@ -191,7 +191,9 @@ public class ItemListCheckboxItemNode: ItemListRevealOptionsItemNode {
             case .left:
                 leftInset += 62.0
             case .right:
-                leftInset += 0.0
+                if item.icon == nil {
+                    leftInset += 16.0
+                }
             }
             let iconInset: CGFloat = 62.0

View File

@@ -0,0 +1,20 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "ConvertOpusToAAC",
    module_name = "ConvertOpusToAAC",
    srcs = glob([
        "Sources/**/*.swift",
    ]),
    copts = [
        "-warnings-as-errors",
    ],
    deps = [
        "//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
        "//submodules/FFMpegBinding:FFMpegBinding",
        "//submodules/MediaPlayer:UniversalMediaPlayer",
    ],
    visibility = [
        "//visibility:public",
    ],
)

View File

@@ -0,0 +1,69 @@
import Foundation
import UniversalMediaPlayer
import AVFoundation
import SwiftSignalKit

public func convertOpusToAAC(sourcePath: String, allocateTempFile: @escaping () -> String) -> Signal<String?, NoError> {
    return Signal { subscriber in
        var isCancelled = false
        let queue = Queue()

        queue.async {
            do {
                let audioSource = SoftwareAudioSource(path: sourcePath)

                let outputPath = allocateTempFile()

                let assetWriter = try AVAssetWriter(outputURL: URL(fileURLWithPath: outputPath), fileType: .m4a)

                var channelLayout = AudioChannelLayout()
                memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
                channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

                let outputSettings: [String: Any] = [
                    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                    AVSampleRateKey: 48000,
                    AVEncoderBitRateKey: 96000,
                    AVNumberOfChannelsKey: 1,
                    AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout<AudioChannelLayout>.size)
                ]

                let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)

                assetWriter.add(audioInput)

                assetWriter.startWriting()
                assetWriter.startSession(atSourceTime: .zero)

                let finishWriting: () -> Void = {
                    assetWriter.finishWriting(completionHandler: {
                        subscriber.putNext(outputPath)
                        subscriber.putCompletion()
                    })
                }

                audioInput.requestMediaDataWhenReady(on: queue.queue, using: {
                    if audioInput.isReadyForMoreMediaData {
                        if !isCancelled, let sampleBuffer = audioSource.readSampleBuffer() {
                            if !audioInput.append(sampleBuffer) {
                                audioInput.markAsFinished()
                                finishWriting()
                                return
                            }
                        } else {
                            audioInput.markAsFinished()
                            finishWriting()
                        }
                    }
                })
            } catch let e {
                print("Error: \(e)")
                subscriber.putNext(nil)
                subscriber.putCompletion()
            }
        }

        return ActionDisposable {
            isCancelled = true
        }
    }
}
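
A usage sketch (not part of the diff), assuming SwiftSignalKit's start(next:) and a caller-supplied temp-file allocator; the source path is a placeholder:

    let disposable = convertOpusToAAC(
        sourcePath: voiceNotePath, // hypothetical path to an .ogg voice note
        allocateTempFile: {
            return NSTemporaryDirectory() + UUID().uuidString
        }
    ).start(next: { outputPath in
        // outputPath is the .m4a file on success, nil on failure.
        print("converted to: \(outputPath ?? "<failed>")")
    })
    // disposable.dispose() cancels the conversion via the isCancelled flag.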

View File

@@ -0,0 +1,18 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "LocalAudioTranscription",
    module_name = "LocalAudioTranscription",
    srcs = glob([
        "Sources/**/*.swift",
    ]),
    copts = [
        "-warnings-as-errors",
    ],
    deps = [
        "//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
    ],
    visibility = [
        "//visibility:public",
    ],
)

View File

@@ -0,0 +1,73 @@
import Foundation
import SwiftSignalKit
import Speech

private var sharedRecognizer: Any?

public func transcribeAudio(path: String) -> Signal<String?, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()

        if #available(iOS 13.0, *) {
            SFSpeechRecognizer.requestAuthorization { (status) in
                switch status {
                case .notDetermined:
                    subscriber.putNext(nil)
                    subscriber.putCompletion()
                case .restricted:
                    subscriber.putNext(nil)
                    subscriber.putCompletion()
                case .denied:
                    subscriber.putNext(nil)
                    subscriber.putCompletion()
                case .authorized:
                    let speechRecognizer: SFSpeechRecognizer
                    if let sharedRecognizer = sharedRecognizer as? SFSpeechRecognizer {
                        speechRecognizer = sharedRecognizer
                    } else {
                        guard let speechRecognizerValue = SFSpeechRecognizer(locale: Locale(identifier: "ru-RU")), speechRecognizerValue.isAvailable else {
                            subscriber.putNext(nil)
                            subscriber.putCompletion()

                            return
                        }
                        speechRecognizerValue.defaultTaskHint = .unspecified
                        sharedRecognizer = speechRecognizerValue
                        speechRecognizer = speechRecognizerValue

                        speechRecognizer.supportsOnDeviceRecognition = false
                    }

                    let request = SFSpeechURLRecognitionRequest(url: URL(fileURLWithPath: path))
                    request.requiresOnDeviceRecognition = speechRecognizer.supportsOnDeviceRecognition
                    request.shouldReportPartialResults = false

                    let task = speechRecognizer.recognitionTask(with: request, resultHandler: { result, error in
                        if let result = result {
                            subscriber.putNext(result.bestTranscription.formattedString)
                            subscriber.putCompletion()
                        } else {
                            print("transcribeAudio: \(String(describing: error))")

                            subscriber.putNext(nil)
                            subscriber.putCompletion()
                        }
                    })

                    disposable.set(ActionDisposable {
                        task.cancel()
                    })
                @unknown default:
                    subscriber.putNext(nil)
                    subscriber.putCompletion()
                }
            }
        } else {
            subscriber.putNext(nil)
            subscriber.putCompletion()
        }

        return disposable
    }
    |> runOn(.mainQueue())
}
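
A sketch (not part of the diff) chaining the two new helpers, so an Opus voice note is first converted to AAC and the result fed to speech recognition; voiceNotePath and allocateTempFile are assumed to exist at the call site:

    let transcription: Signal<String?, NoError> = convertOpusToAAC(sourcePath: voiceNotePath, allocateTempFile: allocateTempFile)
    |> mapToSignal { aacPath -> Signal<String?, NoError> in
        guard let aacPath = aacPath else {
            return .single(nil)
        }
        // SFSpeechURLRecognitionRequest reads the converted .m4a directly.
        return transcribeAudio(path: aacPath)
    }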

View File

@@ -9,6 +9,8 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
     private let audioFrame: FFMpegAVFrame
     private var resetDecoderOnNextFrame = true

+    private let formatDescription: CMAudioFormatDescription
+
     private var delayedFrames: [MediaTrackFrame] = []

     init(codecContext: FFMpegAVCodecContext, sampleRate: Int = 44100, channelCount: Int = 2) {
@@ -16,6 +18,27 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
         self.audioFrame = FFMpegAVFrame()

         self.swrContext = FFMpegSWResample(sourceChannelCount: Int(codecContext.channels()), sourceSampleRate: Int(codecContext.sampleRate()), sourceSampleFormat: codecContext.sampleFormat(), destinationChannelCount: channelCount, destinationSampleRate: sampleRate, destinationSampleFormat: FFMPEG_AV_SAMPLE_FMT_S16)
+
+        var outputDescription = AudioStreamBasicDescription(
+            mSampleRate: Float64(sampleRate),
+            mFormatID: kAudioFormatLinearPCM,
+            mFormatFlags: kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked,
+            mBytesPerPacket: UInt32(2 * channelCount),
+            mFramesPerPacket: 1,
+            mBytesPerFrame: UInt32(2 * channelCount),
+            mChannelsPerFrame: UInt32(channelCount),
+            mBitsPerChannel: 16,
+            mReserved: 0
+        )
+
+        var channelLayout = AudioChannelLayout()
+        memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
+        channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
+
+        var formatDescription: CMAudioFormatDescription?
+        CMAudioFormatDescriptionCreate(allocator: nil, asbd: &outputDescription, layoutSize: MemoryLayout<AudioChannelLayout>.size, layout: &channelLayout, magicCookieSize: 0, magicCookie: nil, extensions: nil, formatDescriptionOut: &formatDescription)
+
+        self.formatDescription = formatDescription!
     }

     func decodeRaw(frame: MediaTrackDecodableFrame) -> Data? {
@@ -112,13 +135,18 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
             return nil
         }

-        var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: pts, decodeTimeStamp: pts)
+        //var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: pts, decodeTimeStamp: pts)
         var sampleBuffer: CMSampleBuffer?
-        var sampleSize = data.count
-        guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
+        //var sampleSize = data.count

+        guard CMAudioSampleBufferCreateReadyWithPacketDescriptions(allocator: nil, dataBuffer: blockBuffer!, formatDescription: self.formatDescription, sampleCount: Int(data.count / 2), presentationTimeStamp: pts, packetDescriptions: nil, sampleBufferOut: &sampleBuffer) == noErr else {
             return nil
         }
+        /*guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: self.formatDescription, sampleCount: Int(frame.duration), sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
+            return nil
+        }*/

         let resetDecoder = self.resetDecoderOnNextFrame
         self.resetDecoderOnNextFrame = false

View File

@@ -710,6 +710,10 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
         }
     }

+    public func update(size: CGSize, animator: ControlledTransitionAnimator) {
+        self.updateProgressAnimations(animator: animator)
+    }
+
     public func updateColors(backgroundColor: UIColor, foregroundColor: UIColor) {
         switch self.contentNodes {
         case let .standard(node):
@@ -736,8 +740,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
         }
     }

-    private func updateProgressAnimations() {
-        self.updateProgress()
+    private func updateProgressAnimations(animator: ControlledTransitionAnimator? = nil) {
+        self.updateProgress(animator: animator)

         let needsAnimation: Bool
@@ -794,7 +798,7 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
         })
     }

-    private func updateProgress() {
+    private func updateProgress(animator: ControlledTransitionAnimator? = nil) {
         let bounds = self.bounds

         var isPlaying = false
@@ -832,10 +836,11 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                 node.containerNode.frame = CGRect(origin: CGPoint(), size: bounds.size)

                 let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: floor((bounds.size.height - node.lineHeight) / 2.0)), size: CGSize(width: bounds.size.width, height: node.lineHeight))
+                let foregroundContentFrame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))

                 node.backgroundNode.position = backgroundFrame.center
                 node.backgroundNode.bounds = CGRect(origin: CGPoint(), size: backgroundFrame.size)

-                let foregroundContentFrame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))
                 node.foregroundContentNode.position = foregroundContentFrame.center
                 node.foregroundContentNode.bounds = CGRect(origin: CGPoint(), size: foregroundContentFrame.size)
@@ -963,8 +968,14 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
                 }

                 let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: bounds.size.width, height: bounds.size.height))
+                if let animator = animator {
+                    animator.updateFrame(layer: node.backgroundNode.layer, frame: backgroundFrame, completion: nil)
+                    animator.updateFrame(layer: node.foregroundContentNode.layer, frame: CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height)), completion: nil)
+                } else {
                     node.backgroundNode.frame = backgroundFrame
                     node.foregroundContentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))
+                }

                 let timestampAndDuration: (timestamp: Double, duration: Double)?
                 if let statusValue = self.statusValue, Double(0.0).isLess(than: statusValue.duration) {

View File

@@ -446,6 +446,19 @@ public final class SoftwareAudioSource {
         }
     }

+    public func readSampleBuffer() -> CMSampleBuffer? {
+        guard let audioStream = self.audioStream, let _ = self.avFormatContext else {
+            return nil
+        }
+
+        let (decodableFrame, _) = self.readDecodableFrame()
+        if let decodableFrame = decodableFrame {
+            return audioStream.decoder.decode(frame: decodableFrame)?.sampleBuffer
+        } else {
+            return nil
+        }
+    }
+
     public func readEncodedFrame() -> (Data, Int)? {
         guard let _ = self.audioStream, let _ = self.avFormatContext else {
             return nil

View File

@@ -147,10 +147,11 @@
         id parsedMessage = [MTInternalMessageParser parseMessage:rpcResultMessage.data];
         if ([parsedMessage isKindOfClass:[MTRpcError class]]) {
-            if (MTLogEnabled()) {
             MTRpcError *rpcError = (MTRpcError *)parsedMessage;
+            if (MTLogEnabled()) {
                 MTLog(@"[MTRequestMessageService#%p response for %" PRId64 " is error: %d: %@]", self, _currentMessageId, (int)rpcError.errorCode, rpcError.errorDescription);
             }
+            MTShortLog(@"[MTRequestMessageService#%p response for %" PRId64 " is error: %d: %@]", self, _currentMessageId, (int)rpcError.errorCode, rpcError.errorDescription);
         }

         //boolTrue#997275b5 = Bool;

View File

@@ -178,6 +178,8 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
     private var absoluteLocation: (CGRect, CGSize)?
     private var isCurrentlyInHierarchy = false
    private var shouldBeAnimating = false
+    private var globalTimeOffset = true
+    private var duration: Double?

     override init() {
         self.imageNodeContainer = ASDisplayNode()
@@ -212,17 +214,19 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
         self.updateAnimation()
     }

-    func update(backgroundColor: UIColor, foregroundColor: UIColor, horizontal: Bool = false) {
+    func update(backgroundColor: UIColor, foregroundColor: UIColor, horizontal: Bool, effectSize: CGFloat?, globalTimeOffset: Bool, duration: Double?) {
         if let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), self.currentHorizontal == horizontal {
             return
         }
         self.currentBackgroundColor = backgroundColor
         self.currentForegroundColor = foregroundColor
         self.currentHorizontal = horizontal
+        self.globalTimeOffset = globalTimeOffset
+        self.duration = duration

         let image: UIImage?
         if horizontal {
-            image = generateImage(CGSize(width: 320.0, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
+            image = generateImage(CGSize(width: effectSize ?? 320.0, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
                 context.clear(CGRect(origin: CGPoint(), size: size))
                 context.setFillColor(backgroundColor.cgColor)
                 context.fill(CGRect(origin: CGPoint(), size: size))
@@ -304,18 +308,22 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
         }

         if horizontal {
-            let gradientHeight: CGFloat = 320.0
+            let gradientHeight: CGFloat = self.imageNode.image?.size.width ?? 320.0
             self.imageNode.frame = CGRect(origin: CGPoint(x: -gradientHeight, y: 0.0), size: CGSize(width: gradientHeight, height: containerSize.height))
-            let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.width + gradientHeight) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 1.3 * 1.0, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
+            let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.width + gradientHeight) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: duration ?? 1.3, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
             animation.repeatCount = Float.infinity
+            if self.globalTimeOffset {
                 animation.beginTime = 1.0
+            }
             self.imageNode.layer.add(animation, forKey: "shimmer")
         } else {
             let gradientHeight: CGFloat = 250.0
             self.imageNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -gradientHeight), size: CGSize(width: containerSize.width, height: gradientHeight))
-            let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.height + gradientHeight) as NSNumber, keyPath: "position.y", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 1.3 * 1.0, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
+            let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.height + gradientHeight) as NSNumber, keyPath: "position.y", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: duration ?? 1.3, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
             animation.repeatCount = Float.infinity
+            if self.globalTimeOffset {
                 animation.beginTime = 1.0
+            }
             self.imageNode.layer.add(animation, forKey: "shimmer")
         }
     }
@@ -339,6 +347,7 @@ public final class ShimmerEffectNode: ASDisplayNode {
     private var currentForegroundColor: UIColor?
     private var currentShimmeringColor: UIColor?
     private var currentHorizontal: Bool?
+    private var currentEffectSize: CGFloat?
     private var currentSize = CGSize()

     override public init() {
@@ -361,8 +370,8 @@ public final class ShimmerEffectNode: ASDisplayNode {
         self.effectNode.updateAbsoluteRect(rect, within: containerSize)
     }

-    public func update(backgroundColor: UIColor, foregroundColor: UIColor, shimmeringColor: UIColor, shapes: [Shape], horizontal: Bool = false, size: CGSize) {
-        if self.currentShapes == shapes, let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor), horizontal == self.currentHorizontal, self.currentSize == size {
+    public func update(backgroundColor: UIColor, foregroundColor: UIColor, shimmeringColor: UIColor, shapes: [Shape], horizontal: Bool = false, effectSize: CGFloat? = nil, globalTimeOffset: Bool = true, duration: Double? = nil, size: CGSize) {
+        if self.currentShapes == shapes, let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor), horizontal == self.currentHorizontal, effectSize == self.currentEffectSize, self.currentSize == size {
             return
         }
@@ -375,7 +384,7 @@ public final class ShimmerEffectNode: ASDisplayNode {
         self.backgroundNode.backgroundColor = foregroundColor

-        self.effectNode.update(backgroundColor: foregroundColor, foregroundColor: shimmeringColor, horizontal: horizontal)
+        self.effectNode.update(backgroundColor: foregroundColor, foregroundColor: shimmeringColor, horizontal: horizontal, effectSize: effectSize, globalTimeOffset: globalTimeOffset, duration: duration)

         self.foregroundNode.image = generateImage(size, rotatedContext: { size, context in
             context.setFillColor(backgroundColor.cgColor)
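
The new parameters default at the ShimmerEffectNode level (effectSize: nil, globalTimeOffset: true, duration: nil), so existing call sites compile unchanged. A call sketch using the new knobs; shapes, colors, and size are assumed from the surrounding caller:

    shimmerNode.update(
        backgroundColor: .clear,
        foregroundColor: foregroundColor,
        shimmeringColor: shimmeringColor,
        shapes: shapes,
        horizontal: true,
        effectSize: 320.0,        // custom gradient width
        globalTimeOffset: false,  // don't sync to the shared shimmer clock
        duration: 0.75,           // faster sweep than the 1.3s default
        size: size
    )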

View File

@@ -81,7 +81,7 @@ public class StickerShimmerEffectNode: ASDisplayNode {
         self.backgroundNode.backgroundColor = foregroundColor

-        self.effectNode.update(backgroundColor: backgroundColor == nil ? .clear : foregroundColor, foregroundColor: shimmeringColor, horizontal: true)
+        self.effectNode.update(backgroundColor: backgroundColor == nil ? .clear : foregroundColor, foregroundColor: shimmeringColor, horizontal: true, effectSize: nil, globalTimeOffset: true, duration: nil)

         let bounds = CGRect(origin: CGPoint(), size: size)
         let image = generateImage(size, rotatedContext: { size, context in

View File

@@ -808,8 +808,7 @@ public final class MediaStreamComponent: CombinedComponent {
         "Point 3.Group 1.Fill 1": whiteColor,
         "Point 1.Group 1.Fill 1": whiteColor
     ],
-    loop: false,
-    isAnimating: false
+    mode: .still
 ),
 size: CGSize(width: 22.0, height: 22.0)
 ).tagged(moreAnimationTag))),

View File

@@ -40,8 +40,8 @@ func telegramMediaActionFromApiAction(_ action: Api.MessageAction) -> TelegramMe
         return TelegramMediaAction(action: .phoneCall(callId: callId, discardReason: discardReason, duration: duration, isVideo: isVideo))
     case .messageActionEmpty:
         return nil
-    case let .messageActionPaymentSent(_, currency, totalAmount, _):
-        return TelegramMediaAction(action: .paymentSent(currency: currency, totalAmount: totalAmount))
+    case let .messageActionPaymentSent(_, currency, totalAmount, invoiceSlug):
+        return TelegramMediaAction(action: .paymentSent(currency: currency, totalAmount: totalAmount, invoiceSlug: invoiceSlug))
     case .messageActionPaymentSentMe:
         return nil
     case .messageActionScreenshotTaken:

View File

@@ -39,7 +39,7 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
     case messageAutoremoveTimeoutUpdated(Int32)
     case gameScore(gameId: Int64, score: Int32)
     case phoneCall(callId: Int64, discardReason: PhoneCallDiscardReason?, duration: Int32?, isVideo: Bool)
-    case paymentSent(currency: String, totalAmount: Int64)
+    case paymentSent(currency: String, totalAmount: Int64, invoiceSlug: String?)
     case customText(text: String, entities: [MessageTextEntity])
     case botDomainAccessGranted(domain: String)
     case botSentSecureValues(types: [SentSecureValueType])
@@ -88,7 +88,7 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
         }
         self = .phoneCall(callId: decoder.decodeInt64ForKey("i", orElse: 0), discardReason: discardReason, duration: decoder.decodeInt32ForKey("d", orElse: 0), isVideo: decoder.decodeInt32ForKey("vc", orElse: 0) != 0)
     case 15:
-        self = .paymentSent(currency: decoder.decodeStringForKey("currency", orElse: ""), totalAmount: decoder.decodeInt64ForKey("ta", orElse: 0))
+        self = .paymentSent(currency: decoder.decodeStringForKey("currency", orElse: ""), totalAmount: decoder.decodeInt64ForKey("ta", orElse: 0), invoiceSlug: decoder.decodeOptionalStringForKey("invoiceSlug"))
     case 16:
         self = .customText(text: decoder.decodeStringForKey("text", orElse: ""), entities: decoder.decodeObjectArrayWithDecoderForKey("ent"))
     case 17:
@@ -172,10 +172,15 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
         encoder.encodeInt32(13, forKey: "_rawValue")
         encoder.encodeInt64(gameId, forKey: "i")
         encoder.encodeInt32(score, forKey: "s")
-    case let .paymentSent(currency, totalAmount):
+    case let .paymentSent(currency, totalAmount, invoiceSlug):
         encoder.encodeInt32(15, forKey: "_rawValue")
         encoder.encodeString(currency, forKey: "currency")
         encoder.encodeInt64(totalAmount, forKey: "ta")
+        if let invoiceSlug = invoiceSlug {
+            encoder.encodeString(invoiceSlug, forKey: "invoiceSlug")
+        } else {
+            encoder.encodeNil(forKey: "invoiceSlug")
+        }
     case let .phoneCall(callId, discardReason, duration, isVideo):
         encoder.encodeInt32(14, forKey: "_rawValue")
         encoder.encodeInt64(callId, forKey: "i")
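
Serialization note: entries stored before this change lack the "invoiceSlug" key, and decodeOptionalStringForKey returns nil for a missing key, so old payment actions should decode with a nil slug. A minimal sketch (not part of the diff) of matching the extended case:

    switch action.action {
    case let .paymentSent(currency, totalAmount, invoiceSlug):
        // invoiceSlug is nil for messages stored before this change.
        print("paid \(totalAmount) \(currency), slug: \(invoiceSlug ?? "<none>")")
    default:
        break
    }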

View File

@@ -97,7 +97,11 @@ private class AdMessagesHistoryContextImpl {
     self.opaqueId = try container.decode(Data.self, forKey: .opaqueId)

-    self.messageType = (try container.decodeIfPresent(MessageType.self, forKey: .messageType)) ?? .sponsored
+    if let messageType = try container.decodeIfPresent(Int32.self, forKey: .messageType) {
+        self.messageType = MessageType(rawValue: messageType) ?? .sponsored
+    } else {
+        self.messageType = .sponsored
+    }

     self.text = try container.decode(String.self, forKey: .text)
     self.textEntities = try container.decode([MessageTextEntity].self, forKey: .textEntities)
@@ -116,7 +120,7 @@ private class AdMessagesHistoryContextImpl {
     var container = encoder.container(keyedBy: CodingKeys.self)
     try container.encode(self.opaqueId, forKey: .opaqueId)
-    try container.encode(self.messageType, forKey: .messageType)
+    try container.encode(self.messageType.rawValue, forKey: .messageType)
     try container.encode(self.text, forKey: .text)
     try container.encode(self.textEntities, forKey: .textEntities)

View File

@@ -322,6 +322,10 @@ public extension TelegramEngine {
         return _internal_translate(network: self.account.network, text: text, fromLang: fromLang, toLang: toLang)
     }

+    public func transcribeAudio(messageId: MessageId) -> Signal<String?, NoError> {
+        return _internal_transcribeAudio(postbox: self.account.postbox, network: self.account.network, messageId: messageId)
+    }
+
     public func requestWebView(peerId: PeerId, botId: PeerId, url: String?, payload: String?, themeParams: [String: Any]?, fromMenu: Bool, replyToMessageId: MessageId?) -> Signal<RequestWebViewResult, RequestWebViewError> {
         return _internal_requestWebView(postbox: self.account.postbox, network: self.account.network, stateManager: self.account.stateManager, peerId: peerId, botId: botId, url: url, payload: payload, themeParams: themeParams, fromMenu: fromMenu, replyToMessageId: replyToMessageId)
     }

View File

@@ -28,3 +28,28 @@ func _internal_translate(network: Network, text: String, fromLang: String?, toLa
         }
     }
 }
+
+func _internal_transcribeAudio(postbox: Postbox, network: Network, messageId: MessageId) -> Signal<String?, NoError> {
+    return postbox.transaction { transaction -> Api.InputPeer? in
+        return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer)
+    }
+    |> mapToSignal { inputPeer -> Signal<String?, NoError> in
+        guard let inputPeer = inputPeer else {
+            return .single(nil)
+        }
+        return network.request(Api.functions.messages.transcribeAudio(peer: inputPeer, msgId: messageId.id))
+        |> map(Optional.init)
+        |> `catch` { _ -> Signal<Api.messages.TranscribedAudio?, NoError> in
+            return .single(nil)
+        }
+        |> mapToSignal { result -> Signal<String?, NoError> in
+            guard let result = result else {
+                return .single(nil)
+            }
+            switch result {
+            case let .transcribedAudio(string):
+                return .single(string)
+            }
+        }
+    }
+}

View File

@@ -470,17 +470,23 @@ func _internal_sendBotPaymentForm(account: Account, formId: Int64, source: BotPa
                 for media in message.media {
                     if let action = media as? TelegramMediaAction {
                         if case .paymentSent = action.action {
+                            switch source {
+                            case let .slug(slug):
+                                for media in message.media {
+                                    if let action = media as? TelegramMediaAction, case let .paymentSent(_, _, invoiceSlug?) = action.action, invoiceSlug == slug {
+                                        if case let .Id(id) = message.id {
+                                            receiptMessageId = id
+                                        }
+                                    }
+                                }
+                            case let .message(messageId):
                                 for attribute in message.attributes {
                                     if let reply = attribute as? ReplyMessageAttribute {
-                                        switch source {
-                                        case let .message(messageId):
                                         if reply.messageId == messageId {
                                             if case let .Id(id) = message.id {
                                                 receiptMessageId = id
                                             }
                                         }
-                                        case .slug:
-                                            break
                                     }
                                 }
                             }

View File

@@ -437,7 +437,7 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
     var argumentAttributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
     argumentAttributes[1] = MarkdownAttributeSet(font: titleBoldFont, textColor: primaryTextColor, additionalAttributes: [:])
     attributedString = addAttributesToStringWithRanges(formatWithArgumentRanges(baseString, ranges, [authorName, gameTitle ?? ""]), body: bodyAttributes, argumentAttributes: argumentAttributes)
-case let .paymentSent(currency, totalAmount):
+case let .paymentSent(currency, totalAmount, _):
     var invoiceMessage: EngineMessage?
     for attribute in message.attributes {
         if let attribute = attribute as? ReplyMessageAttribute, let message = message.associatedMessages[attribute.messageId] {

View File

@@ -271,6 +271,10 @@ swift_library(
         "//submodules/Components/HierarchyTrackingLayer:HierarchyTrackingLayer",
         "//submodules/Utils/RangeSet:RangeSet",
         "//submodules/InAppPurchaseManager:InAppPurchaseManager",
+        "//submodules/TelegramUI/Components/AudioTranscriptionButtonComponent:AudioTranscriptionButtonComponent",
+        "//submodules/TelegramUI/Components/AudioWaveformComponent:AudioWaveformComponent",
+        "//submodules/Media/ConvertOpusToAAC:ConvertOpusToAAC",
+        "//submodules/Media/LocalAudioTranscription:LocalAudioTranscription",
     ] + select({
         "@build_bazel_rules_apple//apple:ios_armv7": [],
         "@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,

View File

@@ -0,0 +1,22 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "AudioTranscriptionButtonComponent",
    module_name = "AudioTranscriptionButtonComponent",
    srcs = glob([
        "Sources/**/*.swift",
    ]),
    copts = [
        "-warnings-as-errors",
    ],
    deps = [
        "//submodules/ComponentFlow:ComponentFlow",
        "//submodules/AppBundle:AppBundle",
        "//submodules/Display:Display",
        "//submodules/TelegramPresentationData:TelegramPresentationData",
        "//submodules/Components/LottieAnimationComponent:LottieAnimationComponent",
    ],
    visibility = [
        "//visibility:public",
    ],
)

View File

@@ -0,0 +1,188 @@
import Foundation
import UIKit
import ComponentFlow
import AppBundle
import Display
import TelegramPresentationData
import LottieAnimationComponent

public final class AudioTranscriptionButtonComponent: Component {
    public enum TranscriptionState {
        case possible
        case inProgress
        case expanded
        case collapsed
    }

    public let theme: PresentationThemePartedColors
    public let transcriptionState: TranscriptionState
    public let pressed: () -> Void

    public init(
        theme: PresentationThemePartedColors,
        transcriptionState: TranscriptionState,
        pressed: @escaping () -> Void
    ) {
        self.theme = theme
        self.transcriptionState = transcriptionState
        self.pressed = pressed
    }

    public static func ==(lhs: AudioTranscriptionButtonComponent, rhs: AudioTranscriptionButtonComponent) -> Bool {
        if lhs.theme !== rhs.theme {
            return false
        }
        if lhs.transcriptionState != rhs.transcriptionState {
            return false
        }
        return true
    }

    public final class View: UIButton {
        private var component: AudioTranscriptionButtonComponent?

        private let backgroundLayer: SimpleLayer
        private var inProgressLayer: SimpleShapeLayer?

        private let animationView: ComponentHostView<Empty>

        override init(frame: CGRect) {
            self.backgroundLayer = SimpleLayer()
            self.animationView = ComponentHostView<Empty>()
            self.animationView.isUserInteractionEnabled = false

            super.init(frame: frame)

            self.backgroundLayer.masksToBounds = true
            self.backgroundLayer.cornerRadius = 10.0
            self.layer.addSublayer(self.backgroundLayer)

            self.addSubview(self.animationView)

            self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
        }

        required public init?(coder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }

        @objc private func pressed() {
            self.component?.pressed()
        }

        func update(component: AudioTranscriptionButtonComponent, availableSize: CGSize, transition: Transition) -> CGSize {
            let size = CGSize(width: 30.0, height: 30.0)

            let foregroundColor = component.theme.bubble.withWallpaper.reactionActiveBackground

            if self.component?.transcriptionState != component.transcriptionState {
                switch component.transcriptionState {
                case .inProgress:
                    if self.inProgressLayer == nil {
                        let inProgressLayer = SimpleShapeLayer()
                        inProgressLayer.isOpaque = false
                        inProgressLayer.backgroundColor = nil
                        inProgressLayer.fillColor = nil
                        inProgressLayer.lineCap = .round
                        inProgressLayer.lineWidth = 1.0
                        let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: CGSize(width: 30.0, height: 30.0)), cornerRadius: 9.0).cgPath
                        inProgressLayer.path = path
                        self.inProgressLayer = inProgressLayer
                        inProgressLayer.didEnterHierarchy = { [weak inProgressLayer] in
                            guard let inProgressLayer = inProgressLayer else {
                                return
                            }
                            let endAnimation = CABasicAnimation(keyPath: "strokeEnd")
                            endAnimation.fromValue = CGFloat(0.0) as NSNumber
                            endAnimation.toValue = CGFloat(1.0) as NSNumber
                            endAnimation.duration = 1.25
                            endAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
                            endAnimation.fillMode = .forwards
                            endAnimation.repeatCount = .infinity
                            inProgressLayer.add(endAnimation, forKey: "strokeEnd")

                            let startAnimation = CABasicAnimation(keyPath: "strokeStart")
                            startAnimation.fromValue = CGFloat(0.0) as NSNumber
                            startAnimation.toValue = CGFloat(1.0) as NSNumber
                            startAnimation.duration = 1.25
                            startAnimation.timingFunction = CAMediaTimingFunction(name: .easeIn)
                            startAnimation.fillMode = .forwards
                            startAnimation.repeatCount = .infinity
                            inProgressLayer.add(startAnimation, forKey: "strokeStart")
                        }
                        self.layer.addSublayer(inProgressLayer)
                    }
                default:
                    if let inProgressLayer = self.inProgressLayer {
                        self.inProgressLayer = nil
                        if case .none = transition.animation {
                            inProgressLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false, completion: { [weak inProgressLayer] _ in
                                inProgressLayer?.removeFromSuperlayer()
                            })
                        } else {
                            inProgressLayer.removeFromSuperlayer()
                        }
                    }
                }

                let animationName: String
                switch component.transcriptionState {
                case .possible:
                    animationName = "voiceToText"
                case .inProgress:
                    animationName = "voiceToText"
                case .collapsed:
                    animationName = "voiceToText"
                case .expanded:
                    animationName = "textToVoice"
                }

                let animationSize = self.animationView.update(
                    transition: transition,
                    component: AnyComponent(LottieAnimationComponent(
                        animation: LottieAnimationComponent.Animation(
                            name: animationName,
                            colors: [
                                "icon.Group 3.Stroke 1": foregroundColor,
                                "icon.Group 1.Stroke 1": foregroundColor,
                                "icon.Group 4.Stroke 1": foregroundColor,
                                "icon.Group 2.Stroke 1": foregroundColor,
                                "Artboard Copy 2 Outlines.Group 5.Stroke 1": foregroundColor,
                                "Artboard Copy 2 Outlines.Group 1.Stroke 1": foregroundColor,
                                "Artboard Copy 2 Outlines.Group 4.Stroke 1": foregroundColor,
                                "Artboard Copy Outlines.Group 1.Stroke 1": foregroundColor,
                            ],
                            mode: .animateTransitionFromPrevious
                        ),
                        size: CGSize(width: 30.0, height: 30.0)
                    )),
                    environment: {},
                    containerSize: CGSize(width: 30.0, height: 30.0)
                )
                self.animationView.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.width - animationSize.height) / 2.0)), size: animationSize)
            }

            self.backgroundLayer.backgroundColor = component.theme.bubble.withWallpaper.reactionInactiveBackground.cgColor
            self.inProgressLayer?.strokeColor = foregroundColor.cgColor

            self.component = component

            self.backgroundLayer.frame = CGRect(origin: CGPoint(), size: size)
            if let inProgressLayer = self.inProgressLayer {
                inProgressLayer.frame = CGRect(origin: CGPoint(), size: size)
            }

            return CGSize(width: min(availableSize.width, size.width), height: min(availableSize.height, size.height))
        }
    }

    public func makeView() -> View {
        return View(frame: CGRect())
    }

    public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        return view.update(component: self, availableSize: availableSize, transition: transition)
    }
}
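
A hosting sketch (not part of the diff), assuming a ComponentHostView<Empty> named buttonHost and a theme value taken from the surrounding chat item:

    let buttonSize = buttonHost.update(
        transition: .immediate,
        component: AnyComponent(AudioTranscriptionButtonComponent(
            theme: theme,
            transcriptionState: .inProgress, // drives the animated stroke ring
            pressed: {
                // Kick off local or remote transcription here.
            }
        )),
        environment: {},
        containerSize: CGSize(width: 30.0, height: 30.0)
    )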

View File

@@ -0,0 +1,20 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")

swift_library(
    name = "AudioWaveformComponent",
    module_name = "AudioWaveformComponent",
    srcs = glob([
        "Sources/**/*.swift",
    ]),
    copts = [
        "-warnings-as-errors",
    ],
    deps = [
        "//submodules/ComponentFlow:ComponentFlow",
        "//submodules/AppBundle:AppBundle",
        "//submodules/Display:Display",
    ],
    visibility = [
        "//visibility:public",
    ],
)

View File

@@ -0,0 +1,63 @@
import Foundation
import UIKit
import ComponentFlow
import Display

public final class AudioWaveformComponent: Component {
    public let backgroundColor: UIColor
    public let foregroundColor: UIColor
    public let samples: Data
    public let peak: Int32

    public init(
        backgroundColor: UIColor,
        foregroundColor: UIColor,
        samples: Data,
        peak: Int32
    ) {
        self.backgroundColor = backgroundColor
        self.foregroundColor = foregroundColor
        self.samples = samples
        self.peak = peak
    }

    public static func ==(lhs: AudioWaveformComponent, rhs: AudioWaveformComponent) -> Bool {
        if lhs.backgroundColor !== rhs.backgroundColor {
            return false
        }
        if lhs.foregroundColor != rhs.foregroundColor {
            return false
        }
        if lhs.samples != rhs.samples {
            return false
        }
        if lhs.peak != rhs.peak {
            return false
        }
        return true
    }

    public final class View: UIView {
        private var component: AudioWaveformComponent?

        override init(frame: CGRect) {
            super.init(frame: frame)
        }

        required public init?(coder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }

        func update(component: AudioWaveformComponent, availableSize: CGSize, transition: Transition) -> CGSize {
            return CGSize(width: availableSize.width, height: availableSize.height)
        }
    }

    public func makeView() -> View {
        return View(frame: CGRect())
    }

    public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        return view.update(component: self, availableSize: availableSize, transition: transition)
    }
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
{"v":"5.8.1","fr":60,"ip":0,"op":20,"w":300,"h":300,"nm":"Comp 7","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"icon","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":19,"s":[-90]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[150,150,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":12,"s":[150,178,0],"to":[0,0,0],"ti":[0,0,0]},{"t":19,"s":[150,150,0]}],"ix":2,"l":2},"a":{"a":0,"k":[150,150,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[176.7,163.3],[220,163.3]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":12,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[-2.606,-0.912],[-0.503,-1.436],[0,0]],"o":[[0,0],[0.912,-2.606],[1.436,0.503],[0,0],[0,0]],"v":[[-35,45.248],[-4.719,-41.268],[1.652,-44.336],[4.719,-41.268],[35,45.248]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":14,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[198.3,144.752],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-17.5,35],[17.5,0],[-17.5,-35]],"c":false}]},{"t":19,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[4.2,60],[64.2,0],[4.2,-60]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[13.3]},{"t":19,"s":[16.7]}],"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[115.8,150],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[63.3,150],[130,150]],"c":false}]},{"t":19,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[93.3,150],[160,150]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":12,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 4","np":3,"cix":2,"bm":0,"ix":4,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":20,"st":0,"bm":0}],"markers":[]}

View File

@ -583,6 +583,7 @@ final class ChatMessageAttachedContentNode: ASDisplayNode {
dateAndStatusType: statusType, dateAndStatusType: statusType,
displayReactions: false, displayReactions: false,
messageSelection: nil, messageSelection: nil,
layoutConstants: layoutConstants,
constrainedSize: CGSize(width: constrainedSize.width - horizontalInsets.left - horizontalInsets.right, height: constrainedSize.height) constrainedSize: CGSize(width: constrainedSize.width - horizontalInsets.left - horizontalInsets.right, height: constrainedSize.height)
)) ))
refineContentFileLayout = refineLayout refineContentFileLayout = refineLayout

View File

@ -135,6 +135,7 @@ class ChatMessageFileBubbleContentNode: ChatMessageBubbleContentNode {
dateAndStatusType: statusType, dateAndStatusType: statusType,
displayReactions: true, displayReactions: true,
messageSelection: item.message.groupingKey != nil ? selection : nil, messageSelection: item.message.groupingKey != nil ? selection : nil,
layoutConstants: layoutConstants,
constrainedSize: CGSize(width: constrainedSize.width - layoutConstants.file.bubbleInsets.left - layoutConstants.file.bubbleInsets.right, height: constrainedSize.height) constrainedSize: CGSize(width: constrainedSize.width - layoutConstants.file.bubbleInsets.left - layoutConstants.file.bubbleInsets.right, height: constrainedSize.height)
)) ))

View File

@ -18,6 +18,12 @@ import MusicAlbumArtResources
import AudioBlob import AudioBlob
import ContextUI import ContextUI
import ChatPresentationInterfaceState import ChatPresentationInterfaceState
import ComponentFlow
import AudioTranscriptionButtonComponent
import AudioWaveformComponent
import ShimmerEffect
import ConvertOpusToAAC
import LocalAudioTranscription
private struct FetchControls { private struct FetchControls {
let fetch: (Bool) -> Void let fetch: (Bool) -> Void
@ -43,6 +49,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let dateAndStatusType: ChatMessageDateAndStatusType? let dateAndStatusType: ChatMessageDateAndStatusType?
let displayReactions: Bool let displayReactions: Bool
let messageSelection: Bool? let messageSelection: Bool?
let layoutConstants: ChatMessageItemLayoutConstants
let constrainedSize: CGSize let constrainedSize: CGSize
init( init(
@ -63,6 +70,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
dateAndStatusType: ChatMessageDateAndStatusType?, dateAndStatusType: ChatMessageDateAndStatusType?,
displayReactions: Bool, displayReactions: Bool,
messageSelection: Bool?, messageSelection: Bool?,
layoutConstants: ChatMessageItemLayoutConstants,
constrainedSize: CGSize constrainedSize: CGSize
) { ) {
self.context = context self.context = context
@ -82,6 +90,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.dateAndStatusType = dateAndStatusType self.dateAndStatusType = dateAndStatusType
self.displayReactions = displayReactions self.displayReactions = displayReactions
self.messageSelection = messageSelection self.messageSelection = messageSelection
self.layoutConstants = layoutConstants
self.constrainedSize = constrainedSize self.constrainedSize = constrainedSize
} }
} }
@ -95,7 +104,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private let fetchingCompactTextNode: ImmediateTextNode private let fetchingCompactTextNode: ImmediateTextNode
private let waveformNode: AudioWaveformNode private let waveformNode: AudioWaveformNode
private let waveformForegroundNode: AudioWaveformNode private let waveformForegroundNode: AudioWaveformNode
private var waveformShimmerNode: ShimmerEffectNode?
private var waveformMaskNode: AudioWaveformNode?
private var waveformScrubbingNode: MediaPlayerScrubbingNode? private var waveformScrubbingNode: MediaPlayerScrubbingNode?
private var audioTranscriptionButton: ComponentHostView<Empty>?
private let textNode: TextNode
let dateAndStatusNode: ChatMessageDateAndStatusNode let dateAndStatusNode: ChatMessageDateAndStatusNode
private let consumableContentNode: ASImageNode private let consumableContentNode: ASImageNode
@ -157,6 +170,10 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private var streamingCacheStatusFrame: CGRect? private var streamingCacheStatusFrame: CGRect?
private var fileIconImage: UIImage? private var fileIconImage: UIImage?
private var audioTranscriptionState: AudioTranscriptionButtonComponent.TranscriptionState = .possible
private var transcribedText: String?
private var transcribeDisposable: Disposable?
override init() { override init() {
self.titleNode = TextNode() self.titleNode = TextNode()
self.titleNode.displaysAsynchronously = false self.titleNode.displaysAsynchronously = false
@ -189,6 +206,10 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.waveformForegroundNode = AudioWaveformNode() self.waveformForegroundNode = AudioWaveformNode()
self.waveformForegroundNode.isLayerBacked = true self.waveformForegroundNode.isLayerBacked = true
self.textNode = TextNode()
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
self.dateAndStatusNode = ChatMessageDateAndStatusNode() self.dateAndStatusNode = ChatMessageDateAndStatusNode()
self.consumableContentNode = ASImageNode() self.consumableContentNode = ASImageNode()
@ -209,6 +230,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.playbackStatusDisposable.dispose() self.playbackStatusDisposable.dispose()
self.fetchDisposable.dispose() self.fetchDisposable.dispose()
self.audioLevelEventsDisposable.dispose() self.audioLevelEventsDisposable.dispose()
self.transcribeDisposable?.dispose()
} }
override func didLoad() { override func didLoad() {
@ -275,15 +297,98 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} }
} }
private func transcribe() {
guard let context = self.context, let message = self.message else {
return
}
if self.transcribedText == nil {
if self.transcribeDisposable == nil {
self.audioTranscriptionState = .inProgress
self.requestUpdateLayout(true)
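// Note: !"".isEmpty always evaluates to false, so the server-side path in the else branch below (context.engine.messages.transcribeAudio) is the one taken; flipping the condition exercises the disabled local pipeline.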
if !"".isEmpty {
let signal: Signal<String?, NoError> = context.account.postbox.transaction { transaction -> Message? in
return transaction.getMessage(message.id)
}
|> mapToSignal { message -> Signal<String?, NoError> in
guard let message = message else {
return .single(nil)
}
guard let file = message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile else {
return .single(nil)
}
return context.account.postbox.mediaBox.resourceData(id: file.resource.id)
|> take(1)
|> mapToSignal { data -> Signal<String?, NoError> in
if !data.complete {
return .single(nil)
}
return .single(data.path)
}
}
|> mapToSignal { result -> Signal<String?, NoError> in
guard let result = result else {
return .single(nil)
}
return convertOpusToAAC(sourcePath: result, allocateTempFile: {
return TempBox.shared.tempFile(fileName: "audio.m4a").path
})
}
|> mapToSignal { result -> Signal<String?, NoError> in
guard let result = result else {
return .single(nil)
}
return transcribeAudio(path: result)
}
let _ = signal.start(next: { [weak self] result in
guard let strongSelf = self else {
return
}
strongSelf.transcribeDisposable = nil
strongSelf.audioTranscriptionState = .expanded
strongSelf.transcribedText = result
strongSelf.requestUpdateLayout(true)
})
} else {
self.transcribeDisposable = (context.engine.messages.transcribeAudio(messageId: message.id)
|> deliverOnMainQueue).start(next: { [weak self] result in
guard let strongSelf = self else {
return
}
strongSelf.transcribeDisposable = nil
strongSelf.audioTranscriptionState = .expanded
strongSelf.transcribedText = result
strongSelf.requestUpdateLayout(true)
})
}
}
} else {
switch self.audioTranscriptionState {
case .expanded:
self.audioTranscriptionState = .collapsed
self.requestUpdateLayout(true)
case .collapsed:
self.audioTranscriptionState = .expanded
self.requestUpdateLayout(true)
default:
break
}
}
}
func asyncLayout() -> (Arguments) -> (CGFloat, (CGSize) -> (CGFloat, (CGFloat) -> (CGSize, (Bool, ListViewItemUpdateAnimation) -> Void))) { func asyncLayout() -> (Arguments) -> (CGFloat, (CGSize) -> (CGFloat, (CGFloat) -> (CGSize, (Bool, ListViewItemUpdateAnimation) -> Void))) {
let currentFile = self.file let currentFile = self.file
let titleAsyncLayout = TextNode.asyncLayout(self.titleNode) let titleAsyncLayout = TextNode.asyncLayout(self.titleNode)
let descriptionAsyncLayout = TextNode.asyncLayout(self.descriptionNode) let descriptionAsyncLayout = TextNode.asyncLayout(self.descriptionNode)
let descriptionMeasuringAsyncLayout = TextNode.asyncLayout(self.descriptionMeasuringNode) let descriptionMeasuringAsyncLayout = TextNode.asyncLayout(self.descriptionMeasuringNode)
let textAsyncLayout = TextNode.asyncLayout(self.textNode)
let statusLayout = self.dateAndStatusNode.asyncLayout() let statusLayout = self.dateAndStatusNode.asyncLayout()
let currentMessage = self.message let currentMessage = self.message
let transcribedText = self.transcribedText
let audioTranscriptionState = self.audioTranscriptionState
return { arguments in return { arguments in
return (CGFloat.greatestFiniteMagnitude, { constrainedSize in return (CGFloat.greatestFiniteMagnitude, { constrainedSize in
@ -453,6 +558,17 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let (descriptionMeasuringLayout, descriptionMeasuringApply) = descriptionMeasuringAsyncLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(fileSizeString) / \(fileSizeString)", font: descriptionFont, textColor: .black), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .middle, constrainedSize: textConstrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets())) let (descriptionMeasuringLayout, descriptionMeasuringApply) = descriptionMeasuringAsyncLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(fileSizeString) / \(fileSizeString)", font: descriptionFont, textColor: .black), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .middle, constrainedSize: textConstrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let descriptionMaxWidth = max(descriptionLayout.size.width, descriptionMeasuringLayout.size.width) let descriptionMaxWidth = max(descriptionLayout.size.width, descriptionMeasuringLayout.size.width)
let textFont = arguments.presentationData.messageFont
let textString: NSAttributedString?
if let transcribedText = transcribedText, case .expanded = audioTranscriptionState {
textString = NSAttributedString(string: transcribedText, font: textFont, textColor: messageTheme.primaryTextColor)
} else {
textString = nil
}
let horizontalInset: CGFloat = (arguments.layoutConstants.bubble.edgeInset + arguments.layoutConstants.bubble.borderInset) * 2.0
let inlineTextConstrainedSize = CGSize(width: constrainedSize.width, height: constrainedSize.height)
let (textLayout, textApply) = textAsyncLayout(TextNodeLayoutArguments(attributedString: textString, backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: inlineTextConstrainedSize.width - horizontalInset, height: .greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let minVoiceWidth: CGFloat = 120.0 let minVoiceWidth: CGFloat = 120.0
let maxVoiceWidth = constrainedSize.width let maxVoiceWidth = constrainedSize.width
@ -517,6 +633,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
reactionSettings = ChatMessageDateAndStatusNode.TrailingReactionSettings(displayInline: displayReactionsInline, preferAdditionalInset: !displayReactionsInline) reactionSettings = ChatMessageDateAndStatusNode.TrailingReactionSettings(displayInline: displayReactionsInline, preferAdditionalInset: !displayReactionsInline)
} }
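// When the transcription text is visible, the date/status trails the text's last line instead of the control area.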
let statusLayoutInput: ChatMessageDateAndStatusNode.LayoutInput
if let _ = textString {
statusLayoutInput = .trailingContent(contentWidth: textLayout.trailingLineWidth, reactionSettings: reactionSettings)
} else {
statusLayoutInput = .trailingContent(contentWidth: iconFrame == nil ? 1000.0 : controlAreaWidth, reactionSettings: reactionSettings)
}
statusSuggestedWidthAndContinue = statusLayout(ChatMessageDateAndStatusNode.Arguments( statusSuggestedWidthAndContinue = statusLayout(ChatMessageDateAndStatusNode.Arguments(
context: arguments.context, context: arguments.context,
presentationData: arguments.presentationData, presentationData: arguments.presentationData,
@ -524,7 +647,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
impressionCount: viewCount, impressionCount: viewCount,
dateText: dateText, dateText: dateText,
type: statusType, type: statusType,
layoutInput: .trailingContent(contentWidth: iconFrame == nil ? 1000.0 : controlAreaWidth, reactionSettings: reactionSettings), layoutInput: statusLayoutInput,
constrainedSize: constrainedSize, constrainedSize: constrainedSize,
availableReactions: arguments.associatedData.availableReactions, availableReactions: arguments.associatedData.availableReactions,
reactions: dateReactionsAndPeers.reactions, reactions: dateReactionsAndPeers.reactions,
@ -543,7 +666,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let descriptionAndStatusWidth = descriptionLayout.size.width let descriptionAndStatusWidth = descriptionLayout.size.width
let calcDuration = max(minVoiceLength, min(maxVoiceLength, CGFloat(audioDuration))) let calcDuration = max(minVoiceLength, min(maxVoiceLength, CGFloat(audioDuration)))
minLayoutWidth = minVoiceWidth + (maxVoiceWidth - minVoiceWidth) * (calcDuration - minVoiceLength) / (maxVoiceLength - minVoiceLength) minLayoutWidth = 30.0 + 8.0 + minVoiceWidth + (maxVoiceWidth - minVoiceWidth) * (calcDuration - minVoiceLength) / (maxVoiceLength - minVoiceLength)
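// The added 30.0 + 8.0 reserves width for the audio transcription button plus its spacing.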
minLayoutWidth = max(descriptionAndStatusWidth + 56, minLayoutWidth) minLayoutWidth = max(descriptionAndStatusWidth + 56, minLayoutWidth)
} else { } else {
minLayoutWidth = max(titleLayout.size.width, descriptionMaxWidth) + 44.0 + 8.0 minLayoutWidth = max(titleLayout.size.width, descriptionMaxWidth) + 44.0 + 8.0
@ -553,6 +676,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
minLayoutWidth = max(minLayoutWidth, statusSuggestedWidthAndContinue.0) minLayoutWidth = max(minLayoutWidth, statusSuggestedWidthAndContinue.0)
} }
minLayoutWidth = max(minLayoutWidth, textLayout.size.width + horizontalInset)
let fileIconImage: UIImage? let fileIconImage: UIImage?
if hasThumbnail { if hasThumbnail {
fileIconImage = nil fileIconImage = nil
@ -591,6 +716,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
fittedLayoutSize = CGSize(width: unionSize.width, height: unionSize.height) fittedLayoutSize = CGSize(width: unionSize.width, height: unionSize.height)
} }
if textString != nil {
fittedLayoutSize.width = max(fittedLayoutSize.width + horizontalInset, textLayout.size.width)
fittedLayoutSize.height += textLayout.size.height + 5.0
}
var statusSizeAndApply: (CGSize, (ListViewItemUpdateAnimation) -> Void)? var statusSizeAndApply: (CGSize, (ListViewItemUpdateAnimation) -> Void)?
if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue { if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue {
statusSizeAndApply = statusSuggestedWidthAndContinue.1(boundingWidth) statusSizeAndApply = statusSuggestedWidthAndContinue.1(boundingWidth)
@ -645,8 +775,41 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} else { } else {
statusReferenceFrame = progressFrame.offsetBy(dx: 0.0, dy: 8.0) statusReferenceFrame = progressFrame.offsetBy(dx: 0.0, dy: 8.0)
} }
if textString == nil, strongSelf.textNode.supernode != nil, animation.isAnimated {
if let snapshotView = strongSelf.textNode.view.snapshotContentTree() {
snapshotView.frame = strongSelf.textNode.frame
strongSelf.view.insertSubview(snapshotView, aboveSubview: strongSelf.textNode.view)
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
}
}
let _ = textApply()
let textFrame = CGRect(origin: CGPoint(x: arguments.layoutConstants.text.bubbleInsets.left - arguments.layoutConstants.file.bubbleInsets.left, y: statusReferenceFrame.maxY + 1.0), size: textLayout.size)
strongSelf.textNode.frame = textFrame
if textString != nil {
if strongSelf.textNode.supernode == nil {
strongSelf.addSubnode(strongSelf.textNode)
if animation.isAnimated {
strongSelf.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
} else {
if strongSelf.textNode.supernode != nil {
strongSelf.textNode.removeFromSupernode()
}
}
if let statusSizeAndApply = statusSizeAndApply { if let statusSizeAndApply = statusSizeAndApply {
let statusFrame = CGRect(origin: CGPoint(x: statusReferenceFrame.minX, y: statusReferenceFrame.maxY + statusOffset), size: statusSizeAndApply.0) let statusFrame: CGRect
if textString != nil {
statusFrame = CGRect(origin: CGPoint(x: fittedLayoutSize.width - 5.0 - statusSizeAndApply.0.width, y: textFrame.maxY + 4.0), size: statusSizeAndApply.0)
} else {
statusFrame = CGRect(origin: CGPoint(x: statusReferenceFrame.minX, y: statusReferenceFrame.maxY + statusOffset), size: statusSizeAndApply.0)
}
if strongSelf.dateAndStatusNode.supernode == nil { if strongSelf.dateAndStatusNode.supernode == nil {
strongSelf.dateAndStatusNode.frame = statusFrame strongSelf.dateAndStatusNode.frame = statusFrame
strongSelf.addSubnode(strongSelf.dateAndStatusNode) strongSelf.addSubnode(strongSelf.dateAndStatusNode)
@ -671,7 +834,60 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
strongSelf.waveformScrubbingNode = waveformScrubbingNode strongSelf.waveformScrubbingNode = waveformScrubbingNode
strongSelf.addSubnode(waveformScrubbingNode) strongSelf.addSubnode(waveformScrubbingNode)
} }
strongSelf.waveformScrubbingNode?.frame = CGRect(origin: CGPoint(x: 57.0, y: 1.0), size: CGSize(width: boundingWidth - 60.0, height: 15.0))
let scrubbingFrame = CGRect(origin: CGPoint(x: 57.0, y: 1.0), size: CGSize(width: boundingWidth - 60.0 - 30.0 - 8.0, height: 15.0))
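// The waveform is narrowed by 30.0 + 8.0 points to leave room for the transcription button at the trailing edge.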
if case .inProgress = audioTranscriptionState {
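// While a transcription request is in flight, a shimmer masked to the waveform shape is shown over the scrubbing area.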
if strongSelf.waveformShimmerNode == nil {
let waveformShimmerNode = ShimmerEffectNode()
strongSelf.waveformShimmerNode = waveformShimmerNode
strongSelf.addSubnode(waveformShimmerNode)
let waveformMaskNode = AudioWaveformNode()
strongSelf.waveformMaskNode = waveformMaskNode
waveformShimmerNode.view.mask = waveformMaskNode.view
}
if let audioWaveform = audioWaveform, let waveformShimmerNode = strongSelf.waveformShimmerNode, let waveformMaskNode = strongSelf.waveformMaskNode {
waveformShimmerNode.frame = scrubbingFrame
waveformShimmerNode.updateAbsoluteRect(scrubbingFrame, within: CGSize(width: scrubbingFrame.size.width + 60.0, height: scrubbingFrame.size.height + 4.0))
var shapes: [ShimmerEffectNode.Shape] = []
shapes.append(.rect(rect: CGRect(origin: CGPoint(), size: scrubbingFrame.size)))
waveformShimmerNode.update(
backgroundColor: .blue,
foregroundColor: messageTheme.mediaInactiveControlColor,
shimmeringColor: messageTheme.mediaActiveControlColor,
shapes: shapes,
horizontal: true,
effectSize: 60.0,
globalTimeOffset: false,
duration: 0.7,
size: scrubbingFrame.size
)
waveformMaskNode.frame = CGRect(origin: CGPoint(), size: scrubbingFrame.size)
waveformMaskNode.setup(color: .black, gravity: .bottom, waveform: audioWaveform)
}
} else {
if let waveformShimmerNode = strongSelf.waveformShimmerNode {
strongSelf.waveformShimmerNode = nil
if animation.isAnimated {
waveformShimmerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak waveformShimmerNode] _ in
waveformShimmerNode?.removeFromSupernode()
})
} else {
waveformShimmerNode.removeFromSupernode()
}
}
strongSelf.waveformMaskNode = nil
}
if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
waveformScrubbingNode.frame = scrubbingFrame
//animation.animator.updateFrame(layer: waveformScrubbingNode.layer, frame: scrubbingFrame, completion: nil)
//waveformScrubbingNode.update(size: scrubbingFrame.size, animator: animation.animator)
}
let waveformColor: UIColor let waveformColor: UIColor
if arguments.incoming { if arguments.incoming {
if consumableContentIcon != nil { if consumableContentIcon != nil {
@ -684,10 +900,41 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} }
strongSelf.waveformNode.setup(color: waveformColor, gravity: .bottom, waveform: audioWaveform) strongSelf.waveformNode.setup(color: waveformColor, gravity: .bottom, waveform: audioWaveform)
strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, gravity: .bottom, waveform: audioWaveform) strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, gravity: .bottom, waveform: audioWaveform)
} else if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
let audioTranscriptionButton: ComponentHostView<Empty>
if let current = strongSelf.audioTranscriptionButton {
audioTranscriptionButton = current
} else {
audioTranscriptionButton = ComponentHostView<Empty>()
strongSelf.audioTranscriptionButton = audioTranscriptionButton
strongSelf.view.addSubview(audioTranscriptionButton)
}
let audioTranscriptionButtonSize = audioTranscriptionButton.update(
transition: animation.isAnimated ? .easeInOut(duration: 0.3) : .immediate,
component: AnyComponent(AudioTranscriptionButtonComponent(
theme: arguments.incoming ? arguments.presentationData.theme.theme.chat.message.incoming : arguments.presentationData.theme.theme.chat.message.outgoing,
transcriptionState: audioTranscriptionState,
pressed: {
guard let strongSelf = self else {
return
}
strongSelf.transcribe()
}
)),
environment: {},
containerSize: CGSize(width: 30.0, height: 30.0)
)
animation.animator.updateFrame(layer: audioTranscriptionButton.layer, frame: CGRect(origin: CGPoint(x: boundingWidth - 30.0 + 3.0, y: -6.0), size: audioTranscriptionButtonSize), completion: nil)
} else {
if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
strongSelf.waveformScrubbingNode = nil strongSelf.waveformScrubbingNode = nil
waveformScrubbingNode.removeFromSupernode() waveformScrubbingNode.removeFromSupernode()
} }
if let audioTranscriptionButton = strongSelf.audioTranscriptionButton {
strongSelf.audioTranscriptionButton = nil
audioTranscriptionButton.removeFromSuperview()
}
}
if let iconFrame = iconFrame { if let iconFrame = iconFrame {
let iconNode: TransformImageNode let iconNode: TransformImageNode
@ -1213,6 +1460,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} }
} }
} }
if let audioTranscriptionButton = self.audioTranscriptionButton {
if let result = audioTranscriptionButton.hitTest(self.view.convert(point, to: self.audioTranscriptionButton), with: event) {
return result
}
}
return super.hitTest(point, with: event) return super.hitTest(point, with: event)
} }
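For reference, the disabled local branch of transcribe() above reduces to the pipeline sketched below. This is a minimal sketch, not part of the commit: transcribeLocally and opusPath are hypothetical names, while the helpers (convertOpusToAAC, transcribeAudio, TempBox) and their signatures are the ones used in the diff.

import SwiftSignalKit
import ConvertOpusToAAC
import LocalAudioTranscription

// Convert the downloaded Opus voice file to AAC, then feed it to the
// on-device recognizer; emits nil if either step fails.
func transcribeLocally(opusPath: String) -> Signal<String?, NoError> {
    return convertOpusToAAC(sourcePath: opusPath, allocateTempFile: {
        return TempBox.shared.tempFile(fileName: "audio.m4a").path
    })
    |> mapToSignal { aacPath -> Signal<String?, NoError> in
        guard let aacPath = aacPath else {
            return .single(nil)
        }
        return transcribeAudio(path: aacPath)
    }
}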

View File

@ -836,7 +836,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
+ (tgcalls::ProtocolVersion)protocolVersionFromLibraryVersion:(NSString *)version { + (tgcalls::ProtocolVersion)protocolVersionFromLibraryVersion:(NSString *)version {
if ([version isEqualToString:@"2.7.7"]) { if ([version isEqualToString:@"2.7.7"]) {
return tgcalls::ProtocolVersion::V0; return tgcalls::ProtocolVersion::V0;
} else if ([version isEqualToString:@"3.0.0"]) { } else if ([version isEqualToString:@"5.0.0"]) {
return tgcalls::ProtocolVersion::V1; return tgcalls::ProtocolVersion::V1;
} else { } else {
return tgcalls::ProtocolVersion::V0; return tgcalls::ProtocolVersion::V0;

View File

@ -1,4 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library") load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "WallpaperBackgroundNodeMetalResources",
srcs = glob([
"Resources/**/*.metal",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "WallpaperBackgroundNodeBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.WallpaperBackgroundNode</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>WallpaperBackgroundNode</string>
"""
)
apple_resource_bundle(
name = "WallpaperBackgroundNodeBundle",
infoplists = [
":WallpaperBackgroundNodeBundleInfoPlist",
],
resources = [
":WallpaperBackgroundNodeMetalResources",
],
)
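# Bundled so MetalWallpaperBackgroundNode can load the compiled shader library at runtime via makeDefaultLibrary(bundle:).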
swift_library( swift_library(
name = "WallpaperBackgroundNode", name = "WallpaperBackgroundNode",
@ -9,6 +49,9 @@ swift_library(
copts = [ copts = [
"-warnings-as-errors", "-warnings-as-errors",
], ],
data = [
":WallpaperBackgroundNodeBundle",
],
deps = [ deps = [
"//submodules/AsyncDisplayKit:AsyncDisplayKit", "//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display", "//submodules/Display:Display",

View File

@ -0,0 +1,35 @@
#include <metal_stdlib>
using namespace metal;
typedef struct {
packed_float2 position;
} Vertex;
typedef struct {
float4 position[[position]];
} Varyings;
vertex Varyings wallpaperVertex(constant Vertex *vertices[[buffer(0)]], unsigned int vid[[vertex_id]]) {
Varyings out;
constant Vertex &v = vertices[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
return out;
}
fragment half4 wallpaperFragment1(Varyings in[[stage_in]]) {
float4 out = float4(0.0, 1.0, 0.0, 1.0);
return half4(out);
}
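// Uniforms for the fragment below are bound by MetalWallpaperBackgroundNode.redraw():
// a uint2 resolution at buffer(0) and an elapsed-time float at buffer(1).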
fragment half4 wallpaperFragment(Varyings in[[stage_in]], constant uint2 &resolution[[buffer(0)]], constant float &time[[buffer(1)]]) {
half4 p = half4(in.position);
p.y = -p.y;
p.y /= resolution.y;
p.y += tan(time + tan(p.x) + sin(.2 * p.x));
float4 out = float4(0.0, (0.3 + (p.y < 0.0 ? 0.0 : 1.0 - p.y * 3.0)) * 0.2, 0.0, 1.0);
return half4(out);
}

View File

@ -0,0 +1,312 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
import GradientBackground
import TelegramPresentationData
import TelegramCore
import AccountContext
import SwiftSignalKit
import WallpaperResources
import FastBlur
import Svg
import GZip
import AppBundle
import AnimatedStickerNode
import TelegramAnimatedStickerNode
import HierarchyTrackingLayer
import MetalKit
import simd
private final class NullActionClass: NSObject, CAAction {
static let shared = NullActionClass()
@objc public func run(forKey event: String, object anObject: Any, arguments dict: [AnyHashable : Any]?) {
}
}
@available(iOS 13.0, *)
open class SimpleMetalLayer: CAMetalLayer {
override open func action(forKey event: String) -> CAAction? {
return NullActionClass.shared
}
override public init() {
super.init()
}
override public init(layer: Any) {
super.init(layer: layer)
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
private func makePipelineState(device: MTLDevice, library: MTLLibrary, vertexProgram: String, fragmentProgram: String) -> MTLRenderPipelineState? {
guard let loadedVertexProgram = library.makeFunction(name: vertexProgram) else {
return nil
}
guard let loadedFragmentProgram = library.makeFunction(name: fragmentProgram) else {
return nil
}
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.vertexFunction = loadedVertexProgram
pipelineStateDescriptor.fragmentFunction = loadedFragmentProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
guard let pipelineState = try? device.makeRenderPipelineState(descriptor: pipelineStateDescriptor) else {
return nil
}
return pipelineState
}
@available(iOS 13.0, *)
final class MetalWallpaperBackgroundNode: ASDisplayNode, WallpaperBackgroundNode {
private let device: MTLDevice
private let metalLayer: SimpleMetalLayer
private let commandQueue: MTLCommandQueue
private let renderPipelineState: MTLRenderPipelineState
private let hierarchyTrackingLayer = HierarchyTrackingLayer()
var isReady: Signal<Bool, NoError> {
return .single(true)
}
var rotation: CGFloat = 0.0
private var animationPhase: Int = 0
private var animationThread: Thread?
private var displayLink: CADisplayLink?
override init() {
self.device = MTLCreateSystemDefaultDevice()!
self.metalLayer = SimpleMetalLayer()
self.metalLayer.maximumDrawableCount = 3
self.metalLayer.presentsWithTransaction = true
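// Triple buffering; presenting within the current CATransaction keeps wallpaper frames in sync with concurrent UIKit updates.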
self.metalLayer.contentsScale = UIScreenScale
self.commandQueue = self.device.makeCommandQueue()!
let mainBundle = Bundle(for: MetalWallpaperBackgroundNode.self)
guard let path = mainBundle.path(forResource: "WallpaperBackgroundNodeBundle", ofType: "bundle") else {
preconditionFailure()
}
guard let bundle = Bundle(path: path) else {
preconditionFailure()
}
guard let defaultLibrary = try? self.device.makeDefaultLibrary(bundle: bundle) else {
preconditionFailure()
}
guard let renderPipelineState = makePipelineState(device: self.device, library: defaultLibrary, vertexProgram: "wallpaperVertex", fragmentProgram: "wallpaperFragment") else {
preconditionFailure()
}
self.renderPipelineState = renderPipelineState
super.init()
self.metalLayer.device = self.device
self.metalLayer.pixelFormat = .bgra8Unorm
self.metalLayer.framebufferOnly = true
self.metalLayer.allowsNextDrawableTimeout = true
self.metalLayer.isOpaque = true
self.layer.addSublayer(self.metalLayer)
self.layer.addSublayer(self.hierarchyTrackingLayer)
self.hierarchyTrackingLayer.opacity = 0.0
self.hierarchyTrackingLayer.didEnterHierarchy = { [weak self] in
self?.updateIsVisible(true)
}
self.hierarchyTrackingLayer.didExitHierarchy = { [weak self] in
self?.updateIsVisible(false)
}
}
func update(wallpaper: TelegramWallpaper) {
}
func _internalUpdateIsSettingUpWallpaper() {
}
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
if self.metalLayer.drawableSize != size {
self.metalLayer.drawableSize = size
transition.updateFrame(layer: self.metalLayer, frame: CGRect(origin: CGPoint(), size: size))
self.redraw()
}
}
private func updateIsVisible(_ isVisible: Bool) {
if isVisible {
if self.displayLink == nil {
final class DisplayLinkTarget: NSObject {
private let f: () -> Void
init(_ f: @escaping () -> Void) {
self.f = f
}
@objc func event() {
self.f()
}
}
let displayLink = CADisplayLink(target: DisplayLinkTarget { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.redraw()
}, selector: #selector(DisplayLinkTarget.event))
self.displayLink = displayLink
if #available(iOS 15.0, iOSApplicationExtension 15.0, *) {
if "".isEmpty {
displayLink.preferredFrameRateRange = CAFrameRateRange(minimum: 60.0, maximum: 60.0, preferred: 60.0)
} else {
displayLink.preferredFrameRateRange = CAFrameRateRange(minimum: Float(UIScreen.main.maximumFramesPerSecond), maximum: Float(UIScreen.main.maximumFramesPerSecond), preferred: Float(UIScreen.main.maximumFramesPerSecond))
}
}
displayLink.isPaused = false
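// Always false: the dedicated animation thread below is disabled, so the display link runs on the main run loop.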
if !"".isEmpty {
self.animationThread = Thread(block: {
displayLink.add(to: .current, forMode: .common)
while true {
if Thread.current.isCancelled {
break
}
RunLoop.current.run(until: .init(timeIntervalSinceNow: 1.0))
}
})
self.animationThread?.name = "MetalWallpaperBackgroundNode"
self.animationThread?.qualityOfService = .userInteractive
self.animationThread?.start()
} else {
displayLink.add(to: .current, forMode: .common)
}
}
} else {
if let displayLink = self.displayLink {
self.displayLink = nil
displayLink.invalidate()
}
if let animationThread = self.animationThread {
self.animationThread = nil
animationThread.cancel()
}
}
}
private var previousDrawTime: Double?
private func redraw() {
let timestamp = CACurrentMediaTime()
if let previousDrawTime = self.previousDrawTime {
let _ = previousDrawTime
//print("frame time \((timestamp - previousDrawTime) * 1000.0)")
}
self.previousDrawTime = timestamp
self.animationPhase += 1
let animationOffset = Float(self.animationPhase % 200) / 200.0
let _ = animationOffset
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
return
}
guard let drawable = self.metalLayer.nextDrawable() else {
return
}
let drawTime = CACurrentMediaTime() - timestamp
if drawTime > 9.0 / 1000.0 {
print("get time \(drawTime * 1000.0)")
}
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = drawable.texture
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(
red: 0.0,
green: 0.0,
blue: 0.0,
alpha: 1.0
)
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
return
}
var vertices: [Float] = [
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0
]
renderEncoder.setRenderPipelineState(self.renderPipelineState)
renderEncoder.setVertexBytes(&vertices, length: 4 * vertices.count, index: 0)
var resolution = simd_uint2(UInt32(drawable.texture.width), UInt32(drawable.texture.height))
renderEncoder.setFragmentBytes(&resolution, length: MemoryLayout<simd_uint2>.size, index: 0)
var time = Float(timestamp) * 0.25
renderEncoder.setFragmentBytes(&time, length: 4, index: 1)
renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4, instanceCount: 1)
renderEncoder.endEncoding()
if self.metalLayer.presentsWithTransaction {
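// Commit and wait until the command buffer is scheduled before presenting, so the drawable appears within the current CATransaction.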
if Thread.isMainThread {
commandBuffer.commit()
commandBuffer.waitUntilScheduled()
drawable.present()
} else {
CATransaction.begin()
commandBuffer.commit()
commandBuffer.waitUntilScheduled()
drawable.present()
CATransaction.commit()
}
} else {
commandBuffer.addScheduledHandler { _ in
drawable.present()
}
commandBuffer.commit()
}
}
func animateEvent(transition: ContainedViewLayoutTransition, extendAnimation: Bool) {
}
func updateBubbleTheme(bubbleTheme: PresentationTheme, bubbleCorners: PresentationChatBubbleCorners) {
}
func hasBubbleBackground(for type: WallpaperBubbleType) -> Bool {
return false
}
func makeBubbleBackground(for type: WallpaperBubbleType) -> WallpaperBubbleBackgroundNode? {
return nil
}
func makeDimmedNode() -> ASDisplayNode? {
return nil
}
}
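The factory diff below gates this node behind DEBUG builds on iOS 13+. A hypothetical opt-in call site, assuming an existing AccountContext value named context, would look like this:

// Requests the experimental Metal wallpaper for a chat; in release
// builds (or on iOS < 13) the factory falls back to the existing nodes.
let backgroundNode = createWallpaperBackgroundNode(
    context: context,
    forChatDisplay: true,
    useSharedAnimationPhase: true,
    useExperimentalImplementation: true
)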

View File

@ -1775,7 +1775,13 @@ private let sharedStorage = WallpaperBackgroundNodeMergedImpl.SharedStorage()
public func createWallpaperBackgroundNode(context: AccountContext, forChatDisplay: Bool, useSharedAnimationPhase: Bool = false, useExperimentalImplementation: Bool = false) -> WallpaperBackgroundNode { public func createWallpaperBackgroundNode(context: AccountContext, forChatDisplay: Bool, useSharedAnimationPhase: Bool = false, useExperimentalImplementation: Bool = false) -> WallpaperBackgroundNode {
if forChatDisplay && useExperimentalImplementation { if forChatDisplay && useExperimentalImplementation {
#if DEBUG
if #available(iOS 13.0, iOSApplicationExtension 13.0, *) {
return MetalWallpaperBackgroundNode()
}
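// In DEBUG builds iOS < 13 falls through to WallpaperBackgroundNodeImpl below; the merged implementation is only used in release builds.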
#else
return WallpaperBackgroundNodeMergedImpl(context: context, storage: useSharedAnimationPhase ? sharedStorage : nil) return WallpaperBackgroundNodeMergedImpl(context: context, storage: useSharedAnimationPhase ? sharedStorage : nil)
#endif
} }
return WallpaperBackgroundNodeImpl(context: context, useSharedAnimationPhase: useSharedAnimationPhase) return WallpaperBackgroundNodeImpl(context: context, useSharedAnimationPhase: useSharedAnimationPhase)