Audio transcription

Ali 2022-05-14 00:08:44 +04:00
parent f80bf33453
commit 6b3c11a6fd
39 changed files with 1139 additions and 189 deletions

View File

@ -1798,6 +1798,8 @@ plist_fragment(
<string>We need this so that you can share photos and videos from your photo library.</string>
<key>NSSiriUsageDescription</key>
<string>You can use Siri to send messages.</string>
<key>NSSpeechRecognitionUsageDescription</key>
<string>We need this to transcribe audio messages on your request.</string>
<key>NSUserActivityTypes</key>
<array>
<string>INSendMessageIntent</string>

View File

@ -661,7 +661,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
"Arrow1.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
"Arrow2.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
],
loop: true
mode: .animating(loop: true)
)
progressValue = progress
@ -682,7 +682,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
"Arrow1.Union.Fill 1": strongSelf.presentationData.theme.list.itemAccentColor,
"Arrow2.Union.Fill 1": strongSelf.presentationData.theme.rootController.navigationSearchBar.inputFillColor.blitOver(strongSelf.presentationData.theme.rootController.navigationBar.opaqueBackgroundColor, alpha: 1.0),
],
loop: false
mode: .animating(loop: false)
)
progressValue = 1.0

View File

@ -20,7 +20,7 @@ public final class BundleIconComponent: Component {
if lhs.tintColor != rhs.tintColor {
return false
}
return false
return true
}
public final class View: UIImageView {
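
Note: the fix in this hunk corrects BundleIconComponent's Equatable conformance. The old implementation fell through to "return false", so two components never compared equal and the icon was rebuilt on every update. A minimal sketch of the corrected shape (the name field is assumed from context, since the hunk only shows the tail of the function):

public static func ==(lhs: BundleIconComponent, rhs: BundleIconComponent) -> Bool {
    if lhs.name != rhs.name {
        return false
    }
    if lhs.tintColor != rhs.tintColor {
        return false
    }
    return true // previously `return false`, which defeated change detection
}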

View File

@ -6,16 +6,20 @@ import HierarchyTrackingLayer
public final class LottieAnimationComponent: Component {
public struct Animation: Equatable {
public enum Mode: Equatable {
case still
case animating(loop: Bool)
case animateTransitionFromPrevious
}
public var name: String
public var loop: Bool
public var isAnimating: Bool
public var mode: Mode
public var colors: [String: UIColor]
public init(name: String, colors: [String: UIColor], loop: Bool, isAnimating: Bool = true) {
public init(name: String, colors: [String: UIColor], mode: Mode) {
self.name = name
self.colors = colors
self.loop = loop
self.isAnimating = isAnimating
self.mode = mode
}
}
@ -55,6 +59,7 @@ public final class LottieAnimationComponent: Component {
private var colorCallbacks: [LOTColorValueCallback] = []
private var animationView: LOTAnimationView?
private var didPlayToCompletion: Bool = false
private let hierarchyTrackingLayer: HierarchyTrackingLayer
@ -100,12 +105,22 @@ public final class LottieAnimationComponent: Component {
}
func update(component: LottieAnimationComponent, availableSize: CGSize, transition: Transition) -> CGSize {
var updatePlayback = false
if self.component?.animation != component.animation {
if let animationView = self.animationView {
if case .animateTransitionFromPrevious = component.animation.mode, !animationView.isAnimationPlaying, !self.didPlayToCompletion {
animationView.play { _ in
}
}
}
if let animationView = self.animationView, animationView.isAnimationPlaying {
animationView.completionBlock = { [weak self] _ in
guard let strongSelf = self else {
return
}
strongSelf.didPlayToCompletion = true
let _ = strongSelf.update(component: component, availableSize: availableSize, transition: transition)
}
animationView.loopAnimation = false
@ -113,14 +128,22 @@ public final class LottieAnimationComponent: Component {
self.component = component
self.animationView?.removeFromSuperview()
self.didPlayToCompletion = false
if let url = getAppBundle().url(forResource: component.animation.name, withExtension: "json"), let composition = LOTComposition(filePath: url.path) {
let view = LOTAnimationView(model: composition, in: getAppBundle())
view.loopAnimation = component.animation.loop
switch component.animation.mode {
case .still, .animateTransitionFromPrevious:
view.loopAnimation = false
case let .animating(loop):
view.loopAnimation = loop
}
view.animationSpeed = 1.0
view.backgroundColor = .clear
view.isOpaque = false
//view.logHierarchyKeypaths()
for (key, value) in component.animation.colors {
let colorCallback = LOTColorValueCallback(color: value.cgColor)
self.colorCallbacks.append(colorCallback)
@ -129,6 +152,8 @@ public final class LottieAnimationComponent: Component {
self.animationView = view
self.addSubview(view)
updatePlayback = true
}
}
}
@ -146,7 +171,8 @@ public final class LottieAnimationComponent: Component {
if let animationView = self.animationView {
animationView.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.height - animationSize.height) / 2.0)), size: animationSize)
if component.animation.isAnimating {
if updatePlayback {
if case .animating = component.animation.mode {
if !animationView.isAnimationPlaying {
animationView.play { _ in
}
@ -157,6 +183,7 @@ public final class LottieAnimationComponent: Component {
}
}
}
}
return size
}
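
Note: Animation now carries a single mode value in place of the old loop/isAnimating pair, adding the animateTransitionFromPrevious case that the transcription button further down uses to morph between icons. A call-site migration mirroring the ChatListController hunks above (animation name and colors dictionary are illustrative):

let animation = LottieAnimationComponent.Animation(
    name: "anim_infotip",
    colors: colors,
    mode: .animating(loop: true) // was: loop: true, isAnimating: true
)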

View File

@ -8,7 +8,15 @@ public final class NullActionClass: NSObject, CAAction {
public let nullAction = NullActionClass()
open class SimpleLayer: CALayer {
public var didEnterHierarchy: (() -> Void)?
public var didExitHierarchy: (() -> Void)?
override open func action(forKey event: String) -> CAAction? {
if event == kCAOnOrderIn {
self.didEnterHierarchy?()
} else if event == kCAOnOrderOut {
self.didExitHierarchy?()
}
return nullAction
}
@ -26,7 +34,15 @@ open class SimpleLayer: CALayer {
}
open class SimpleShapeLayer: CAShapeLayer {
public var didEnterHierarchy: (() -> Void)?
public var didExitHierarchy: (() -> Void)?
override open func action(forKey event: String) -> CAAction? {
if event == kCAOnOrderIn {
self.didEnterHierarchy?()
} else if event == kCAOnOrderOut {
self.didExitHierarchy?()
}
return nullAction
}
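
Note: SimpleLayer and SimpleShapeLayer now surface kCAOnOrderIn/kCAOnOrderOut as didEnterHierarchy/didExitHierarchy callbacks, piggybacking on the action(forKey:) override that already returns nullAction to suppress implicit animations. A sketch of the intended use, assuming a hypothetical makeStrokeAnimation() helper; CAAnimations added while a layer is detached are discarded, so they must be re-added on attach:

let progressLayer = SimpleShapeLayer()
progressLayer.didEnterHierarchy = { [weak progressLayer] in
    // Restart the animation every time the layer (re)joins a hierarchy.
    progressLayer?.add(makeStrokeAnimation(), forKey: "strokeEnd")
}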

View File

@ -0,0 +1,20 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "ConvertOpusToAAC",
module_name = "ConvertOpusToAAC",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/FFMpegBinding:FFMpegBinding",
"//submodules/MediaPlayer:UniversalMediaPlayer",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,69 @@
import Foundation
import UniversalMediaPlayer
import AVFoundation
import SwiftSignalKit
public func convertOpusToAAC(sourcePath: String, allocateTempFile: @escaping () -> String) -> Signal<String?, NoError> {
return Signal { subscriber in
var isCancelled = false
let queue = Queue()
queue.async {
do {
let audioSource = SoftwareAudioSource(path: sourcePath)
let outputPath = allocateTempFile()
let assetWriter = try AVAssetWriter(outputURL: URL(fileURLWithPath: outputPath), fileType: .m4a)
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
let outputSettings: [String: Any] = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 48000,
AVEncoderBitRateKey: 96000,
AVNumberOfChannelsKey: 1,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout<AudioChannelLayout>.size)
]
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings)
assetWriter.add(audioInput)
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: .zero)
let finishWriting: () -> Void = {
assetWriter.finishWriting(completionHandler: {
subscriber.putNext(outputPath)
subscriber.putCompletion()
})
}
audioInput.requestMediaDataWhenReady(on: queue.queue, using: {
if audioInput.isReadyForMoreMediaData {
if !isCancelled, let sampleBuffer = audioSource.readSampleBuffer() {
if !audioInput.append(sampleBuffer) {
audioInput.markAsFinished()
finishWriting()
return
}
} else {
audioInput.markAsFinished()
finishWriting()
}
}
})
} catch let e {
print("Error: \(e)")
subscriber.putNext(nil)
subscriber.putCompletion()
}
}
return ActionDisposable {
isCancelled = true
}
}
}
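
Note: convertOpusToAAC decodes the Opus file through SoftwareAudioSource (FFmpeg) and re-encodes it with AVAssetWriter as 48 kHz mono AAC at 96 kbps; the writer pulls sample buffers on a background Queue until the source runs dry, then finishes the file and emits its path (or nil on failure). A hedged usage sketch (the temp-path closure is an assumption; any writable unique path works):

let disposable = (convertOpusToAAC(sourcePath: voiceFilePath, allocateTempFile: {
    return NSTemporaryDirectory() + UUID().uuidString
})
|> deliverOnMainQueue).start(next: { aacPath in
    guard let aacPath = aacPath else {
        return // conversion failed
    }
    print("AAC written to \(aacPath)")
})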

View File

@ -0,0 +1,18 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "LocalAudioTranscription",
module_name = "LocalAudioTranscription",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,73 @@
import Foundation
import SwiftSignalKit
import Speech
private var sharedRecognizer: Any?
public func transcribeAudio(path: String) -> Signal<String?, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
if #available(iOS 13.0, *) {
SFSpeechRecognizer.requestAuthorization { (status) in
switch status {
case .notDetermined:
subscriber.putNext(nil)
subscriber.putCompletion()
case .restricted:
subscriber.putNext(nil)
subscriber.putCompletion()
case .denied:
subscriber.putNext(nil)
subscriber.putCompletion()
case .authorized:
let speechRecognizer: SFSpeechRecognizer
if let sharedRecognizer = sharedRecognizer as? SFSpeechRecognizer {
speechRecognizer = sharedRecognizer
} else {
guard let speechRecognizerValue = SFSpeechRecognizer(locale: Locale(identifier: "ru-RU")), speechRecognizerValue.isAvailable else {
subscriber.putNext(nil)
subscriber.putCompletion()
return
}
speechRecognizerValue.defaultTaskHint = .unspecified
sharedRecognizer = speechRecognizerValue
speechRecognizer = speechRecognizerValue
speechRecognizer.supportsOnDeviceRecognition = false
}
let request = SFSpeechURLRecognitionRequest(url: URL(fileURLWithPath: path))
request.requiresOnDeviceRecognition = speechRecognizer.supportsOnDeviceRecognition
request.shouldReportPartialResults = false
let task = speechRecognizer.recognitionTask(with: request, resultHandler: { result, error in
if let result = result {
subscriber.putNext(result.bestTranscription.formattedString)
subscriber.putCompletion()
} else {
print("transcribeAudio: \(String(describing: error))")
subscriber.putNext(nil)
subscriber.putCompletion()
}
})
disposable.set(ActionDisposable {
task.cancel()
})
@unknown default:
subscriber.putNext(nil)
subscriber.putCompletion()
}
}
} else {
subscriber.putNext(nil)
subscriber.putCompletion()
}
return disposable
}
|> runOn(.mainQueue())
}
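
Note: transcribeAudio gates on SFSpeechRecognizer authorization, caches a single recognizer (hard-coded to the ru-RU locale at this point), and completes with nil on every failure path instead of erroring. Since SFSpeechURLRecognitionRequest cannot read Opus directly, a plausible pipeline (suggested by the ConvertOpusToAAC and LocalAudioTranscription imports added to ChatMessageInteractiveFileNode below) chains the conversion first:

func transcribeVoiceMessage(opusPath: String, allocateTempFile: @escaping () -> String) -> Signal<String?, NoError> {
    return convertOpusToAAC(sourcePath: opusPath, allocateTempFile: allocateTempFile)
    |> mapToSignal { aacPath -> Signal<String?, NoError> in
        guard let aacPath = aacPath else {
            return .single(nil)
        }
        return transcribeAudio(path: aacPath)
    }
}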

View File

@ -9,6 +9,8 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
private let audioFrame: FFMpegAVFrame
private var resetDecoderOnNextFrame = true
private let formatDescription: CMAudioFormatDescription
private var delayedFrames: [MediaTrackFrame] = []
init(codecContext: FFMpegAVCodecContext, sampleRate: Int = 44100, channelCount: Int = 2) {
@ -16,6 +18,27 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
self.audioFrame = FFMpegAVFrame()
self.swrContext = FFMpegSWResample(sourceChannelCount: Int(codecContext.channels()), sourceSampleRate: Int(codecContext.sampleRate()), sourceSampleFormat: codecContext.sampleFormat(), destinationChannelCount: channelCount, destinationSampleRate: sampleRate, destinationSampleFormat: FFMPEG_AV_SAMPLE_FMT_S16)
var outputDescription = AudioStreamBasicDescription(
mSampleRate: Float64(sampleRate),
mFormatID: kAudioFormatLinearPCM,
mFormatFlags: kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked,
mBytesPerPacket: UInt32(2 * channelCount),
mFramesPerPacket: 1,
mBytesPerFrame: UInt32(2 * channelCount),
mChannelsPerFrame: UInt32(channelCount),
mBitsPerChannel: 16,
mReserved: 0
)
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
var formatDescription: CMAudioFormatDescription?
CMAudioFormatDescriptionCreate(allocator: nil, asbd: &outputDescription, layoutSize: MemoryLayout<AudioChannelLayout>.size, layout: &channelLayout, magicCookieSize: 0, magicCookie: nil, extensions: nil, formatDescriptionOut: &formatDescription)
self.formatDescription = formatDescription!
}
func decodeRaw(frame: MediaTrackDecodableFrame) -> Data? {
@ -112,13 +135,18 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
return nil
}
var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: pts, decodeTimeStamp: pts)
//var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: pts, decodeTimeStamp: pts)
var sampleBuffer: CMSampleBuffer?
var sampleSize = data.count
guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: nil, sampleCount: 1, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
//var sampleSize = data.count
guard CMAudioSampleBufferCreateReadyWithPacketDescriptions(allocator: nil, dataBuffer: blockBuffer!, formatDescription: self.formatDescription, sampleCount: Int(data.count / 2), presentationTimeStamp: pts, packetDescriptions: nil, sampleBufferOut: &sampleBuffer) == noErr else {
return nil
}
/*guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: self.formatDescription, sampleCount: Int(frame.duration), sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
return nil
}*/
let resetDecoder = self.resetDecoderOnNextFrame
self.resetDecoderOnNextFrame = false
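
Note: the decoder now builds a CMAudioFormatDescription describing its S16 output and creates ready-to-use buffers via CMAudioSampleBufferCreateReadyWithPacketDescriptions. The sampleCount of data.count / 2 assumes 2 bytes per sample frame, i.e. 16-bit samples with the mono channel layout the code hard-codes:

// 16-bit signed PCM: bytesPerFrame = 2 * channelCount; with the mono
// layout above this reduces to data.count / 2 sample frames.
let sampleCount = data.count / (2 * channelCount)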

View File

@ -710,6 +710,10 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
}
}
public func update(size: CGSize, animator: ControlledTransitionAnimator) {
self.updateProgressAnimations(animator: animator)
}
public func updateColors(backgroundColor: UIColor, foregroundColor: UIColor) {
switch self.contentNodes {
case let .standard(node):
@ -736,8 +740,8 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
}
}
private func updateProgressAnimations() {
self.updateProgress()
private func updateProgressAnimations(animator: ControlledTransitionAnimator? = nil) {
self.updateProgress(animator: animator)
let needsAnimation: Bool
@ -794,7 +798,7 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
})
}
private func updateProgress() {
private func updateProgress(animator: ControlledTransitionAnimator? = nil) {
let bounds = self.bounds
var isPlaying = false
@ -832,10 +836,11 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
node.containerNode.frame = CGRect(origin: CGPoint(), size: bounds.size)
let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: floor((bounds.size.height - node.lineHeight) / 2.0)), size: CGSize(width: bounds.size.width, height: node.lineHeight))
let foregroundContentFrame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))
node.backgroundNode.position = backgroundFrame.center
node.backgroundNode.bounds = CGRect(origin: CGPoint(), size: backgroundFrame.size)
let foregroundContentFrame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))
node.foregroundContentNode.position = foregroundContentFrame.center
node.foregroundContentNode.bounds = CGRect(origin: CGPoint(), size: foregroundContentFrame.size)
@ -963,8 +968,14 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
}
let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: bounds.size.width, height: bounds.size.height))
if let animator = animator {
animator.updateFrame(layer: node.backgroundNode.layer, frame: backgroundFrame, completion: nil)
animator.updateFrame(layer: node.foregroundContentNode.layer, frame: CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height)), completion: nil)
} else {
node.backgroundNode.frame = backgroundFrame
node.foregroundContentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: backgroundFrame.size.width, height: backgroundFrame.size.height))
}
let timestampAndDuration: (timestamp: Double, duration: Double)?
if let statusValue = self.statusValue, Double(0.0).isLess(than: statusValue.duration) {

View File

@ -446,6 +446,19 @@ public final class SoftwareAudioSource {
}
}
public func readSampleBuffer() -> CMSampleBuffer? {
guard let audioStream = self.audioStream, let _ = self.avFormatContext else {
return nil
}
let (decodableFrame, _) = self.readDecodableFrame()
if let decodableFrame = decodableFrame {
return audioStream.decoder.decode(frame: decodableFrame)?.sampleBuffer
} else {
return nil
}
}
public func readEncodedFrame() -> (Data, Int)? {
guard let _ = self.audioStream, let _ = self.avFormatContext else {
return nil

View File

@ -147,10 +147,11 @@
id parsedMessage = [MTInternalMessageParser parseMessage:rpcResultMessage.data];
if ([parsedMessage isKindOfClass:[MTRpcError class]]) {
if (MTLogEnabled()) {
MTRpcError *rpcError = (MTRpcError *)parsedMessage;
if (MTLogEnabled()) {
MTLog(@"[MTRequestMessageService#%p response for %" PRId64 " is error: %d: %@]", self, _currentMessageId, (int)rpcError.errorCode, rpcError.errorDescription);
}
MTShortLog(@"[MTRequestMessageService#%p response for %" PRId64 " is error: %d: %@]", self, _currentMessageId, (int)rpcError.errorCode, rpcError.errorDescription);
}
//boolTrue#997275b5 = Bool;

View File

@ -178,6 +178,8 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
private var absoluteLocation: (CGRect, CGSize)?
private var isCurrentlyInHierarchy = false
private var shouldBeAnimating = false
private var globalTimeOffset = true
private var duration: Double?
override init() {
self.imageNodeContainer = ASDisplayNode()
@ -212,17 +214,19 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
self.updateAnimation()
}
func update(backgroundColor: UIColor, foregroundColor: UIColor, horizontal: Bool = false) {
func update(backgroundColor: UIColor, foregroundColor: UIColor, horizontal: Bool, effectSize: CGFloat?, globalTimeOffset: Bool, duration: Double?) {
if let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), self.currentHorizontal == horizontal {
return
}
self.currentBackgroundColor = backgroundColor
self.currentForegroundColor = foregroundColor
self.currentHorizontal = horizontal
self.globalTimeOffset = globalTimeOffset
self.duration = duration
let image: UIImage?
if horizontal {
image = generateImage(CGSize(width: 320.0, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
image = generateImage(CGSize(width: effectSize ?? 320.0, height: 16.0), opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setFillColor(backgroundColor.cgColor)
context.fill(CGRect(origin: CGPoint(), size: size))
@ -304,18 +308,22 @@ final class ShimmerEffectForegroundNode: ASDisplayNode {
}
if horizontal {
let gradientHeight: CGFloat = 320.0
let gradientHeight: CGFloat = self.imageNode.image?.size.width ?? 320.0
self.imageNode.frame = CGRect(origin: CGPoint(x: -gradientHeight, y: 0.0), size: CGSize(width: gradientHeight, height: containerSize.height))
let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.width + gradientHeight) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 1.3 * 1.0, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.width + gradientHeight) as NSNumber, keyPath: "position.x", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: duration ?? 1.3, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
animation.repeatCount = Float.infinity
if self.globalTimeOffset {
animation.beginTime = 1.0
}
self.imageNode.layer.add(animation, forKey: "shimmer")
} else {
let gradientHeight: CGFloat = 250.0
self.imageNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -gradientHeight), size: CGSize(width: containerSize.width, height: gradientHeight))
let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.height + gradientHeight) as NSNumber, keyPath: "position.y", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: 1.3 * 1.0, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
let animation = self.imageNode.layer.makeAnimation(from: 0.0 as NSNumber, to: (containerSize.height + gradientHeight) as NSNumber, keyPath: "position.y", timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, duration: duration ?? 1.3, delay: 0.0, mediaTimingFunction: nil, removeOnCompletion: true, additive: true)
animation.repeatCount = Float.infinity
if self.globalTimeOffset {
animation.beginTime = 1.0
}
self.imageNode.layer.add(animation, forKey: "shimmer")
}
}
@ -339,6 +347,7 @@ public final class ShimmerEffectNode: ASDisplayNode {
private var currentForegroundColor: UIColor?
private var currentShimmeringColor: UIColor?
private var currentHorizontal: Bool?
private var currentEffectSize: CGFloat?
private var currentSize = CGSize()
override public init() {
@ -361,8 +370,8 @@ public final class ShimmerEffectNode: ASDisplayNode {
self.effectNode.updateAbsoluteRect(rect, within: containerSize)
}
public func update(backgroundColor: UIColor, foregroundColor: UIColor, shimmeringColor: UIColor, shapes: [Shape], horizontal: Bool = false, size: CGSize) {
if self.currentShapes == shapes, let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor), horizontal == self.currentHorizontal, self.currentSize == size {
public func update(backgroundColor: UIColor, foregroundColor: UIColor, shimmeringColor: UIColor, shapes: [Shape], horizontal: Bool = false, effectSize: CGFloat? = nil, globalTimeOffset: Bool = true, duration: Double? = nil, size: CGSize) {
if self.currentShapes == shapes, let currentBackgroundColor = self.currentBackgroundColor, currentBackgroundColor.isEqual(backgroundColor), let currentForegroundColor = self.currentForegroundColor, currentForegroundColor.isEqual(foregroundColor), let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor), horizontal == self.currentHorizontal, effectSize == self.currentEffectSize, self.currentSize == size {
return
}
@ -375,7 +384,7 @@ public final class ShimmerEffectNode: ASDisplayNode {
self.backgroundNode.backgroundColor = foregroundColor
self.effectNode.update(backgroundColor: foregroundColor, foregroundColor: shimmeringColor, horizontal: horizontal)
self.effectNode.update(backgroundColor: foregroundColor, foregroundColor: shimmeringColor, horizontal: horizontal, effectSize: effectSize, globalTimeOffset: globalTimeOffset, duration: duration)
self.foregroundNode.image = generateImage(size, rotatedContext: { size, context in
context.setFillColor(backgroundColor.cgColor)

View File

@ -81,7 +81,7 @@ public class StickerShimmerEffectNode: ASDisplayNode {
self.backgroundNode.backgroundColor = foregroundColor
self.effectNode.update(backgroundColor: backgroundColor == nil ? .clear : foregroundColor, foregroundColor: shimmeringColor, horizontal: true)
self.effectNode.update(backgroundColor: backgroundColor == nil ? .clear : foregroundColor, foregroundColor: shimmeringColor, horizontal: true, effectSize: nil, globalTimeOffset: true, duration: nil)
let bounds = CGRect(origin: CGPoint(), size: size)
let image = generateImage(size, rotatedContext: { size, context in

View File

@ -414,7 +414,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1281329567] = { return Api.MessageAction.parse_messageActionGroupCallScheduled($0) }
dict[-1615153660] = { return Api.MessageAction.parse_messageActionHistoryClear($0) }
dict[1345295095] = { return Api.MessageAction.parse_messageActionInviteToGroupCall($0) }
dict[1080663248] = { return Api.MessageAction.parse_messageActionPaymentSent($0) }
dict[-1776926890] = { return Api.MessageAction.parse_messageActionPaymentSent($0) }
dict[-1892568281] = { return Api.MessageAction.parse_messageActionPaymentSentMe($0) }
dict[-2132731265] = { return Api.MessageAction.parse_messageActionPhoneCall($0) }
dict[-1799538451] = { return Api.MessageAction.parse_messageActionPinMessage($0) }
@ -985,6 +985,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[946083368] = { return Api.messages.StickerSetInstallResult.parse_stickerSetInstallResultSuccess($0) }
dict[816245886] = { return Api.messages.Stickers.parse_stickers($0) }
dict[-244016606] = { return Api.messages.Stickers.parse_stickersNotModified($0) }
dict[-1442723025] = { return Api.messages.TranscribedAudio.parse_transcribedAudio($0) }
dict[1741309751] = { return Api.messages.TranslatedText.parse_translateNoResult($0) }
dict[-1575684144] = { return Api.messages.TranslatedText.parse_translateResultText($0) }
dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
@ -1739,6 +1740,8 @@ public extension Api {
_1.serialize(buffer, boxed)
case let _1 as Api.messages.Stickers:
_1.serialize(buffer, boxed)
case let _1 as Api.messages.TranscribedAudio:
_1.serialize(buffer, boxed)
case let _1 as Api.messages.TranslatedText:
_1.serialize(buffer, boxed)
case let _1 as Api.messages.VotesList:

View File

@ -1009,7 +1009,7 @@ public extension Api {
case messageActionGroupCallScheduled(call: Api.InputGroupCall, scheduleDate: Int32)
case messageActionHistoryClear
case messageActionInviteToGroupCall(call: Api.InputGroupCall, users: [Int64])
case messageActionPaymentSent(currency: String, totalAmount: Int64)
case messageActionPaymentSent(flags: Int32, currency: String, totalAmount: Int64, invoiceSlug: String?)
case messageActionPaymentSentMe(flags: Int32, currency: String, totalAmount: Int64, payload: Buffer, info: Api.PaymentRequestedInfo?, shippingOptionId: String?, charge: Api.PaymentCharge)
case messageActionPhoneCall(flags: Int32, callId: Int64, reason: Api.PhoneCallDiscardReason?, duration: Int32?)
case messageActionPinMessage
@ -1170,12 +1170,14 @@ public extension Api {
serializeInt64(item, buffer: buffer, boxed: false)
}
break
case .messageActionPaymentSent(let currency, let totalAmount):
case .messageActionPaymentSent(let flags, let currency, let totalAmount, let invoiceSlug):
if boxed {
buffer.appendInt32(1080663248)
buffer.appendInt32(-1776926890)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeString(currency, buffer: buffer, boxed: false)
serializeInt64(totalAmount, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {serializeString(invoiceSlug!, buffer: buffer, boxed: false)}
break
case .messageActionPaymentSentMe(let flags, let currency, let totalAmount, let payload, let info, let shippingOptionId, let charge):
if boxed {
@ -1303,8 +1305,8 @@ public extension Api {
return ("messageActionHistoryClear", [])
case .messageActionInviteToGroupCall(let call, let users):
return ("messageActionInviteToGroupCall", [("call", String(describing: call)), ("users", String(describing: users))])
case .messageActionPaymentSent(let currency, let totalAmount):
return ("messageActionPaymentSent", [("currency", String(describing: currency)), ("totalAmount", String(describing: totalAmount))])
case .messageActionPaymentSent(let flags, let currency, let totalAmount, let invoiceSlug):
return ("messageActionPaymentSent", [("flags", String(describing: flags)), ("currency", String(describing: currency)), ("totalAmount", String(describing: totalAmount)), ("invoiceSlug", String(describing: invoiceSlug))])
case .messageActionPaymentSentMe(let flags, let currency, let totalAmount, let payload, let info, let shippingOptionId, let charge):
return ("messageActionPaymentSentMe", [("flags", String(describing: flags)), ("currency", String(describing: currency)), ("totalAmount", String(describing: totalAmount)), ("payload", String(describing: payload)), ("info", String(describing: info)), ("shippingOptionId", String(describing: shippingOptionId)), ("charge", String(describing: charge))])
case .messageActionPhoneCall(let flags, let callId, let reason, let duration):
@ -1565,14 +1567,20 @@ public extension Api {
}
}
public static func parse_messageActionPaymentSent(_ reader: BufferReader) -> MessageAction? {
var _1: String?
_1 = parseString(reader)
var _2: Int64?
_2 = reader.readInt64()
var _1: Int32?
_1 = reader.readInt32()
var _2: String?
_2 = parseString(reader)
var _3: Int64?
_3 = reader.readInt64()
var _4: String?
if Int(_1!) & Int(1 << 0) != 0 {_4 = parseString(reader) }
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.MessageAction.messageActionPaymentSent(currency: _1!, totalAmount: _2!)
let _c3 = _3 != nil
let _c4 = (Int(_1!) & Int(1 << 0) == 0) || _4 != nil
if _c1 && _c2 && _c3 && _c4 {
return Api.MessageAction.messageActionPaymentSent(flags: _1!, currency: _2!, totalAmount: _3!, invoiceSlug: _4)
}
else {
return nil
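
Note: messageActionPaymentSent gains a flags word whose bit 0 gates the new optional invoiceSlug, following the usual TL conditional-field pattern: the serializer writes the string only when the bit is set, and the parser mirrors the same check (_c4 accepts a missing slug only when bit 0 is clear). A producer-side sketch of the convention:

var flags: Int32 = 0
if invoiceSlug != nil {
    flags |= 1 << 0 // bit 0: invoiceSlug follows totalAmount on the wire
}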

View File

@ -488,6 +488,42 @@ public extension Api.messages {
}
}
public extension Api.messages {
enum TranscribedAudio: TypeConstructorDescription {
case transcribedAudio(text: String)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .transcribedAudio(let text):
if boxed {
buffer.appendInt32(-1442723025)
}
serializeString(text, buffer: buffer, boxed: false)
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .transcribedAudio(let text):
return ("transcribedAudio", [("text", String(describing: text))])
}
}
public static func parse_transcribedAudio(_ reader: BufferReader) -> TranscribedAudio? {
var _1: String?
_1 = parseString(reader)
let _c1 = _1 != nil
if _c1 {
return Api.messages.TranscribedAudio.transcribedAudio(text: _1!)
}
else {
return nil
}
}
}
}
public extension Api.messages {
enum TranslatedText: TypeConstructorDescription {
case translateNoResult
@ -1464,95 +1500,3 @@ public extension Api.photos {
}
}
public extension Api.photos {
enum Photos: TypeConstructorDescription {
case photos(photos: [Api.Photo], users: [Api.User])
case photosSlice(count: Int32, photos: [Api.Photo], users: [Api.User])
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .photos(let photos, let users):
if boxed {
buffer.appendInt32(-1916114267)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(photos.count))
for item in photos {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(users.count))
for item in users {
item.serialize(buffer, true)
}
break
case .photosSlice(let count, let photos, let users):
if boxed {
buffer.appendInt32(352657236)
}
serializeInt32(count, buffer: buffer, boxed: false)
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(photos.count))
for item in photos {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(users.count))
for item in users {
item.serialize(buffer, true)
}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .photos(let photos, let users):
return ("photos", [("photos", String(describing: photos)), ("users", String(describing: users))])
case .photosSlice(let count, let photos, let users):
return ("photosSlice", [("count", String(describing: count)), ("photos", String(describing: photos)), ("users", String(describing: users))])
}
}
public static func parse_photos(_ reader: BufferReader) -> Photos? {
var _1: [Api.Photo]?
if let _ = reader.readInt32() {
_1 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Photo.self)
}
var _2: [Api.User]?
if let _ = reader.readInt32() {
_2 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
}
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.photos.Photos.photos(photos: _1!, users: _2!)
}
else {
return nil
}
}
public static func parse_photosSlice(_ reader: BufferReader) -> Photos? {
var _1: Int32?
_1 = reader.readInt32()
var _2: [Api.Photo]?
if let _ = reader.readInt32() {
_2 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Photo.self)
}
var _3: [Api.User]?
if let _ = reader.readInt32() {
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
}
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
if _c1 && _c2 && _c3 {
return Api.photos.Photos.photosSlice(count: _1!, photos: _2!, users: _3!)
}
else {
return nil
}
}
}
}

View File

@ -1,3 +1,95 @@
public extension Api.photos {
enum Photos: TypeConstructorDescription {
case photos(photos: [Api.Photo], users: [Api.User])
case photosSlice(count: Int32, photos: [Api.Photo], users: [Api.User])
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .photos(let photos, let users):
if boxed {
buffer.appendInt32(-1916114267)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(photos.count))
for item in photos {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(users.count))
for item in users {
item.serialize(buffer, true)
}
break
case .photosSlice(let count, let photos, let users):
if boxed {
buffer.appendInt32(352657236)
}
serializeInt32(count, buffer: buffer, boxed: false)
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(photos.count))
for item in photos {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(users.count))
for item in users {
item.serialize(buffer, true)
}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .photos(let photos, let users):
return ("photos", [("photos", String(describing: photos)), ("users", String(describing: users))])
case .photosSlice(let count, let photos, let users):
return ("photosSlice", [("count", String(describing: count)), ("photos", String(describing: photos)), ("users", String(describing: users))])
}
}
public static func parse_photos(_ reader: BufferReader) -> Photos? {
var _1: [Api.Photo]?
if let _ = reader.readInt32() {
_1 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Photo.self)
}
var _2: [Api.User]?
if let _ = reader.readInt32() {
_2 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
}
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.photos.Photos.photos(photos: _1!, users: _2!)
}
else {
return nil
}
}
public static func parse_photosSlice(_ reader: BufferReader) -> Photos? {
var _1: Int32?
_1 = reader.readInt32()
var _2: [Api.Photo]?
if let _ = reader.readInt32() {
_2 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Photo.self)
}
var _3: [Api.User]?
if let _ = reader.readInt32() {
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
}
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
if _c1 && _c2 && _c3 {
return Api.photos.Photos.photosSlice(count: _1!, photos: _2!, users: _3!)
}
else {
return nil
}
}
}
}
public extension Api.stats {
enum BroadcastStats: TypeConstructorDescription {
case broadcastStats(period: Api.StatsDateRangeDays, followers: Api.StatsAbsValueAndPrev, viewsPerPost: Api.StatsAbsValueAndPrev, sharesPerPost: Api.StatsAbsValueAndPrev, enabledNotifications: Api.StatsPercentValue, growthGraph: Api.StatsGraph, followersGraph: Api.StatsGraph, muteGraph: Api.StatsGraph, topHoursGraph: Api.StatsGraph, interactionsGraph: Api.StatsGraph, ivInteractionsGraph: Api.StatsGraph, viewsBySourceGraph: Api.StatsGraph, newFollowersBySourceGraph: Api.StatsGraph, languagesGraph: Api.StatsGraph, recentMessageInteractions: [Api.MessageInteractionCounters])

View File

@ -6024,6 +6024,22 @@ public extension Api.functions.messages {
})
}
}
public extension Api.functions.messages {
static func transcribeAudio(peer: Api.InputPeer, msgId: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.messages.TranscribedAudio>) {
let buffer = Buffer()
buffer.appendInt32(647928393)
peer.serialize(buffer, true)
serializeInt32(msgId, buffer: buffer, boxed: false)
return (FunctionDescription(name: "messages.transcribeAudio", parameters: [("peer", String(describing: peer)), ("msgId", String(describing: msgId))]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.messages.TranscribedAudio? in
let reader = BufferReader(buffer)
var result: Api.messages.TranscribedAudio?
if let signature = reader.readInt32() {
result = Api.parse(reader, signature: signature) as? Api.messages.TranscribedAudio
}
return result
})
}
}
public extension Api.functions.messages {
static func translateText(flags: Int32, peer: Api.InputPeer?, msgId: Int32?, text: String?, fromLang: String?, toLang: String) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.messages.TranslatedText>) {
let buffer = Buffer()
@ -7222,11 +7238,11 @@ public extension Api.functions.upload {
}
}
public extension Api.functions.upload {
static func getFileHashes(location: Api.InputFileLocation, offset: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<[Api.FileHash]>) {
static func getFileHashes(location: Api.InputFileLocation, offset: Int64) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<[Api.FileHash]>) {
let buffer = Buffer()
buffer.appendInt32(-956147407)
buffer.appendInt32(-1856595926)
location.serialize(buffer, true)
serializeInt32(offset, buffer: buffer, boxed: false)
serializeInt64(offset, buffer: buffer, boxed: false)
return (FunctionDescription(name: "upload.getFileHashes", parameters: [("location", String(describing: location)), ("offset", String(describing: offset))]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> [Api.FileHash]? in
let reader = BufferReader(buffer)
var result: [Api.FileHash]?

View File

@ -808,8 +808,7 @@ public final class MediaStreamComponent: CombinedComponent {
"Point 3.Group 1.Fill 1": whiteColor,
"Point 1.Group 1.Fill 1": whiteColor
],
loop: false,
isAnimating: false
mode: .still
),
size: CGSize(width: 22.0, height: 22.0)
).tagged(moreAnimationTag))),

View File

@ -40,8 +40,8 @@ func telegramMediaActionFromApiAction(_ action: Api.MessageAction) -> TelegramMe
return TelegramMediaAction(action: .phoneCall(callId: callId, discardReason: discardReason, duration: duration, isVideo: isVideo))
case .messageActionEmpty:
return nil
case let .messageActionPaymentSent(currency, totalAmount):
return TelegramMediaAction(action: .paymentSent(currency: currency, totalAmount: totalAmount))
case let .messageActionPaymentSent(_, currency, totalAmount, invoiceSlug):
return TelegramMediaAction(action: .paymentSent(currency: currency, totalAmount: totalAmount, invoiceSlug: invoiceSlug))
case .messageActionPaymentSentMe:
return nil
case .messageActionScreenshotTaken:

View File

@ -39,7 +39,7 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
case messageAutoremoveTimeoutUpdated(Int32)
case gameScore(gameId: Int64, score: Int32)
case phoneCall(callId: Int64, discardReason: PhoneCallDiscardReason?, duration: Int32?, isVideo: Bool)
case paymentSent(currency: String, totalAmount: Int64)
case paymentSent(currency: String, totalAmount: Int64, invoiceSlug: String?)
case customText(text: String, entities: [MessageTextEntity])
case botDomainAccessGranted(domain: String)
case botSentSecureValues(types: [SentSecureValueType])
@ -88,7 +88,7 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
}
self = .phoneCall(callId: decoder.decodeInt64ForKey("i", orElse: 0), discardReason: discardReason, duration: decoder.decodeInt32ForKey("d", orElse: 0), isVideo: decoder.decodeInt32ForKey("vc", orElse: 0) != 0)
case 15:
self = .paymentSent(currency: decoder.decodeStringForKey("currency", orElse: ""), totalAmount: decoder.decodeInt64ForKey("ta", orElse: 0))
self = .paymentSent(currency: decoder.decodeStringForKey("currency", orElse: ""), totalAmount: decoder.decodeInt64ForKey("ta", orElse: 0), invoiceSlug: decoder.decodeOptionalStringForKey("invoiceSlug"))
case 16:
self = .customText(text: decoder.decodeStringForKey("text", orElse: ""), entities: decoder.decodeObjectArrayWithDecoderForKey("ent"))
case 17:
@ -172,10 +172,15 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
encoder.encodeInt32(13, forKey: "_rawValue")
encoder.encodeInt64(gameId, forKey: "i")
encoder.encodeInt32(score, forKey: "s")
case let .paymentSent(currency, totalAmount):
case let .paymentSent(currency, totalAmount, invoiceSlug):
encoder.encodeInt32(15, forKey: "_rawValue")
encoder.encodeString(currency, forKey: "currency")
encoder.encodeInt64(totalAmount, forKey: "ta")
if let invoiceSlug = invoiceSlug {
encoder.encodeString(invoiceSlug, forKey: "invoiceSlug")
} else {
encoder.encodeNil(forKey: "invoiceSlug")
}
case let .phoneCall(callId, discardReason, duration, isVideo):
encoder.encodeInt32(14, forKey: "_rawValue")
encoder.encodeInt64(callId, forKey: "i")

View File

@ -97,7 +97,11 @@ private class AdMessagesHistoryContextImpl {
self.opaqueId = try container.decode(Data.self, forKey: .opaqueId)
self.messageType = (try container.decodeIfPresent(MessageType.self, forKey: .messageType)) ?? .sponsored
if let messageType = try container.decodeIfPresent(Int32.self, forKey: .messageType) {
self.messageType = MessageType(rawValue: messageType) ?? .sponsored
} else {
self.messageType = .sponsored
}
self.text = try container.decode(String.self, forKey: .text)
self.textEntities = try container.decode([MessageTextEntity].self, forKey: .textEntities)
@ -116,7 +120,7 @@ private class AdMessagesHistoryContextImpl {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(self.opaqueId, forKey: .opaqueId)
try container.encode(self.messageType, forKey: .messageType)
try container.encode(self.messageType.rawValue, forKey: .messageType)
try container.encode(self.text, forKey: .text)
try container.encode(self.textEntities, forKey: .textEntities)

View File

@ -322,6 +322,10 @@ public extension TelegramEngine {
return _internal_translate(network: self.account.network, text: text, fromLang: fromLang, toLang: toLang)
}
public func transcribeAudio(messageId: MessageId) -> Signal<String?, NoError> {
return _internal_transcribeAudio(postbox: self.account.postbox, network: self.account.network, messageId: messageId)
}
public func requestWebView(peerId: PeerId, botId: PeerId, url: String?, payload: String?, themeParams: [String: Any]?, fromMenu: Bool, replyToMessageId: MessageId?) -> Signal<RequestWebViewResult, RequestWebViewError> {
return _internal_requestWebView(postbox: self.account.postbox, network: self.account.network, stateManager: self.account.stateManager, peerId: peerId, botId: botId, url: url, payload: payload, themeParams: themeParams, fromMenu: fromMenu, replyToMessageId: replyToMessageId)
}

View File

@ -28,3 +28,28 @@ func _internal_translate(network: Network, text: String, fromLang: String?, toLa
}
}
}
func _internal_transcribeAudio(postbox: Postbox, network: Network, messageId: MessageId) -> Signal<String?, NoError> {
return postbox.transaction { transaction -> Api.InputPeer? in
return transaction.getPeer(messageId.peerId).flatMap(apiInputPeer)
}
|> mapToSignal { inputPeer -> Signal<String?, NoError> in
guard let inputPeer = inputPeer else {
return .single(nil)
}
return network.request(Api.functions.messages.transcribeAudio(peer: inputPeer, msgId: messageId.id))
|> map(Optional.init)
|> `catch` { _ -> Signal<Api.messages.TranscribedAudio?, NoError> in
return .single(nil)
}
|> mapToSignal { result -> Signal<String?, NoError> in
guard let result = result else {
return .single(nil)
}
switch result {
case let .transcribedAudio(string):
return .single(string)
}
}
}
}
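
Note: end to end, the engine method resolves the peer inside a postbox transaction, issues messages.transcribeAudio, and maps any RPC error to nil. A hedged call-site sketch (context stands in for whatever object holds the account):

let _ = (context.engine.messages.transcribeAudio(messageId: message.id)
|> deliverOnMainQueue).start(next: { text in
    guard let text = text else {
        return // request failed or transcription unavailable
    }
    print("transcription: \(text)")
})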

View File

@ -470,17 +470,23 @@ func _internal_sendBotPaymentForm(account: Account, formId: Int64, source: BotPa
for media in message.media {
if let action = media as? TelegramMediaAction {
if case .paymentSent = action.action {
switch source {
case let .slug(slug):
for media in message.media {
if let action = media as? TelegramMediaAction, case let .paymentSent(_, _, invoiceSlug?) = action.action, invoiceSlug == slug {
if case let .Id(id) = message.id {
receiptMessageId = id
}
}
}
case let .message(messageId):
for attribute in message.attributes {
if let reply = attribute as? ReplyMessageAttribute {
switch source {
case let .message(messageId):
if reply.messageId == messageId {
if case let .Id(id) = message.id {
receiptMessageId = id
}
}
case .slug:
break
}
}
}
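
Note: the hunk above interleaves removed and added lines, which makes the new control flow hard to follow. Reconstructed for readability (not a verbatim excerpt), the receipt message is now matched by invoice slug for .slug sources and by reply attribute for .message sources:

switch source {
case let .slug(slug):
    for media in message.media {
        if let action = media as? TelegramMediaAction,
           case let .paymentSent(_, _, invoiceSlug?) = action.action,
           invoiceSlug == slug,
           case let .Id(id) = message.id {
            receiptMessageId = id
        }
    }
case let .message(messageId):
    for attribute in message.attributes {
        if let reply = attribute as? ReplyMessageAttribute,
           reply.messageId == messageId,
           case let .Id(id) = message.id {
            receiptMessageId = id
        }
    }
}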

View File

@ -437,7 +437,7 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
var argumentAttributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
argumentAttributes[1] = MarkdownAttributeSet(font: titleBoldFont, textColor: primaryTextColor, additionalAttributes: [:])
attributedString = addAttributesToStringWithRanges(formatWithArgumentRanges(baseString, ranges, [authorName, gameTitle ?? ""]), body: bodyAttributes, argumentAttributes: argumentAttributes)
case let .paymentSent(currency, totalAmount):
case let .paymentSent(currency, totalAmount, _):
var invoiceMessage: EngineMessage?
for attribute in message.attributes {
if let attribute = attribute as? ReplyMessageAttribute, let message = message.associatedMessages[attribute.messageId] {

View File

@ -270,6 +270,10 @@ swift_library(
"//submodules/PremiumUI:PremiumUI",
"//submodules/Components/HierarchyTrackingLayer:HierarchyTrackingLayer",
"//submodules/Utils/RangeSet:RangeSet",
"//submodules/TelegramUI/Components/AudioTranscriptionButtonComponent:AudioTranscriptionButtonComponent",
"//submodules/TelegramUI/Components/AudioWaveformComponent:AudioWaveformComponent",
"//submodules/Media/ConvertOpusToAAC:ConvertOpusToAAC",
"//submodules/Media/LocalAudioTranscription:LocalAudioTranscription",
] + select({
"@build_bazel_rules_apple//apple:ios_armv7": [],
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,

View File

@ -0,0 +1,22 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "AudioTranscriptionButtonComponent",
module_name = "AudioTranscriptionButtonComponent",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/ComponentFlow:ComponentFlow",
"//submodules/AppBundle:AppBundle",
"//submodules/Display:Display",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/Components/LottieAnimationComponent:LottieAnimationComponent",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,188 @@
import Foundation
import UIKit
import ComponentFlow
import AppBundle
import Display
import TelegramPresentationData
import LottieAnimationComponent
public final class AudioTranscriptionButtonComponent: Component {
public enum TranscriptionState {
case possible
case inProgress
case expanded
case collapsed
}
public let theme: PresentationThemePartedColors
public let transcriptionState: TranscriptionState
public let pressed: () -> Void
public init(
theme: PresentationThemePartedColors,
transcriptionState: TranscriptionState,
pressed: @escaping () -> Void
) {
self.theme = theme
self.transcriptionState = transcriptionState
self.pressed = pressed
}
public static func ==(lhs: AudioTranscriptionButtonComponent, rhs: AudioTranscriptionButtonComponent) -> Bool {
if lhs.theme !== rhs.theme {
return false
}
if lhs.transcriptionState != rhs.transcriptionState {
return false
}
return true
}
public final class View: UIButton {
private var component: AudioTranscriptionButtonComponent?
private let backgroundLayer: SimpleLayer
private var inProgressLayer: SimpleShapeLayer?
private let animationView: ComponentHostView<Empty>
override init(frame: CGRect) {
self.backgroundLayer = SimpleLayer()
self.animationView = ComponentHostView<Empty>()
self.animationView.isUserInteractionEnabled = false
super.init(frame: frame)
self.backgroundLayer.masksToBounds = true
self.backgroundLayer.cornerRadius = 10.0
self.layer.addSublayer(self.backgroundLayer)
self.addSubview(self.animationView)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@objc private func pressed() {
self.component?.pressed()
}
func update(component: AudioTranscriptionButtonComponent, availableSize: CGSize, transition: Transition) -> CGSize {
let size = CGSize(width: 30.0, height: 30.0)
let foregroundColor = component.theme.bubble.withWallpaper.reactionActiveBackground
if self.component?.transcriptionState != component.transcriptionState {
switch component.transcriptionState {
case .inProgress:
if self.inProgressLayer == nil {
let inProgressLayer = SimpleShapeLayer()
inProgressLayer.isOpaque = false
inProgressLayer.backgroundColor = nil
inProgressLayer.fillColor = nil
inProgressLayer.lineCap = .round
inProgressLayer.lineWidth = 1.0
let path = UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: CGSize(width: 30.0, height: 30.0)), cornerRadius: 9.0).cgPath
inProgressLayer.path = path
self.inProgressLayer = inProgressLayer
inProgressLayer.didEnterHierarchy = { [weak inProgressLayer] in
guard let inProgressLayer = inProgressLayer else {
return
}
let endAnimation = CABasicAnimation(keyPath: "strokeEnd")
endAnimation.fromValue = CGFloat(0.0) as NSNumber
endAnimation.toValue = CGFloat(1.0) as NSNumber
endAnimation.duration = 1.25
endAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
endAnimation.fillMode = .forwards
endAnimation.repeatCount = .infinity
inProgressLayer.add(endAnimation, forKey: "strokeEnd")
let startAnimation = CABasicAnimation(keyPath: "strokeStart")
startAnimation.fromValue = CGFloat(0.0) as NSNumber
startAnimation.toValue = CGFloat(1.0) as NSNumber
startAnimation.duration = 1.25
startAnimation.timingFunction = CAMediaTimingFunction(name: .easeIn)
startAnimation.fillMode = .forwards
startAnimation.repeatCount = .infinity
inProgressLayer.add(startAnimation, forKey: "strokeStart")
}
self.layer.addSublayer(inProgressLayer)
}
default:
if let inProgressLayer = self.inProgressLayer {
self.inProgressLayer = nil
if case .none = transition.animation {
inProgressLayer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false, completion: { [weak inProgressLayer] _ in
inProgressLayer?.removeFromSuperlayer()
})
} else {
inProgressLayer.removeFromSuperlayer()
}
}
}
let animationName: String
switch component.transcriptionState {
case .possible:
animationName = "voiceToText"
case .inProgress:
animationName = "voiceToText"
case .collapsed:
animationName = "voiceToText"
case .expanded:
animationName = "textToVoice"
}
let animationSize = self.animationView.update(
transition: transition,
component: AnyComponent(LottieAnimationComponent(
animation: LottieAnimationComponent.Animation(
name: animationName,
colors: [
"icon.Group 3.Stroke 1": foregroundColor,
"icon.Group 1.Stroke 1": foregroundColor,
"icon.Group 4.Stroke 1": foregroundColor,
"icon.Group 2.Stroke 1": foregroundColor,
"Artboard Copy 2 Outlines.Group 5.Stroke 1": foregroundColor,
"Artboard Copy 2 Outlines.Group 1.Stroke 1": foregroundColor,
"Artboard Copy 2 Outlines.Group 4.Stroke 1": foregroundColor,
"Artboard Copy Outlines.Group 1.Stroke 1": foregroundColor,
],
mode: .animateTransitionFromPrevious
),
size: CGSize(width: 30.0, height: 30.0)
)),
environment: {},
containerSize: CGSize(width: 30.0, height: 30.0)
)
self.animationView.frame = CGRect(origin: CGPoint(x: floor((size.width - animationSize.width) / 2.0), y: floor((size.width - animationSize.height) / 2.0)), size: animationSize)
}
self.backgroundLayer.backgroundColor = component.theme.bubble.withWallpaper.reactionInactiveBackground.cgColor
self.inProgressLayer?.strokeColor = foregroundColor.cgColor
self.component = component
self.backgroundLayer.frame = CGRect(origin: CGPoint(), size: size)
if let inProgressLayer = self.inProgressLayer {
inProgressLayer.frame = CGRect(origin: CGPoint(), size: size)
}
return CGSize(width: min(availableSize.width, size.width), height: min(availableSize.height, size.height))
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
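
Note: the button composes a rounded background layer, an optional progress stroke (the strokeStart/strokeEnd animation pair restarted through didEnterHierarchy from the SimpleShapeLayer change above), and a Lottie icon that morphs between the voiceToText and textToVoice animations via .animateTransitionFromPrevious. A hedged embedding sketch (buttonView is an assumed ComponentHostView<Empty>, and the theme path is illustrative):

let buttonSize = buttonView.update(
    transition: .immediate,
    component: AnyComponent(AudioTranscriptionButtonComponent(
        theme: presentationData.theme.chat.message.incoming,
        transcriptionState: .inProgress,
        pressed: {
            // kick off the convertOpusToAAC + transcribeAudio pipeline here
        }
    )),
    environment: {},
    containerSize: CGSize(width: 30.0, height: 30.0)
)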

View File

@ -0,0 +1,20 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "AudioWaveformComponent",
module_name = "AudioWaveformComponent",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/ComponentFlow:ComponentFlow",
"//submodules/AppBundle:AppBundle",
"//submodules/Display:Display",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,63 @@
import Foundation
import UIKit
import ComponentFlow
import Display
public final class AudioWaveformComponent: Component {
public let backgroundColor: UIColor
public let foregroundColor: UIColor
public let samples: Data
public let peak: Int32
public init(
backgroundColor: UIColor,
foregroundColor: UIColor,
samples: Data,
peak: Int32
) {
self.backgroundColor = backgroundColor
self.foregroundColor = foregroundColor
self.samples = samples
self.peak = peak
}
public static func ==(lhs: AudioWaveformComponent, rhs: AudioWaveformComponent) -> Bool {
if lhs.backgroundColor !== rhs.backgroundColor {
return false
}
if lhs.foregroundColor != rhs.foregroundColor {
return false
}
if lhs.samples != rhs.samples {
return false
}
if lhs.peak != rhs.peak {
return false
}
return true
}
public final class View: UIView {
private var component: AudioWaveformComponent?
override init(frame: CGRect) {
super.init(frame: frame)
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func update(component: AudioWaveformComponent, availableSize: CGSize, transition: Transition) -> CGSize {
return CGSize(width: availableSize.width, height: availableSize.height)
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
{"v":"5.8.1","fr":60,"ip":0,"op":20,"w":300,"h":300,"nm":"Comp 7","ddd":0,"assets":[],"layers":[{"ddd":0,"ind":1,"ty":4,"nm":"icon","sr":1,"ks":{"o":{"a":0,"k":100,"ix":11},"r":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":19,"s":[-90]}],"ix":10},"p":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[150,150,0],"to":[0,0,0],"ti":[0,0,0]},{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":12,"s":[150,178,0],"to":[0,0,0],"ti":[0,0,0]},{"t":19,"s":[150,150,0]}],"ix":2,"l":2},"a":{"a":0,"k":[150,150,0],"ix":1,"l":2},"s":{"a":0,"k":[100,100,100],"ix":6,"l":2}},"ao":0,"shapes":[{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[176.7,163.3],[220,163.3]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":12,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 1","np":3,"cix":2,"bm":0,"ix":1,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":0,"k":{"i":[[0,0],[0,0],[-2.606,-0.912],[-0.503,-1.436],[0,0]],"o":[[0,0],[0.912,-2.606],[1.436,0.503],[0,0],[0,0]],"v":[[-35,45.248],[-4.719,-41.268],[1.652,-44.336],[4.719,-41.268],[35,45.248]],"c":false},"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":14,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[198.3,144.752],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 2","np":3,"cix":2,"bm":0,"ix":2,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[-17.5,35],[17.5,0],[-17.5,-35]],"c":false}]},{"t":19,"s":[{"i":[[0,0],[0,0],[0,0]],"o":[[0,0],[0,0],[0,0]],"v":[[4.2,60],[64.2,0],[4.2,-60]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[13.3]},{"t":19,"s":[16.7]}],"ix":5},"lc":2,"lj":2,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - 
Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[115.8,150],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 3","np":2,"cix":2,"bm":0,"ix":3,"mn":"ADBE Vector Group","hd":false},{"ty":"gr","it":[{"ind":0,"ty":"sh","ix":1,"ks":{"a":1,"k":[{"i":{"x":0.4,"y":1},"o":{"x":0.6,"y":0},"t":0,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[63.3,150],[130,150]],"c":false}]},{"t":19,"s":[{"i":[[0,0],[0,0]],"o":[[0,0],[0,0]],"v":[[93.3,150],[160,150]],"c":false}]}],"ix":2},"nm":"Path 1","mn":"ADBE Vector Shape - Group","hd":false},{"ty":"tm","s":{"a":1,"k":[{"i":{"x":[0.4],"y":[1]},"o":{"x":[0.6],"y":[0]},"t":0,"s":[0]},{"t":12,"s":[100]}],"ix":1},"e":{"a":0,"k":100,"ix":2},"o":{"a":0,"k":0,"ix":3},"m":1,"ix":2,"nm":"Trim Paths 1","mn":"ADBE Vector Filter - Trim","hd":false},{"ty":"st","c":{"a":0,"k":[0.592156862745,0.592156862745,0.592156862745,1],"ix":3},"o":{"a":0,"k":100,"ix":4},"w":{"a":0,"k":13.3,"ix":5},"lc":2,"lj":1,"ml":10,"bm":0,"nm":"Stroke 1","mn":"ADBE Vector Graphic - Stroke","hd":false},{"ty":"tr","p":{"a":0,"k":[0,0],"ix":2},"a":{"a":0,"k":[0,0],"ix":1},"s":{"a":0,"k":[100,100],"ix":3},"r":{"a":0,"k":0,"ix":6},"o":{"a":0,"k":100,"ix":7},"sk":{"a":0,"k":0,"ix":4},"sa":{"a":0,"k":0,"ix":5},"nm":"Transform"}],"nm":"Group 4","np":3,"cix":2,"bm":0,"ix":4,"mn":"ADBE Vector Group","hd":false}],"ip":0,"op":20,"st":0,"bm":0}],"markers":[]}

View File

@ -583,6 +583,7 @@ final class ChatMessageAttachedContentNode: ASDisplayNode {
dateAndStatusType: statusType,
displayReactions: false,
messageSelection: nil,
layoutConstants: layoutConstants,
constrainedSize: CGSize(width: constrainedSize.width - horizontalInsets.left - horizontalInsets.right, height: constrainedSize.height)
))
refineContentFileLayout = refineLayout

View File

@ -135,6 +135,7 @@ class ChatMessageFileBubbleContentNode: ChatMessageBubbleContentNode {
dateAndStatusType: statusType,
displayReactions: true,
messageSelection: item.message.groupingKey != nil ? selection : nil,
layoutConstants: layoutConstants,
constrainedSize: CGSize(width: constrainedSize.width - layoutConstants.file.bubbleInsets.left - layoutConstants.file.bubbleInsets.right, height: constrainedSize.height)
))

View File

@ -18,6 +18,12 @@ import MusicAlbumArtResources
import AudioBlob
import ContextUI
import ChatPresentationInterfaceState
import ComponentFlow
import AudioTranscriptionButtonComponent
import AudioWaveformComponent
import ShimmerEffect
import ConvertOpusToAAC
import LocalAudioTranscription
private struct FetchControls {
let fetch: (Bool) -> Void
@ -43,6 +49,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let dateAndStatusType: ChatMessageDateAndStatusType?
let displayReactions: Bool
let messageSelection: Bool?
let layoutConstants: ChatMessageItemLayoutConstants
let constrainedSize: CGSize
init(
@ -63,6 +70,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
dateAndStatusType: ChatMessageDateAndStatusType?,
displayReactions: Bool,
messageSelection: Bool?,
layoutConstants: ChatMessageItemLayoutConstants,
constrainedSize: CGSize
) {
self.context = context
@ -82,6 +90,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.dateAndStatusType = dateAndStatusType
self.displayReactions = displayReactions
self.messageSelection = messageSelection
self.layoutConstants = layoutConstants
self.constrainedSize = constrainedSize
}
}
@ -95,7 +104,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private let fetchingCompactTextNode: ImmediateTextNode
private let waveformNode: AudioWaveformNode
private let waveformForegroundNode: AudioWaveformNode
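// State for the transcription UI: a shimmer placeholder shown over the waveform
// while recognition runs, plus the expand/collapse button hosted as a component view.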
private var waveformShimmerNode: ShimmerEffectNode?
private var waveformMaskNode: AudioWaveformNode?
private var waveformScrubbingNode: MediaPlayerScrubbingNode?
private var audioTranscriptionButton: ComponentHostView<Empty>?
private let textNode: TextNode
let dateAndStatusNode: ChatMessageDateAndStatusNode
private let consumableContentNode: ASImageNode
@ -157,6 +170,10 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private var streamingCacheStatusFrame: CGRect?
private var fileIconImage: UIImage?
private var audioTranscriptionState: AudioTranscriptionButtonComponent.TranscriptionState = .possible
private var transcribedText: String?
private var transcribeDisposable: Disposable?
override init() {
self.titleNode = TextNode()
self.titleNode.displaysAsynchronously = false
@ -189,6 +206,10 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.waveformForegroundNode = AudioWaveformNode()
self.waveformForegroundNode.isLayerBacked = true
self.textNode = TextNode()
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
self.dateAndStatusNode = ChatMessageDateAndStatusNode()
self.consumableContentNode = ASImageNode()
@ -209,6 +230,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.playbackStatusDisposable.dispose()
self.fetchDisposable.dispose()
self.audioLevelEventsDisposable.dispose()
self.transcribeDisposable?.dispose()
}
override func didLoad() {
@ -275,15 +297,98 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
}
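// Entry point for the transcription button: the first tap kicks off transcription,
// later taps toggle the transcribed text between expanded and collapsed.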
private func transcribe() {
guard let context = self.context, let message = self.message else {
return
}
if self.transcribedText == nil {
if self.transcribeDisposable == nil {
self.audioTranscriptionState = .inProgress
self.requestUpdateLayout(true)
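// Note: `!"".isEmpty` is always false, so the on-device pipeline below (load the
// message, resolve the voice file from the media box, convert Opus to AAC, run
// local speech recognition) is compiled but never taken; the server-side branch
// in the `else` runs instead.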
if !"".isEmpty {
let signal: Signal<String?, NoError> = context.account.postbox.transaction { transaction -> Message? in
return transaction.getMessage(message.id)
}
|> mapToSignal { message -> Signal<String?, NoError> in
guard let message = message else {
return .single(nil)
}
guard let file = message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile else {
return .single(nil)
}
return context.account.postbox.mediaBox.resourceData(id: file.resource.id)
|> take(1)
|> mapToSignal { data -> Signal<String?, NoError> in
if !data.complete {
return .single(nil)
}
return .single(data.path)
}
}
|> mapToSignal { result -> Signal<String?, NoError> in
guard let result = result else {
return .single(nil)
}
return convertOpusToAAC(sourcePath: result, allocateTempFile: {
return TempBox.shared.tempFile(fileName: "audio.m4a").path
})
}
|> mapToSignal { result -> Signal<String?, NoError> in
guard let result = result else {
return .single(nil)
}
return transcribeAudio(path: result)
}
self.transcribeDisposable = (signal
|> deliverOnMainQueue).start(next: { [weak self] result in
guard let strongSelf = self else {
return
}
strongSelf.transcribeDisposable = nil
strongSelf.audioTranscriptionState = .expanded
strongSelf.transcribedText = result
strongSelf.requestUpdateLayout(true)
})
} else {
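// Server-side path: ask the Telegram API to transcribe the voice message.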
self.transcribeDisposable = (context.engine.messages.transcribeAudio(messageId: message.id)
|> deliverOnMainQueue).start(next: { [weak self] result in
guard let strongSelf = self else {
return
}
strongSelf.transcribeDisposable = nil
strongSelf.audioTranscriptionState = .expanded
strongSelf.transcribedText = result
strongSelf.requestUpdateLayout(true)
})
}
}
} else {
switch self.audioTranscriptionState {
case .expanded:
self.audioTranscriptionState = .collapsed
self.requestUpdateLayout(true)
case .collapsed:
self.audioTranscriptionState = .expanded
self.requestUpdateLayout(true)
default:
break
}
}
}
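// For context, a minimal sketch of what a local transcribeAudio(path:) helper can
// look like, assuming `import Speech` and SwiftSignalKit; the names below are
// illustrative, and the actual implementation lives in the LocalAudioTranscription
// module. SFSpeechRecognizer also requires the speech recognition usage permission.
private func transcribeAudioSketch(path: String) -> Signal<String?, NoError> {
    return Signal { subscriber in
        guard let recognizer = SFSpeechRecognizer(), recognizer.isAvailable else {
            // No recognizer for the current locale, or recognition is unavailable.
            subscriber.putNext(nil)
            subscriber.putCompletion()
            return EmptyDisposable
        }
        // Recognize the whole file and report only the final result.
        let request = SFSpeechURLRecognitionRequest(url: URL(fileURLWithPath: path))
        request.shouldReportPartialResults = false
        let task = recognizer.recognitionTask(with: request) { result, error in
            if let result = result, result.isFinal {
                subscriber.putNext(result.bestTranscription.formattedString)
                subscriber.putCompletion()
            } else if error != nil {
                subscriber.putNext(nil)
                subscriber.putCompletion()
            }
        }
        return ActionDisposable {
            task.cancel()
        }
    }
}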
func asyncLayout() -> (Arguments) -> (CGFloat, (CGSize) -> (CGFloat, (CGFloat) -> (CGSize, (Bool, ListViewItemUpdateAnimation) -> Void))) {
let currentFile = self.file
let titleAsyncLayout = TextNode.asyncLayout(self.titleNode)
let descriptionAsyncLayout = TextNode.asyncLayout(self.descriptionNode)
let descriptionMeasuringAsyncLayout = TextNode.asyncLayout(self.descriptionMeasuringNode)
let textAsyncLayout = TextNode.asyncLayout(self.textNode)
let statusLayout = self.dateAndStatusNode.asyncLayout()
let currentMessage = self.message
let transcribedText = self.transcribedText
let audioTranscriptionState = self.audioTranscriptionState
return { arguments in
return (CGFloat.greatestFiniteMagnitude, { constrainedSize in
@ -453,6 +558,17 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let (descriptionMeasuringLayout, descriptionMeasuringApply) = descriptionMeasuringAsyncLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: "\(fileSizeString) / \(fileSizeString)", font: descriptionFont, textColor: .black), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .middle, constrainedSize: textConstrainedSize, alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let descriptionMaxWidth = max(descriptionLayout.size.width, descriptionMeasuringLayout.size.width)
let textFont = arguments.presentationData.messageFont
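// The transcribed text takes part in layout only while the transcription is expanded.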
let textString: NSAttributedString?
if let transcribedText = transcribedText, case .expanded = audioTranscriptionState {
textString = NSAttributedString(string: transcribedText, font: textFont, textColor: messageTheme.primaryTextColor)
} else {
textString = nil
}
let horizontalInset: CGFloat = (arguments.layoutConstants.bubble.edgeInset + arguments.layoutConstants.bubble.borderInset) * 2.0
let inlineTextConstrainedSize = CGSize(width: constrainedSize.width, height: constrainedSize.height)
let (textLayout, textApply) = textAsyncLayout(TextNodeLayoutArguments(attributedString: textString, backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: inlineTextConstrainedSize.width - horizontalInset, height: .greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let minVoiceWidth: CGFloat = 120.0
let maxVoiceWidth = constrainedSize.width
@ -517,6 +633,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
reactionSettings = ChatMessageDateAndStatusNode.TrailingReactionSettings(displayInline: displayReactionsInline, preferAdditionalInset: !displayReactionsInline)
}
let statusLayoutInput: ChatMessageDateAndStatusNode.LayoutInput
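// With transcription text visible, the date/status trails the text's last line;
// otherwise it keeps its original control-area placement.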
if let _ = textString {
statusLayoutInput = .trailingContent(contentWidth: textLayout.trailingLineWidth, reactionSettings: reactionSettings)
} else {
statusLayoutInput = .trailingContent(contentWidth: iconFrame == nil ? 1000.0 : controlAreaWidth, reactionSettings: reactionSettings)
}
statusSuggestedWidthAndContinue = statusLayout(ChatMessageDateAndStatusNode.Arguments(
context: arguments.context,
presentationData: arguments.presentationData,
@ -524,7 +647,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
impressionCount: viewCount,
dateText: dateText,
type: statusType,
layoutInput: .trailingContent(contentWidth: iconFrame == nil ? 1000.0 : controlAreaWidth, reactionSettings: reactionSettings),
layoutInput: statusLayoutInput,
constrainedSize: constrainedSize,
availableReactions: arguments.associatedData.availableReactions,
reactions: dateReactionsAndPeers.reactions,
@ -543,7 +666,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let descriptionAndStatusWidth = descriptionLayout.size.width
let calcDuration = max(minVoiceLength, min(maxVoiceLength, CGFloat(audioDuration)))
minLayoutWidth = minVoiceWidth + (maxVoiceWidth - minVoiceWidth) * (calcDuration - minVoiceLength) / (maxVoiceLength - minVoiceLength)
minLayoutWidth = 30.0 + 8.0 + minVoiceWidth + (maxVoiceWidth - minVoiceWidth) * (calcDuration - minVoiceLength) / (maxVoiceLength - minVoiceLength)
minLayoutWidth = max(descriptionAndStatusWidth + 56, minLayoutWidth)
} else {
minLayoutWidth = max(titleLayout.size.width, descriptionMaxWidth) + 44.0 + 8.0
@ -553,6 +676,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
minLayoutWidth = max(minLayoutWidth, statusSuggestedWidthAndContinue.0)
}
minLayoutWidth = max(minLayoutWidth, textLayout.size.width + horizontalInset)
let fileIconImage: UIImage?
if hasThumbnail {
fileIconImage = nil
@ -591,6 +716,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
fittedLayoutSize = CGSize(width: unionSize.width, height: unionSize.height)
}
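// Reserve extra room below the voice controls for the transcription text.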
if textString != nil {
fittedLayoutSize.width = max(fittedLayoutSize.width + horizontalInset, textLayout.size.width)
fittedLayoutSize.height += textLayout.size.height + 5.0
}
var statusSizeAndApply: (CGSize, (ListViewItemUpdateAnimation) -> Void)?
if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue {
statusSizeAndApply = statusSuggestedWidthAndContinue.1(boundingWidth)
@ -645,8 +775,41 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
} else {
statusReferenceFrame = progressFrame.offsetBy(dx: 0.0, dy: 8.0)
}
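// When the transcription collapses, cross-fade out a snapshot of the old text.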
if textString == nil, strongSelf.textNode.supernode != nil, animation.isAnimated {
if let snapshotView = strongSelf.textNode.view.snapshotContentTree() {
snapshotView.frame = strongSelf.textNode.frame
strongSelf.view.insertSubview(snapshotView, aboveSubview: strongSelf.textNode.view)
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
}
}
let _ = textApply()
let textFrame = CGRect(origin: CGPoint(x: arguments.layoutConstants.text.bubbleInsets.left - arguments.layoutConstants.file.bubbleInsets.left, y: statusReferenceFrame.maxY + 1.0), size: textLayout.size)
strongSelf.textNode.frame = textFrame
if textString != nil {
if strongSelf.textNode.supernode == nil {
strongSelf.addSubnode(strongSelf.textNode)
if animation.isAnimated {
strongSelf.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
} else {
if strongSelf.textNode.supernode != nil {
strongSelf.textNode.removeFromSupernode()
}
}
if let statusSizeAndApply = statusSizeAndApply {
let statusFrame = CGRect(origin: CGPoint(x: statusReferenceFrame.minX, y: statusReferenceFrame.maxY + statusOffset), size: statusSizeAndApply.0)
let statusFrame: CGRect
if textString != nil {
statusFrame = CGRect(origin: CGPoint(x: fittedLayoutSize.width - 5.0 - statusSizeAndApply.0.width, y: textFrame.maxY + 4.0), size: statusSizeAndApply.0)
} else {
statusFrame = CGRect(origin: CGPoint(x: statusReferenceFrame.minX, y: statusReferenceFrame.maxY + statusOffset), size: statusSizeAndApply.0)
}
if strongSelf.dateAndStatusNode.supernode == nil {
strongSelf.dateAndStatusNode.frame = statusFrame
strongSelf.addSubnode(strongSelf.dateAndStatusNode)
@ -671,7 +834,60 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
strongSelf.waveformScrubbingNode = waveformScrubbingNode
strongSelf.addSubnode(waveformScrubbingNode)
}
strongSelf.waveformScrubbingNode?.frame = CGRect(origin: CGPoint(x: 57.0, y: 1.0), size: CGSize(width: boundingWidth - 60.0, height: 15.0))
let scrubbingFrame = CGRect(origin: CGPoint(x: 57.0, y: 1.0), size: CGSize(width: boundingWidth - 60.0 - 30.0 - 8.0, height: 15.0))
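// While transcription is in progress, a shimmer masked to the waveform shape
// replaces the static waveform.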
if case .inProgress = audioTranscriptionState {
if strongSelf.waveformShimmerNode == nil {
let waveformShimmerNode = ShimmerEffectNode()
strongSelf.waveformShimmerNode = waveformShimmerNode
strongSelf.addSubnode(waveformShimmerNode)
let waveformMaskNode = AudioWaveformNode()
strongSelf.waveformMaskNode = waveformMaskNode
waveformShimmerNode.view.mask = waveformMaskNode.view
}
if let audioWaveform = audioWaveform, let waveformShimmerNode = strongSelf.waveformShimmerNode, let waveformMaskNode = strongSelf.waveformMaskNode {
waveformShimmerNode.frame = scrubbingFrame
waveformShimmerNode.updateAbsoluteRect(scrubbingFrame, within: CGSize(width: scrubbingFrame.size.width + 60.0, height: scrubbingFrame.size.height + 4.0))
var shapes: [ShimmerEffectNode.Shape] = []
shapes.append(.rect(rect: CGRect(origin: CGPoint(), size: scrubbingFrame.size)))
waveformShimmerNode.update(
backgroundColor: .blue,
foregroundColor: messageTheme.mediaInactiveControlColor,
shimmeringColor: messageTheme.mediaActiveControlColor,
shapes: shapes,
horizontal: true,
effectSize: 60.0,
globalTimeOffset: false,
duration: 0.7,
size: scrubbingFrame.size
)
waveformMaskNode.frame = CGRect(origin: CGPoint(), size: scrubbingFrame.size)
waveformMaskNode.setup(color: .black, gravity: .bottom, waveform: audioWaveform)
}
} else {
if let waveformShimmerNode = strongSelf.waveformShimmerNode {
strongSelf.waveformShimmerNode = nil
if animation.isAnimated {
waveformShimmerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak waveformShimmerNode] _ in
waveformShimmerNode?.removeFromSupernode()
})
} else {
waveformShimmerNode.removeFromSupernode()
}
}
strongSelf.waveformMaskNode = nil
}
if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
waveformScrubbingNode.frame = scrubbingFrame
//animation.animator.updateFrame(layer: waveformScrubbingNode.layer, frame: scrubbingFrame, completion: nil)
//waveformScrubbingNode.update(size: scrubbingFrame.size, animator: animation.animator)
}
let waveformColor: UIColor
if arguments.incoming {
if consumableContentIcon != nil {
@ -684,10 +900,41 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
strongSelf.waveformNode.setup(color: waveformColor, gravity: .bottom, waveform: audioWaveform)
strongSelf.waveformForegroundNode.setup(color: messageTheme.mediaActiveControlColor, gravity: .bottom, waveform: audioWaveform)
} else if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
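// Lazily create the transcription button and dock it after the waveform, in the
// 30.0 + 8.0 point slot reserved in the width calculation above.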
let audioTranscriptionButton: ComponentHostView<Empty>
if let current = strongSelf.audioTranscriptionButton {
audioTranscriptionButton = current
} else {
audioTranscriptionButton = ComponentHostView<Empty>()
strongSelf.audioTranscriptionButton = audioTranscriptionButton
strongSelf.view.addSubview(audioTranscriptionButton)
}
let audioTranscriptionButtonSize = audioTranscriptionButton.update(
transition: animation.isAnimated ? .easeInOut(duration: 0.3) : .immediate,
component: AnyComponent(AudioTranscriptionButtonComponent(
theme: arguments.incoming ? arguments.presentationData.theme.theme.chat.message.incoming : arguments.presentationData.theme.theme.chat.message.outgoing,
transcriptionState: audioTranscriptionState,
pressed: {
guard let strongSelf = self else {
return
}
strongSelf.transcribe()
}
)),
environment: {},
containerSize: CGSize(width: 30.0, height: 30.0)
)
animation.animator.updateFrame(layer: audioTranscriptionButton.layer, frame: CGRect(origin: CGPoint(x: boundingWidth - 30.0 + 3.0, y: -6.0), size: audioTranscriptionButtonSize), completion: nil)
} else {
if let waveformScrubbingNode = strongSelf.waveformScrubbingNode {
strongSelf.waveformScrubbingNode = nil
waveformScrubbingNode.removeFromSupernode()
}
if let audioTranscriptionButton = strongSelf.audioTranscriptionButton {
strongSelf.audioTranscriptionButton = nil
audioTranscriptionButton.removeFromSuperview()
}
}
if let iconFrame = iconFrame {
let iconNode: TransformImageNode
@ -1213,6 +1460,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
}
}
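// The transcription button is a plain UIView, so route hit-testing to it explicitly.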
if let audioTranscriptionButton = self.audioTranscriptionButton {
if let result = audioTranscriptionButton.hitTest(self.view.convert(point, to: self.audioTranscriptionButton), with: event) {
return result
}
}
return super.hitTest(point, with: event)
}

View File

@ -170,8 +170,12 @@ final class MetalWallpaperBackgroundNode: ASDisplayNode, WallpaperBackgroundNode
}, selector: #selector(DisplayLinkTarget.event))
self.displayLink = displayLink
if #available(iOS 15.0, iOSApplicationExtension 15.0, *) {
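// Note: `"".isEmpty` is always true, pinning the display link to 60 fps; the
// full-rate ProMotion branch below is unreachable, presumably a temporary toggle.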
if "".isEmpty {
displayLink.preferredFrameRateRange = CAFrameRateRange(minimum: 60.0, maximum: 60.0, preferred: 60.0)
} else {
displayLink.preferredFrameRateRange = CAFrameRateRange(minimum: Float(UIScreen.main.maximumFramesPerSecond), maximum: Float(UIScreen.main.maximumFramesPerSecond), preferred: Float(UIScreen.main.maximumFramesPerSecond))
}
}
displayLink.isPaused = false
if !"".isEmpty {