Update audio transcription UI

Ali 2022-06-06 18:26:17 +04:00
parent 1c0113d616
commit 9ace1de622
5 changed files with 156 additions and 7 deletions

@@ -7698,7 +7698,7 @@ Sorry for the inconvenience.";
"Chat.AudioTranscriptionRateAction" = "Rate Transcription";
"Chat.AudioTranscriptionFeedbackTip" = "Thank you for your feedback.";
"Message.AudioTranscription.ErrorEmpty" = "No speech detected";
"Message.AudioTranscription.ErrorTooLong" = "The audio is too long";
"Message.AudioTranscription.ErrorTooLong" = "This voice message is too long to transcribe";
"WebApp.SelectChat" = "Select Chat";

@@ -278,6 +278,7 @@ swift_library(
"//submodules/Utils/RangeSet:RangeSet",
"//submodules/InAppPurchaseManager:InAppPurchaseManager",
"//submodules/TelegramUI/Components/AudioTranscriptionButtonComponent:AudioTranscriptionButtonComponent",
"//submodules/TelegramUI/Components/AudioTranscriptionPendingIndicatorComponent:AudioTranscriptionPendingIndicatorComponent",
"//submodules/TelegramUI/Components/AudioWaveformComponent:AudioWaveformComponent",
"//submodules/TelegramUI/Components/EditableChatTextNode:EditableChatTextNode",
"//submodules/TelegramUI/Components/EmojiTextAttachmentView:EmojiTextAttachmentView",

@@ -0,0 +1,20 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "AudioTranscriptionPendingIndicatorComponent",
module_name = "AudioTranscriptionPendingIndicatorComponent",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/ComponentFlow:ComponentFlow",
"//submodules/AppBundle:AppBundle",
"//submodules/Display:Display",
],
visibility = [
"//visibility:public",
],
)

@@ -0,0 +1,91 @@
import Foundation
import UIKit
import ComponentFlow
import AppBundle
import Display
public final class AudioTranscriptionPendingIndicatorComponent: Component {
public let color: UIColor
public init(color: UIColor) {
self.color = color
}
public static func ==(lhs: AudioTranscriptionPendingIndicatorComponent, rhs: AudioTranscriptionPendingIndicatorComponent) -> Bool {
if lhs.color !== rhs.color {
return false
}
return true
}
public final class View: UIView {
private var component: AudioTranscriptionPendingIndicatorComponent?
private var dotLayers: [SimpleLayer] = []
override init(frame: CGRect) {
super.init(frame: frame)
for _ in 0 ..< 3 {
let dotLayer = SimpleLayer()
self.dotLayers.append(dotLayer)
self.layer.addSublayer(dotLayer)
}
self.dotLayers[0].didEnterHierarchy = { [weak self] in
self?.restartAnimations()
}
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func restartAnimations() {
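// Stagger a repeating, auto-reversing fade (opacity 0 -> 1) across the dots, 0.07s apart, to produce the trailing-dots effect.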
let beginTime = self.layer.convertTime(CACurrentMediaTime(), from: nil)
for i in 0 ..< self.dotLayers.count {
let delay = Double(i) * 0.07
let animation = CABasicAnimation(keyPath: "opacity")
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear)
animation.beginTime = beginTime + delay
animation.fromValue = 0.0 as NSNumber
animation.toValue = 1.0 as NSNumber
animation.repeatCount = Float.infinity
animation.autoreverses = true
animation.fillMode = .both
self.dotLayers[i].add(animation, forKey: "idle")
}
}
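// Lays out the dots (2pt diameter, 3pt spacing) and regenerates the dot image only when the color changes.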
func update(component: AudioTranscriptionPendingIndicatorComponent, availableSize: CGSize, transition: Transition) -> CGSize {
let dotSize: CGFloat = 2.0
let spacing: CGFloat = 3.0
if self.component?.color != component.color {
if let dotImage = generateFilledCircleImage(diameter: dotSize, color: component.color) {
for dotLayer in self.dotLayers {
dotLayer.contents = dotImage.cgImage
}
}
}
self.component = component
let size = CGSize(width: dotSize * CGFloat(self.dotLayers.count) + spacing * CGFloat(self.dotLayers.count - 1), height: dotSize)
for i in 0 ..< self.dotLayers.count {
self.dotLayers[i].frame = CGRect(origin: CGPoint(x: CGFloat(i) * (dotSize + spacing), y: 0.0), size: CGSize(width: dotSize, height: dotSize))
}
return CGSize(width: min(availableSize.width, size.width), height: min(availableSize.height, size.height))
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
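A minimal usage sketch (not part of this commit), mirroring how ChatMessageInteractiveFileNode mounts the component further down; hostView, parentView, and the color below are placeholders:

// Embed the indicator through a ComponentHostView, as done elsewhere in this commit.
let hostView = ComponentHostView<Empty>()
parentView.addSubview(hostView)
let indicatorSize = hostView.update(
    transition: .immediate,
    component: AnyComponent(AudioTranscriptionPendingIndicatorComponent(color: .gray)),
    environment: {},
    containerSize: CGSize(width: 100.0, height: 100.0)
)
hostView.frame = CGRect(origin: CGPoint(), size: indicatorSize)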

@@ -25,6 +25,7 @@ import ShimmerEffect
import ConvertOpusToAAC
import LocalAudioTranscription
import TextSelectionNode
import AudioTranscriptionPendingIndicatorComponent
private struct FetchControls {
let fetch: (Bool) -> Void
@@ -138,6 +139,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private var waveformScrubbingNode: MediaPlayerScrubbingNode?*/
private var audioTranscriptionButton: ComponentHostView<Empty>?
private var transcriptionPendingIndicator: ComponentHostView<Empty>?
private let textNode: TextNode
private let textClippingNode: ASDisplayNode
private var textSelectionNode: TextSelectionNode?
@@ -634,7 +636,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let descriptionMaxWidth = max(descriptionLayout.size.width, descriptionMeasuringLayout.size.width)
let textFont = arguments.presentationData.messageFont
let textString: NSAttributedString?
var textString: NSAttributedString?
var updatedAudioTranscriptionState: AudioTranscriptionButtonComponent.TranscriptionState?
let transcribedText = transcribedText(message: arguments.message)
@@ -650,14 +652,24 @@
let effectiveAudioTranscriptionState = updatedAudioTranscriptionState ?? audioTranscriptionState
var displayTrailingAnimatedDots = false
/*#if DEBUG
if "".isEmpty {
displayTrailingAnimatedDots = true
}
#endif*/
if let transcribedText = transcribedText, case .expanded = effectiveAudioTranscriptionState {
switch transcribedText {
case let .success(text, isPending):
var resultText = text
textString = NSAttributedString(string: text, font: textFont, textColor: messageTheme.primaryTextColor)
if isPending {
resultText += " [...]"
let modifiedString = NSMutableAttributedString(attributedString: textString!)
modifiedString.append(NSAttributedString(string: "...", font: textFont, textColor: .clear))
displayTrailingAnimatedDots = true
textString = modifiedString
}
textString = NSAttributedString(string: resultText, font: textFont, textColor: messageTheme.primaryTextColor)
case let .error(error):
let errorTextFont = Font.regular(floor(arguments.presentationData.fontSize.baseDisplaySize * 15.0 / 17.0))
let errorText: String
@@ -870,10 +882,11 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
strongSelf.descriptionNode.frame = descriptionFrame
strongSelf.descriptionMeasuringNode.frame = CGRect(origin: CGPoint(), size: descriptionMeasuringLayout.size)
/*if let updatedAudioTranscriptionState = updatedAudioTranscriptionState {
if let updatedAudioTranscriptionState = updatedAudioTranscriptionState {
strongSelf.audioTranscriptionState = updatedAudioTranscriptionState
}
switch updatedAudioTranscriptionState {
/*switch updatedAudioTranscriptionState {
case .expanded:
info?.setInvertOffsetDirection()
default:
@@ -1003,6 +1016,30 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
}
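// Mount the pending indicator just past the trailing line of transcribed text while transcription is still in progress; remove it once the final text arrives.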
if displayTrailingAnimatedDots {
let transcriptionPendingIndicator: ComponentHostView<Empty>
if let current = strongSelf.transcriptionPendingIndicator {
transcriptionPendingIndicator = current
} else {
transcriptionPendingIndicator = ComponentHostView<Empty>()
strongSelf.transcriptionPendingIndicator = transcriptionPendingIndicator
strongSelf.textClippingNode.view.addSubview(transcriptionPendingIndicator)
}
let indicatorSize = transcriptionPendingIndicator.update(
transition: .immediate,
component: AnyComponent(AudioTranscriptionPendingIndicatorComponent(color: messageTheme.primaryTextColor)),
environment: {},
containerSize: CGSize(width: 100.0, height: 100.0)
)
transcriptionPendingIndicator.frame = CGRect(origin: CGPoint(x: strongSelf.textNode.frame.minX + textLayout.trailingLineWidth + 2.0, y: strongSelf.textNode.frame.maxY - indicatorSize.height - 6.0), size: indicatorSize)
} else {
if let transcriptionPendingIndicator = strongSelf.transcriptionPendingIndicator {
strongSelf.transcriptionPendingIndicator = nil
transcriptionPendingIndicator.removeFromSuperview()
}
}
if let textSelectionNode = strongSelf.textSelectionNode {
let shouldUpdateLayout = textSelectionNode.frame.size != textFrame.size
textSelectionNode.frame = CGRect(origin: CGPoint(), size: textFrame.size)