Audio level feedback

This commit is contained in:
Ali 2020-07-06 01:19:30 +04:00
parent 52aa4f5619
commit 3e25bb7bf5
9 changed files with 177 additions and 14 deletions

View File

@ -27,6 +27,7 @@ public protocol MediaManager: class {
func playlistControl(_ control: SharedMediaPlayerControlAction, type: MediaManagerPlayerType?)
func filteredPlaylistState(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<SharedMediaPlayerItemPlaybackState?, NoError>
func filteredPlayerAudioLevelEvents(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<Float, NoError>
func setOverlayVideoNode(_ node: OverlayMediaItemNode?)
func hasOverlayVideoNode(_ node: OverlayMediaItemNode) -> Bool

View File

@ -969,7 +969,7 @@ public final class MediaPlayer {
}
private let audioLevelPipe = ValuePipe<Float>()
public var audioLevelStream: Signal<Float, NoError> {
public var audioLevelEvents: Signal<Float, NoError> {
return self.audioLevelPipe.signal()
}

View File

@ -15,6 +15,9 @@ private final class AudioPlayerRendererBufferContext {
var state: AudioPlayerRendererState = .paused
let timebase: CMTimebase
let buffer: RingByteBuffer
var audioLevelPeak: Int16 = 0
var audioLevelPeakCount: Int = 0
var audioLevelPeakUpdate: Double = 0.0
var bufferMaxChannelSampleIndex: Int64 = 0
var lowWaterSize: Int
var notifyLowWater: () -> Void
@ -113,7 +116,6 @@ private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: U
}
let rendererBuffer = context.buffer
var updatedLevel = false
while rendererFillOffset.0 < bufferList.count {
if let bufferData = bufferList[rendererFillOffset.0].mData {
@ -128,11 +130,31 @@ private func rendererInputProc(refCon: UnsafeMutableRawPointer, ioActionFlags: U
let consumeCount = bufferDataSize - dataOffset
let actualConsumedCount = rendererBuffer.dequeue(bufferData.advanced(by: dataOffset), count: consumeCount)
if !updatedLevel && actualConsumedCount > 0 {
updatedLevel = true
let value = bufferData.advanced(by: dataOffset).assumingMemoryBound(to: UInt16.self).pointee
context.updatedLevel(Float(value) / Float(UInt16.max))
var samplePtr = bufferData.advanced(by: dataOffset).assumingMemoryBound(to: Int16.self)
for _ in 0 ..< actualConsumedCount / 4 {
let sample: Int16 = abs(samplePtr.pointee)
samplePtr = samplePtr.advanced(by: 2)
if context.audioLevelPeak < sample {
context.audioLevelPeak = sample
}
context.audioLevelPeakCount += 1
if context.audioLevelPeakCount >= 1200 {
let level = Float(context.audioLevelPeak) / (4000.0)
/*let timestamp = CFAbsoluteTimeGetCurrent()
if !context.audioLevelPeakUpdate.isZero {
let delta = timestamp - context.audioLevelPeakUpdate
print("level = \(level), delta = \(delta)")
}
context.audioLevelPeakUpdate = timestamp*/
context.updatedLevel(level)
context.audioLevelPeak = 0
context.audioLevelPeakCount = 0
}
}
rendererFillOffset.1 += actualConsumedCount
if actualConsumedCount == 0 {

View File

@ -11,6 +11,22 @@ import TelegramUIPreferences
class ChatMessageFileBubbleContentNode: ChatMessageBubbleContentNode {
private let interactiveFileNode: ChatMessageInteractiveFileNode
override var visibility: ListViewItemNodeVisibility {
    didSet {
        // Collapse the list-view visibility enum into a boolean and forward
        // only actual transitions to the interactive file node.
        var previouslyVisible = false
        if case .visible = oldValue {
            previouslyVisible = true
        }
        var currentlyVisible = false
        if case .visible = self.visibility {
            currentlyVisible = true
        }
        if previouslyVisible != currentlyVisible {
            self.interactiveFileNode.visibility = currentlyVisible
        }
    }
}
required init() {
self.interactiveFileNode = ChatMessageInteractiveFileNode()

View File

@ -33,6 +33,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private var iconNode: TransformImageNode?
private var statusNode: SemanticStatusNode?
private var playbackAudioLevelView: VoiceBlobView?
private var displayLinkAnimator: ConstantDisplayLinkAnimator?
private var streamingStatusNode: RadialStatusNode?
private var tapRecognizer: UITapGestureRecognizer?
@ -40,6 +42,8 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
private let playbackStatusDisposable = MetaDisposable()
private let playbackStatus = Promise<MediaPlayerStatus>()
private let audioLevelEventsDisposable = MetaDisposable()
private var playerUpdateTimer: SwiftSignalKit.Timer?
private var playerStatus: MediaPlayerStatus? {
didSet {
@ -54,6 +58,30 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}
}
// Most recent raw audio level received from the player (target of smoothing).
private var inputAudioLevel: CGFloat = 0.0
// Smoothed level actually fed to the blob view each display-link tick.
private var currentAudioLevel: CGFloat = 0.0
// Whether this node is currently on screen. While visible, a display link
// eases `currentAudioLevel` toward `inputAudioLevel` and drives the blob
// animation; the link is paused when the node goes off screen.
var visibility: Bool = false {
didSet {
if self.visibility != oldValue {
if self.visibility {
// Lazily create the display link on first appearance.
if self.displayLinkAnimator == nil {
self.displayLinkAnimator = ConstantDisplayLinkAnimator(update: { [weak self] in
guard let strongSelf = self else {
return
}
// Exponential smoothing: keep 90% of the previous value,
// blend in 10% of the latest input each frame.
strongSelf.currentAudioLevel = strongSelf.currentAudioLevel * 0.9 + strongSelf.inputAudioLevel * 0.1
strongSelf.playbackAudioLevelView?.tick(strongSelf.currentAudioLevel)
})
}
self.displayLinkAnimator?.isPaused = false
} else {
self.displayLinkAnimator?.isPaused = true
}
}
}
}
private let fetchControls = Atomic<FetchControls?>(value: nil)
private var resourceStatus: FileMediaResourceStatus?
private var actualFetchStatus: MediaResourceStatus?
@ -120,6 +148,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
self.statusDisposable.dispose()
self.playbackStatusDisposable.dispose()
self.fetchDisposable.dispose()
self.audioLevelEventsDisposable.dispose()
}
override func didLoad() {
@ -204,6 +233,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
var updateImageSignal: Signal<(TransformImageArguments) -> DrawingContext?, NoError>?
var updatedStatusSignal: Signal<(FileMediaResourceStatus, MediaResourceStatus?), NoError>?
var updatedAudioLevelEventsSignal: Signal<Float, NoError>?
var updatedPlaybackStatusSignal: Signal<MediaPlayerStatus, NoError>?
var updatedFetchControls: FetchControls?
@ -241,11 +271,13 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
|> map { resourceStatus, actualFetchStatus -> (FileMediaResourceStatus, MediaResourceStatus?) in
return (resourceStatus, actualFetchStatus)
}
updatedAudioLevelEventsSignal = messageFileMediaPlaybackAudioLevelEvents(context: context, file: file, message: message, isRecentActions: isRecentActions)
} else {
updatedStatusSignal = messageFileMediaResourceStatus(context: context, file: file, message: message, isRecentActions: isRecentActions)
|> map { resourceStatus -> (FileMediaResourceStatus, MediaResourceStatus?) in
return (resourceStatus, nil)
}
updatedAudioLevelEventsSignal = messageFileMediaPlaybackAudioLevelEvents(context: context, file: file, message: message, isRecentActions: isRecentActions)
}
updatedPlaybackStatusSignal = messageFileMediaPlaybackStatus(context: context, file: file, message: message, isRecentActions: isRecentActions)
}
@ -622,6 +654,17 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
}))
}
if let updatedAudioLevelEventsSignal = updatedAudioLevelEventsSignal {
strongSelf.audioLevelEventsDisposable.set((updatedAudioLevelEventsSignal
|> deliverOnMainQueue).start(next: { value in
guard let strongSelf = self else {
return
}
strongSelf.inputAudioLevel = CGFloat(value)
strongSelf.playbackAudioLevelView?.updateLevel(CGFloat(value))
}))
}
if let updatedPlaybackStatusSignal = updatedPlaybackStatusSignal {
strongSelf.playbackStatus.set(updatedPlaybackStatusSignal)
strongSelf.playbackStatusDisposable.set((updatedPlaybackStatusSignal |> deliverOnMainQueue).start(next: { [weak strongSelf] status in
@ -636,6 +679,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
strongSelf.waveformNode.displaysAsynchronously = !presentationData.isPreview
strongSelf.statusNode?.displaysAsynchronously = !presentationData.isPreview
strongSelf.statusNode?.frame = progressFrame
strongSelf.playbackAudioLevelView?.frame = progressFrame.insetBy(dx: -20.0, dy: -20.0)
strongSelf.progressFrame = progressFrame
strongSelf.streamingCacheStatusFrame = streamingCacheStatusFrame
strongSelf.fileIconImage = fileIconImage
@ -816,6 +860,14 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
let statusNode = SemanticStatusNode(backgroundNodeColor: backgroundNodeColor, foregroundNodeColor: foregroundNodeColor)
self.statusNode = statusNode
statusNode.frame = progressFrame
if self.playbackAudioLevelView == nil {
let playbackAudioLevelView = VoiceBlobView(frame: progressFrame.insetBy(dx: -20.0, dy: -20.0))
playbackAudioLevelView.setColor(presentationData.theme.theme.chat.inputPanel.actionControlFillColor)
self.playbackAudioLevelView = playbackAudioLevelView
self.view.addSubview(playbackAudioLevelView)
}
self.addSubnode(statusNode)
} else if let statusNode = self.statusNode {
statusNode.backgroundNodeColor = backgroundNodeColor

View File

@ -33,6 +33,18 @@ func messageFileMediaPlaybackStatus(context: AccountContext, file: TelegramMedia
}
}
/// Returns the stream of audio level events for the shared media player that is
/// (or becomes) associated with the given message's file. If the message cannot
/// be mapped to a player type or a playlist item, the returned signal never fires.
func messageFileMediaPlaybackAudioLevelEvents(context: AccountContext, file: TelegramMediaFile, message: Message, isRecentActions: Bool) -> Signal<Float, NoError> {
    guard let playerType = peerMessageMediaPlayerType(message), let (playlistId, itemId) = peerMessagesMediaPlaylistAndItemId(message, isRecentActions: isRecentActions) else {
        return .never()
    }
    return context.sharedContext.mediaManager.filteredPlayerAudioLevelEvents(accountId: context.account.id, playlistId: playlistId, itemId: itemId, type: playerType)
}
func messageFileMediaResourceStatus(context: AccountContext, file: TelegramMediaFile, message: Message, isRecentActions: Bool, isSharedMedia: Bool = false) -> Signal<FileMediaResourceStatus, NoError> {
let playbackStatus = internalMessageFileMediaPlaybackStatus(context: context, file: file, message: message, isRecentActions: isRecentActions) |> map { status -> MediaPlayerPlaybackStatus? in
return status?.status

View File

@ -166,6 +166,7 @@ final class ManagedAudioRecorderContext {
private var micLevelPeak: Int16 = 0
private var micLevelPeakCount: Int = 0
private var audioLevelPeakUpdate: Double = 0.0
fileprivate var isPaused = false
@ -580,6 +581,12 @@ final class ManagedAudioRecorderContext {
if self.micLevelPeakCount >= 1200 {
let level = Float(self.micLevelPeak) / 4000.0
/*let timestamp = CFAbsoluteTimeGetCurrent()
if !self.audioLevelPeakUpdate.isZero {
let delta = timestamp - self.audioLevelPeakUpdate
print("level = \(level), delta = \(delta)")
}
self.audioLevelPeakUpdate = timestamp*/
self.micLevel.set(level)
self.micLevelPeak = 0
self.micLevelPeakCount = 0

View File

@ -70,24 +70,34 @@ public final class MediaManagerImpl: NSObject, MediaManager {
private var nextPlayerIndex: Int32 = 0
private let voiceMediaPlayerStateDisposable = MetaDisposable()
private var voiceMediaPlayer: SharedMediaPlayer? {
didSet {
if self.voiceMediaPlayer !== oldValue {
if let voiceMediaPlayer = self.voiceMediaPlayer {
let account = voiceMediaPlayer.account
self.voiceMediaPlayerStateValue.set(voiceMediaPlayer.playbackState
|> map { state -> (Account, SharedMediaPlayerItemPlaybackStateOrLoading)? in
guard let state = state else {
return nil
self.voiceMediaPlayerStateDisposable.set((voiceMediaPlayer.playbackState
|> deliverOnMainQueue).start(next: { [weak self, weak voiceMediaPlayer] state in
guard let strongSelf = self else {
return
}
guard let state = state, let voiceMediaPlayer = voiceMediaPlayer else {
strongSelf.voiceMediaPlayerStateValue.set(.single(nil))
return
}
if case let .item(item) = state {
return (account, .state(item))
strongSelf.voiceMediaPlayerStateValue.set(.single((account, .state(item))))
let audioLevelValue: (AccountRecordId, SharedMediaPlaylistId, SharedMediaPlaylistItemId, Signal<Float, NoError>)? = (account.id, item.playlistId, item.item.id, voiceMediaPlayer.audioLevel)
strongSelf.voiceMediaPlayerAudioLevelEvents.set(.single(audioLevelValue))
} else {
return (account, .loading)
strongSelf.voiceMediaPlayerStateValue.set(.single((account, .loading)))
strongSelf.voiceMediaPlayerAudioLevelEvents.set(.single(nil))
}
} |> deliverOnMainQueue)
}))
} else {
self.voiceMediaPlayerStateDisposable.set(nil)
self.voiceMediaPlayerStateValue.set(.single(nil))
self.voiceMediaPlayerAudioLevelEvents.set(.single(nil))
}
}
}
@ -97,6 +107,8 @@ public final class MediaManagerImpl: NSObject, MediaManager {
return self.voiceMediaPlayerStateValue.get()
}
private let voiceMediaPlayerAudioLevelEvents = Promise<(AccountRecordId, SharedMediaPlaylistId, SharedMediaPlaylistItemId, Signal<Float, NoError>)?>(nil)
private var musicMediaPlayer: SharedMediaPlayer? {
didSet {
if self.musicMediaPlayer !== oldValue {
@ -427,6 +439,7 @@ public final class MediaManagerImpl: NSObject, MediaManager {
self.setPlaylistByTypeDisposables.dispose()
self.mediaPlaybackStateDisposable.dispose()
self.globalAudioSessionForegroundDisposable.dispose()
self.voiceMediaPlayerStateDisposable.dispose()
}
public func audioRecorder(beginWithTone: Bool, applicationBindings: TelegramApplicationBindings, beganWithTone: @escaping (Bool) -> Void) -> Signal<ManagedAudioRecorder?, NoError> {
@ -569,6 +582,26 @@ public final class MediaManagerImpl: NSObject, MediaManager {
})
}
/// Exposes audio level events only for the voice player, and only while the
/// currently playing item matches the requested account/playlist/item triple;
/// otherwise the signal never fires. Music playback reports no levels.
public func filteredPlayerAudioLevelEvents(accountId: AccountRecordId, playlistId: SharedMediaPlaylistId, itemId: SharedMediaPlaylistItemId, type: MediaManagerPlayerType) -> Signal<Float, NoError> {
    guard case .voice = type else {
        return .never()
    }
    return self.voiceMediaPlayerAudioLevelEvents.get()
    |> mapToSignal { current -> Signal<Float, NoError> in
        guard let (currentAccountId, currentPlaylistId, currentItemId, levelSignal) = current else {
            return .never()
        }
        if currentAccountId == accountId && playlistId.isEqual(to: currentPlaylistId) && itemId.isEqual(to: currentItemId) {
            return levelSignal
        }
        return .never()
    }
}
@objc func playCommandEvent(_ command: AnyObject) -> MPRemoteCommandHandlerStatus {
self.playlistControl(.playback(.play), type: nil)

View File

@ -143,7 +143,27 @@ final class SharedMediaPlayer {
return self.playbackStateValue.get()
}
private var playbackItem: SharedMediaPlaybackItem?
// Fan-out pipe re-publishing the current audio item's level events to observers.
private let audioLevelPipe = ValuePipe<Float>()
// Public stream of audio levels for whatever item is currently playing.
var audioLevel: Signal<Float, NoError> {
return self.audioLevelPipe.signal()
}
// Subscription to the current audio player's level events; replaced (or
// cleared) whenever `playbackItem` changes.
private let audioLevelDisposable = MetaDisposable()
// Current playlist item being played. Changing it rewires audio-level
// forwarding: audio items feed the pipe, anything else detaches it.
private var playbackItem: SharedMediaPlaybackItem? {
didSet {
if playbackItem != oldValue {
switch playbackItem {
case let .audio(player):
// Capture the pipe weakly so the subscription cannot keep this
// player alive through the closure.
let audioLevelPipe = self.audioLevelPipe
self.audioLevelDisposable.set((player.audioLevelEvents.start(next: { [weak audioLevelPipe] value in
audioLevelPipe?.putNext(value)
})))
default:
// Non-audio (or nil) item: stop forwarding levels.
self.audioLevelDisposable.set(nil)
}
}
}
}
private var currentPlayedToEnd = false
private var scheduledPlaybackAction: SharedMediaPlayerPlaybackControlAction?
private var scheduledStartTime: Double?