Backport fixes

commit 789614e24d
parent 233a56b447
@@ -168,11 +168,14 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
     private let mediaDataReaderParams: MediaDataReaderParams
     private let audioSessionManager: ManagedAudioSession
     private let onSeeked: (() -> Void)?
+    private weak var playerNode: MediaPlayerNode?
 
     private let renderSynchronizer: AVSampleBufferRenderSynchronizer
     private var videoRenderer: AVSampleBufferDisplayLayer
     private var audioRenderer: AVSampleBufferAudioRenderer?
 
+    private var didNotifySentVideoFrames: Bool = false
+
     private var partsState = ChunkMediaPlayerPartsState(duration: nil, content: .parts([]))
     private var loadedParts: [LoadedPart] = []
     private var loadedPartsMediaData: QueueLocalObject<LoadedPartsMediaData>
@@ -244,6 +247,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
         self.mediaDataReaderParams = params
         self.audioSessionManager = audioSessionManager
         self.onSeeked = onSeeked
+        self.playerNode = playerNode
 
         self.loadedPartsMediaData = QueueLocalObject(queue: self.dataQueue, generate: {
             return LoadedPartsMediaData()
@@ -918,10 +922,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                 videoTarget = self.videoRenderer
             }
 
+            let didNotifySentVideoFrames = self.didNotifySentVideoFrames
             videoTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                 if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
-                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: videoTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: true)
-                    if bufferIsReadyForMoreData {
+                    let bufferFillResult = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: videoTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: true)
+                    if bufferFillResult.bufferIsReadyForMoreData {
                         videoTarget.stopRequestingMediaData()
                         Queue.mainQueue().async {
                             guard let self else {
@@ -931,6 +936,21 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                             self.updateInternalState()
                         }
                     }
+                    if !didNotifySentVideoFrames {
+                        Queue.mainQueue().async {
+                            guard let self else {
+                                return
+                            }
+                            if self.didNotifySentVideoFrames {
+                                return
+                            }
+                            self.didNotifySentVideoFrames = true
+                            if #available(iOS 17.4, *) {
+                            } else {
+                                self.playerNode?.hasSentFramesToDisplay?()
+                            }
+                        }
+                    }
                 }
             })
         }
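The hunks above backport a one-shot "first video frames rendered" notification: the player snapshots didNotifySentVideoFrames before installing the ready-callback and, on systems older than iOS 17.4, forwards the event to the weakly held MediaPlayerNode on the main queue (on 17.4 and later the branch is left empty, presumably because readiness is reported through another path). A minimal self-contained sketch of the pattern; FrameNotifier and bufferFillCompleted(didNotifySnapshot:) are illustrative stand-ins, not names from the codebase:

import Foundation

final class FrameNotifier {
    // Mirrors ChunkMediaPlayerV2.didNotifySentVideoFrames; only mutated on the main queue.
    private var didNotifySentVideoFrames = false
    // Stand-in for MediaPlayerNode.hasSentFramesToDisplay.
    var hasSentFramesToDisplay: (() -> Void)?

    // Called from the data queue after a buffer fill. `didNotifySnapshot` is the
    // flag value captured before requestMediaDataWhenReady was installed.
    func bufferFillCompleted(didNotifySnapshot: Bool) {
        if !didNotifySnapshot {
            DispatchQueue.main.async { [weak self] in
                guard let self else {
                    return
                }
                // Several data-queue callbacks can observe a stale `false`
                // snapshot; re-checking here keeps the callback one-shot.
                if self.didNotifySentVideoFrames {
                    return
                }
                self.didNotifySentVideoFrames = true
                self.hasSentFramesToDisplay?()
            }
        }
    }
}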
@@ -941,8 +961,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
             let audioTarget = audioRenderer
             audioTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                 if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
-                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: audioTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: false)
-                    if bufferIsReadyForMoreData {
+                    let bufferFillResult = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: audioTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: false)
+                    if bufferFillResult.bufferIsReadyForMoreData {
                         audioTarget.stopRequestingMediaData()
                         Queue.mainQueue().async {
                             guard let self else {
@@ -957,8 +977,9 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
         }
     }
 
-    private static func fillRendererBuffer(bufferTarget: AVQueuedSampleBufferRendering, loadedPartsMediaData: LoadedPartsMediaData, isVideo: Bool) -> Bool {
+    private static func fillRendererBuffer(bufferTarget: AVQueuedSampleBufferRendering, loadedPartsMediaData: LoadedPartsMediaData, isVideo: Bool) -> (bufferIsReadyForMoreData: Bool, didEnqueue: Bool) {
         var bufferIsReadyForMoreData = true
+        var didEnqueue = false
         outer: while true {
             if !bufferTarget.isReadyForMoreMediaData {
                 bufferIsReadyForMoreData = false
@@ -1054,6 +1075,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                     print("Enqueue audio \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value) next: \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + 1024)")
                     }*/
                     bufferTarget.enqueue(sampleBuffer)
+                    didEnqueue = true
                     hasData = true
                     continue outer
                 case .waitingForMoreData:
@@ -1067,7 +1089,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
             }
         }
 
-        return bufferIsReadyForMoreData
+        return (bufferIsReadyForMoreData: bufferIsReadyForMoreData, didEnqueue: didEnqueue)
     }
 }
 
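The fillRendererBuffer hunks widen the return type from a bare Bool to a named tuple, so callers learn both whether the source ran dry while the renderer could still accept data (bufferIsReadyForMoreData, which drives stopRequestingMediaData and the state update above) and whether at least one sample was actually enqueued (didEnqueue, not yet consumed in the hunks shown). A condensed sketch of the new shape, assuming a nextSampleBuffer() closure in place of the real LoadedPartsMediaData readers:

import AVFoundation

func fillRendererBuffer(
    bufferTarget: AVQueuedSampleBufferRendering,
    nextSampleBuffer: () -> CMSampleBuffer?
) -> (bufferIsReadyForMoreData: Bool, didEnqueue: Bool) {
    var bufferIsReadyForMoreData = true
    var didEnqueue = false
    while true {
        if !bufferTarget.isReadyForMoreMediaData {
            // Renderer is saturated: report that filling was cut off so the
            // caller leaves its requestMediaDataWhenReady callback installed.
            bufferIsReadyForMoreData = false
            break
        }
        guard let sampleBuffer = nextSampleBuffer() else {
            // Source exhausted while the renderer still had room; the caller
            // stops requesting media data and re-arms once more parts load.
            break
        }
        bufferTarget.enqueue(sampleBuffer)
        didEnqueue = true
    }
    return (bufferIsReadyForMoreData: bufferIsReadyForMoreData, didEnqueue: didEnqueue)
}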
@@ -1743,7 +1743,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     }
                     strongSelf.onMutedSpeechActivityDetected?(value)
                 }
-            }, encryptionKey: encryptionKey, isConference: self.isConference, sharedAudioDevice: self.sharedAudioDevice))
+            }, encryptionKey: encryptionKey, isConference: self.isConference, isStream: self.isStream, sharedAudioDevice: self.sharedAudioDevice))
         }
 
         self.genericCallContext = genericCallContext
@@ -3112,7 +3112,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 
         self.hasScreencast = true
 
-        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil, isConference: self.isConference, sharedAudioDevice: nil)
+        let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, enableSystemMute: false, preferX264: false, logPath: "", onMutedSpeechActivityDetected: { _ in }, encryptionKey: nil, isConference: self.isConference, isStream: false, sharedAudioDevice: nil)
         self.screencastCallContext = screencastCallContext
 
         self.screencastJoinDisposable.set((screencastCallContext.joinPayload
@@ -498,6 +498,7 @@ public final class OngoingGroupCallContext {
             onMutedSpeechActivityDetected: @escaping (Bool) -> Void,
             encryptionKey: Data?,
             isConference: Bool,
+            isStream: Bool,
             sharedAudioDevice: OngoingCallContext.AudioDevice?
         ) {
             self.queue = queue
@@ -508,7 +509,7 @@ public final class OngoingGroupCallContext {
             let tempStatsLogPath = self.tempStatsLogFile.path
 
             #if os(iOS)
-            if sharedAudioDevice == nil {
+            if sharedAudioDevice == nil && !isStream {
                 self.audioDevice = OngoingCallContext.AudioDevice.create(enableSystemMute: false)
             } else {
                 self.audioDevice = sharedAudioDevice
@@ -1128,10 +1129,10 @@ public final class OngoingGroupCallContext {
         }
     }
 
-    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void, encryptionKey: Data?, isConference: Bool, sharedAudioDevice: OngoingCallContext.AudioDevice?) {
+    public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, enableSystemMute: Bool, preferX264: Bool, logPath: String, onMutedSpeechActivityDetected: @escaping (Bool) -> Void, encryptionKey: Data?, isConference: Bool, isStream: Bool, sharedAudioDevice: OngoingCallContext.AudioDevice?) {
         let queue = self.queue
         self.impl = QueueLocalObject(queue: queue, generate: {
-            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected, encryptionKey: encryptionKey, isConference: isConference, sharedAudioDevice: sharedAudioDevice)
+            return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, enableSystemMute: enableSystemMute, preferX264: preferX264, logPath: logPath, onMutedSpeechActivityDetected: onMutedSpeechActivityDetected, encryptionKey: encryptionKey, isConference: isConference, isStream: isStream, sharedAudioDevice: sharedAudioDevice)
         })
     }
 
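The group-call hunks thread a new isStream flag from PresentationGroupCallImpl through both OngoingGroupCallContext initializers down to Impl, where it gates audio-device creation on iOS: a stream call no longer creates a local OngoingCallContext.AudioDevice, while the screencast path passes isStream: false and keeps its previous behavior. A self-contained sketch of the gating, with AudioDevice as a stand-in for OngoingCallContext.AudioDevice:

import Foundation

// Stand-in for OngoingCallContext.AudioDevice, which wraps the call engine's
// audio device and is created via create(enableSystemMute:).
final class AudioDevice {
    static func create(enableSystemMute: Bool) -> AudioDevice? {
        return AudioDevice()
    }
}

struct CallAudioConfiguration {
    let audioDevice: AudioDevice?

    init(sharedAudioDevice: AudioDevice?, isStream: Bool) {
        if sharedAudioDevice == nil && !isStream {
            // A regular group call owns a local device for capture and playback.
            self.audioDevice = AudioDevice.create(enableSystemMute: false)
        } else {
            // Conference calls reuse the shared device; stream (broadcast)
            // calls pass nil and skip local device creation entirely.
            self.audioDevice = sharedAudioDevice
        }
    }
}

// With the fix, a stream call ends up with no local audio device:
let streamAudio = CallAudioConfiguration(sharedAudioDevice: nil, isStream: true)
assert(streamAudio.audioDevice == nil)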
@@ -1,5 +1,5 @@
 {
-    "app": "11.6.2",
+    "app": "11.6.3",
     "xcode": "16.0",
     "bazel": "7.3.1:981f82a470bad1349322b6f51c9c6ffa0aa291dab1014fac411543c12e661dff",
     "macos": "15.0"