Various improvements

This commit is contained in:
Isaac 2025-02-28 00:34:28 +01:00
parent e5908c0bc8
commit 23ebf39221
9 changed files with 448 additions and 360 deletions

View File

@@ -16,6 +16,7 @@ NS_ASSUME_NONNULL_BEGIN
- (void *)impl;
- (int32_t)sendToDecoder:(FFMpegAVCodecContext *)codecContext;
- (void)reuse;
@end

View File

@@ -77,6 +77,7 @@ int FFMpegCodecIdAV1 = AV_CODEC_ID_AV1;
}
- (bool)readFrameIntoPacket:(FFMpegPacket *)packet {
    // Drop any data still referenced by the packet so it can be safely refilled.
    [packet reuse];
    // av_read_frame returns 0 on success and a negative AVERROR on failure/EOF,
    // so a non-negative result means a frame was read into the packet.
    return av_read_frame(_impl, (AVPacket *)[packet impl]) >= 0;
}

View File

@@ -65,4 +65,8 @@
return avcodec_send_packet((AVCodecContext *)[codecContext impl], _impl);
}
// Releases the buffers referenced by the underlying AVPacket so this
// packet object can be reused for the next av_read_frame call.
- (void)reuse {
av_packet_unref(_impl);
}
@end

View File

@@ -287,7 +287,7 @@ typedef enum
NSTimeInterval duration = trimEndPosition - trimStartPosition;
if (trimEndPosition - trimStartPosition < self.minimumLength)
if (trimEndPosition - trimStartPosition < strongSelf.minimumLength)
return;
if (strongSelf.maximumLength > DBL_EPSILON && duration > strongSelf.maximumLength)

View File

@@ -472,12 +472,16 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
return self.partsStateValue.get()
}
private var resourceSizeDisposable: Disposable?
private var completeFetchDisposable: Disposable?
private var seekTimestamp: Double?
private var currentLookaheadId: Int = 0
private var lookahead: FFMpegLookahead?
private var resolvedResourceSize: Int64?
private var pendingSeek: (id: Int, position: Double)?
init(resource: ChunkMediaPlayerV2.SourceDescription.ResourceDescription) {
self.resource = resource
@@ -494,16 +498,43 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
}
deinit {
self.resourceSizeDisposable?.dispose()
self.completeFetchDisposable?.dispose()
}
func seek(id: Int, position: Double) {
if self.resource.size == 0 && self.resolvedResourceSize == nil {
self.pendingSeek = (id, position)
if self.resourceSizeDisposable == nil {
self.resourceSizeDisposable = (self.resource.postbox.mediaBox.resourceData(self.resource.reference.resource, option: .complete(waitUntilFetchStatus: false))
|> deliverOnMainQueue).start(next: { [weak self] data in
guard let self else {
return
}
if data.complete {
if self.resolvedResourceSize == nil {
self.resolvedResourceSize = data.size
if let pendingSeek = self.pendingSeek {
self.seek(id: pendingSeek.id, position: pendingSeek.position)
}
}
}
})
}
return
}
self.seekTimestamp = position
self.currentLookaheadId += 1
let lookaheadId = self.currentLookaheadId
let resource = self.resource
let resourceSize = self.resolvedResourceSize ?? Int64(resource.size)
let updateState: (FFMpegLookahead.State) -> Void = { [weak self] state in
Queue.mainQueue().async {
guard let self else {
@@ -559,7 +590,7 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
return ChunkMediaPlayerPartsState.DirectReader.Stream(
mediaBox: resource.postbox.mediaBox,
resource: resource.reference.resource,
size: resource.size,
size: resourceSize,
index: media.info.index,
seek: (streamIndex: state.seek.streamIndex, pts: state.seek.pts),
maxReadablePts: (streamIndex: maxReadablePts.streamIndex, pts: maxReadablePts.pts, isEnded: state.isEnded),
@@ -602,18 +633,18 @@ final class ChunkMediaPlayerDirectFetchSourceImpl: ChunkMediaPlayerSourceImpl {
).startStrict()
},
getDataInRange: { range, completion in
return resource.postbox.mediaBox.resourceData(resource.reference.resource, size: resource.size, in: range, mode: .complete).start(next: { result, isComplete in
return resource.postbox.mediaBox.resourceData(resource.reference.resource, size: resourceSize, in: range, mode: .complete).start(next: { result, isComplete in
completion(isComplete ? result : nil)
})
},
isDataCachedInRange: { range in
return resource.postbox.mediaBox.internal_resourceDataIsCached(
id: resource.reference.resource.id,
size: resource.size,
size: resourceSize,
in: range
)
},
size: self.resource.size
size: resourceSize
)
}

View File

@@ -447,6 +447,7 @@ private final class SharedHLSVideoJSContext: NSObject {
}
guard let instance = self.contextReferences[instanceId]?.contentNode else {
self.contextReferences.removeValue(forKey: instanceId)
self.cleanupContextsIfEmpty()
return
}
guard let eventData = message["data"] as? [String: Any] else {
@@ -484,6 +485,10 @@ private final class SharedHLSVideoJSContext: NSObject {
self.jsContext = nil
}
self.isJsContextReady = false
self.videoElements.removeAll()
self.mediaSources.removeAll()
self.sourceBuffers.removeAll()
}
private func bridgeInvoke(
@@ -848,9 +853,22 @@ private final class SharedHLSVideoJSContext: NSObject {
self.jsContext?.evaluateJavaScript("window.hlsPlayer_destroyInstance(\(contextInstanceId));")
self.cleanupContextsIfEmpty()
}
}
}
private func cleanupContextsIfEmpty() {
if self.contextReferences.isEmpty {
if self.emptyTimer == nil {
self.emptyTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 10.0, repeats: false, block: { [weak self] timer in
let disposeTimeout: Double
#if DEBUG
disposeTimeout = 0.5
#else
disposeTimeout = 10.0
#endif
self.emptyTimer = Foundation.Timer.scheduledTimer(withTimeInterval: disposeTimeout, repeats: false, block: { [weak self] timer in
guard let self else {
return
}
@@ -864,8 +882,6 @@ private final class SharedHLSVideoJSContext: NSObject {
}
}
}
}
}
func initializeWhenReady(context: HLSVideoJSNativeContentNode, urlPrefix: String) {
self.pendingInitializeInstanceIds.append((context.instanceId, urlPrefix))
@@ -894,6 +910,7 @@ private final class SharedHLSVideoJSContext: NSObject {
for (instanceId, urlPrefix) in pendingInitializeInstanceIds {
guard let _ = self.contextReferences[instanceId]?.contentNode else {
self.contextReferences.removeValue(forKey: instanceId)
self.cleanupContextsIfEmpty()
continue
}
userScriptJs.append("window.hlsPlayer_makeInstance(\(instanceId));\n")

View File

@@ -86,9 +86,14 @@ private var ObjCKey_ContextReference: Int?
}
deinit {
self.cleanup()
}
func cleanup() {
for (_, timer) in self.timers {
timer.invalidate()
}
self.timers.removeAll()
}
func register(jsContext: JSContext) {
@@ -175,6 +180,7 @@ final class WebViewNativeJSContextImpl: HLSJSContext {
fileprivate final class Impl {
let queue: Queue
let context: JSContext
let timeoutPolyfill: TimeoutPolyfill
let handleScriptMessage: ([String: Any]) -> Void
init(queue: Queue, handleScriptMessage: @escaping ([String: Any]) -> Void) {
@@ -182,6 +188,8 @@ final class WebViewNativeJSContextImpl: HLSJSContext {
self.context = JSContext()
self.handleScriptMessage = handleScriptMessage
self.timeoutPolyfill = TimeoutPolyfill(queue: self.queue)
#if DEBUG
if #available(iOS 16.4, *) {
self.context.isInspectable = true
@@ -197,9 +205,8 @@ final class WebViewNativeJSContextImpl: HLSJSContext {
}
}
let timeoutPolyfill = TimeoutPolyfill(queue: self.queue)
self.context.setObject(timeoutPolyfill, forKeyedSubscript: "_timeoutPolyfill" as (NSCopying & NSObjectProtocol))
timeoutPolyfill.register(jsContext: self.context)
self.context.setObject(self.timeoutPolyfill, forKeyedSubscript: "_timeoutPolyfill" as (NSCopying & NSObjectProtocol))
self.timeoutPolyfill.register(jsContext: self.context)
self.context.setObject(JsCorePolyfills(queue: self.queue, context: Reference(context: self)), forKeyedSubscript: "_JsCorePolyfills" as (NSCopying & NSObjectProtocol))
@@ -219,6 +226,7 @@ final class WebViewNativeJSContextImpl: HLSJSContext {
}
deinit {
self.timeoutPolyfill.cleanup()
print("WebViewNativeJSContextImpl.deinit")
}

View File

@@ -148,6 +148,8 @@ public:
}
void UpdateAudioCallback(webrtc::AudioTransport *previousAudioCallback, webrtc::AudioTransport *audioCallback) {
_mutex.Lock();
if (audioCallback) {
_audioTransports.push_back(audioCallback);
} else if (previousAudioCallback) {
@@ -158,6 +160,8 @@ public:
}
}
}
_mutex.Unlock();
}
virtual int32_t RegisterAudioCallback(webrtc::AudioTransport *audioCallback) override {
@@ -467,9 +471,10 @@ public:
bool keyPressed,
uint32_t& newMicLevel
) override {
_mutex.Lock();
if (!_audioTransports.empty()) {
for (size_t i = 0; i < _audioTransports.size(); i++) {
auto result = _audioTransports[_audioTransports.size() - 1]->RecordedDataIsAvailable(
_audioTransports[_audioTransports.size() - 1]->RecordedDataIsAvailable(
audioSamples,
nSamples,
nBytesPerSample,
@@ -481,15 +486,10 @@ public:
keyPressed,
newMicLevel
);
if (i == _audioTransports.size() - 1) {
return result;
}
}
_mutex.Unlock();
return 0;
} else {
return 0;
}
}
virtual int32_t RecordedDataIsAvailable(
@@ -505,9 +505,10 @@ public:
uint32_t& newMicLevel,
absl::optional<int64_t> estimatedCaptureTimeNS
) override {
_mutex.Lock();
if (!_audioTransports.empty()) {
for (size_t i = _audioTransports.size() - 1; i < _audioTransports.size(); i++) {
auto result = _audioTransports[_audioTransports.size() - 1]->RecordedDataIsAvailable(
_audioTransports[_audioTransports.size() - 1]->RecordedDataIsAvailable(
audioSamples,
nSamples,
nBytesPerSample,
@@ -520,14 +521,10 @@ public:
newMicLevel,
estimatedCaptureTimeNS
);
if (i == _audioTransports.size() - 1) {
return result;
}
}
_mutex.Unlock();
return 0;
} else {
return 0;
}
}
// Implementation has to setup safe values for all specified out parameters.
@@ -541,8 +538,11 @@ public:
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms
) override {
_mutex.Lock();
int32_t result = 0;
if (!_audioTransports.empty()) {
return _audioTransports[_audioTransports.size() - 1]->NeedMorePlayData(
result = _audioTransports[_audioTransports.size() - 1]->NeedMorePlayData(
nSamples,
nBytesPerSample,
nChannels,
@@ -554,8 +554,11 @@ public:
);
} else {
nSamplesOut = 0;
return 0;
}
_mutex.Unlock();
return result;
}
virtual void PullRenderData(
@@ -567,6 +570,8 @@ public:
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms
) override {
_mutex.Lock();
if (!_audioTransports.empty()) {
_audioTransports[_audioTransports.size() - 1]->PullRenderData(
bits_per_sample,
@@ -578,6 +583,8 @@ public:
ntp_time_ms
);
}
_mutex.Unlock();
}
public:
@@ -612,6 +619,7 @@ public:
private:
bool _isStarted = false;
std::vector<webrtc::AudioTransport *> _audioTransports;
webrtc::Mutex _mutex;
};
class WrappedChildAudioDeviceModule : public tgcalls::DefaultWrappedAudioDeviceModule {