diff --git a/submodules/MediaPlayer/Sources/FFMpegMediaFrameSourceContext.swift b/submodules/MediaPlayer/Sources/FFMpegMediaFrameSourceContext.swift
index e70d62a4f4..5d6bd9bf39 100644
--- a/submodules/MediaPlayer/Sources/FFMpegMediaFrameSourceContext.swift
+++ b/submodules/MediaPlayer/Sources/FFMpegMediaFrameSourceContext.swift
@@ -551,7 +551,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
             for stream in [initializedState.videoStream, initializedState.audioStream] {
                 if let stream = stream {
                     let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-                    initializedState.avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
+                    initializedState.avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
                     break
                 }
             }
diff --git a/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift b/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift
index 4c8976df32..216070c84f 100644
--- a/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift
+++ b/submodules/MediaPlayer/Sources/SoftwareVideoSource.swift
@@ -174,7 +174,7 @@ public final class SoftwareVideoSource {
             } else {
                 if let avFormatContext = self.avFormatContext, let videoStream = self.videoStream {
                     endOfStream = true
-                    avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0)
+                    avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
                 } else {
                     endOfStream = true
                     break
@@ -228,7 +228,7 @@ public final class SoftwareVideoSource {
     public func seek(timestamp: Double) {
         if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
             let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
+            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
             stream.decoder.reset()
         }
     }
diff --git a/submodules/MediaPlayer/Sources/UniversalSoftwareVideoSource.swift b/submodules/MediaPlayer/Sources/UniversalSoftwareVideoSource.swift
index e2eb160c36..d942ef61d4 100644
--- a/submodules/MediaPlayer/Sources/UniversalSoftwareVideoSource.swift
+++ b/submodules/MediaPlayer/Sources/UniversalSoftwareVideoSource.swift
@@ -6,6 +6,7 @@ import FFMpeg
 
 private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
     let context = Unmanaged<UniversalSoftwareVideoSourceImpl>.fromOpaque(userData!).takeUnretainedValue()
+    let data: Signal<(Data, Bool), NoError>
     
     let resourceSize: Int = context.size
     
@@ -217,17 +218,8 @@ private final class UniversalSoftwareVideoSourceImpl {
                     frames.append(frame)
                 }
             } else {
-                if endOfStream {
-                    break
-                } else {
-                    if let avFormatContext = self.avFormatContext, let videoStream = self.videoStream {
-                        endOfStream = true
-                        avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0)
-                    } else {
-                        endOfStream = true
-                        break
-                    }
-                }
+                endOfStream = true
+                break
             }
         }
 
@@ -240,31 +232,33 @@
         return (frames.first, endOfStream)
     }
     
-    func readImage() -> (UIImage?, CGFloat, CGFloat, Bool) {
-        if let videoStream = self.videoStream {
-            self.currentNumberOfReads = 0
-            self.currentReadBytes = 0
-            for i in 0 ..< 10 {
-                let (decodableFrame, loop) = self.readDecodableFrame()
-                if let decodableFrame = decodableFrame {
-                    if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
-                        print("Frame rendered in \(self.currentNumberOfReads) reads, \(self.currentReadBytes) bytes, total frames read: \(i + 1)")
-                        return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
-                    }
-                }
-            }
-            return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
-        } else {
-            return (nil, 0.0, 1.0, false)
+    private func seek(timestamp: Double) {
+        if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
+            let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
+            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
+            stream.decoder.reset()
         }
     }
     
-    public func seek(timestamp: Double) {
-        if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
-            let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
-            stream.decoder.reset()
+    func readImage(at timestamp: Double) -> (UIImage?, CGFloat, CGFloat, Bool) {
+        guard let videoStream = self.videoStream, let _ = self.avFormatContext else {
+            return (nil, 0.0, 1.0, false)
         }
+        
+        self.seek(timestamp: timestamp)
+        
+        self.currentNumberOfReads = 0
+        self.currentReadBytes = 0
+        for i in 0 ..< 10 {
+            let (decodableFrame, loop) = self.readDecodableFrame()
+            if let decodableFrame = decodableFrame {
+                if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
+                    print("Frame rendered in \(self.currentNumberOfReads) reads, \(self.currentReadBytes) bytes, total frames read: \(i + 1)")
+                    return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+                }
+            }
+        }
+        return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
     }
 }
 
@@ -339,8 +333,7 @@ private final class UniversalSoftwareVideoSourceThread: NSObject {
                 source.requiredDataIsNotLocallyAvailable = params.requiredDataIsNotLocallyAvailable
                 source.state.set(.generatingFrame)
                 let startTime = CFAbsoluteTimeGetCurrent()
-                source.seek(timestamp: params.timestamp)
-                let image = source.readImage().0
+                let image = source.readImage(at: params.timestamp).0
                 params.completion(image)
                 source.state.set(.ready)
                 print("take frame: \(CFAbsoluteTimeGetCurrent() - startTime) s")
diff --git a/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.h b/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.h
index 2daa54e8a9..33db0a7308 100644
--- a/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.h
+++ b/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.h
@@ -37,7 +37,7 @@ extern int FFMpegCodecIdMPEG4;
 - (void)setIOContext:(FFMpegAVIOContext *)ioContext;
 - (bool)openInput;
 - (bool)findStreamInfo;
-- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts;
+- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts positionOnKeyframe:(bool)positionOnKeyframe;
 - (bool)readFrameIntoPacket:(FFMpegPacket *)packet;
 - (NSArray<NSNumber *> *)streamIndicesForType:(FFMpegAVFormatStreamType)type;
 - (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
diff --git a/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.m b/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.m
index 86c0b9174f..76c1987031 100644
--- a/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.m
+++ b/submodules/ffmpeg/FFMpeg/FFMpegAVFormatContext.m
@@ -54,8 +54,12 @@ int FFMpegCodecIdMPEG4 = AV_CODEC_ID_MPEG4;
     return result >= 0;
 }
 
-- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts {
-    av_seek_frame(_impl, streamIndex, pts, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
+- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts positionOnKeyframe:(bool)positionOnKeyframe {
+    int options = AVSEEK_FLAG_FRAME | AVSEEK_FLAG_BACKWARD;
+    if (!positionOnKeyframe) {
+        options |= AVSEEK_FLAG_ANY;
+    }
+    av_seek_frame(_impl, streamIndex, pts, options);
 }
 
 - (bool)readFrameIntoPacket:(FFMpegPacket *)packet {