Improve UniversalSoftwareVideoSource seeking
parent 23b399f94e
commit 3a1a825e69
@@ -551,7 +551,7 @@ final class FFMpegMediaFrameSourceContext: NSObject {
         for stream in [initializedState.videoStream, initializedState.audioStream] {
             if let stream = stream {
                 let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-                initializedState.avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
+                initializedState.avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
                 break
             }
         }
@@ -174,7 +174,7 @@ public final class SoftwareVideoSource {
             } else {
                 if let avFormatContext = self.avFormatContext, let videoStream = self.videoStream {
                     endOfStream = true
-                    avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0)
+                    avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
                 } else {
                     endOfStream = true
                     break
@@ -228,7 +228,7 @@ public final class SoftwareVideoSource {
     public func seek(timestamp: Double) {
         if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
             let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
+            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
             stream.decoder.reset()
         }
     }
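All three Swift call sites keep the timestamp-to-pts conversion visible in the context lines above. A minimal sketch of that conversion, assuming an illustrative 1/90000 stream timebase (the real code takes the timescale from stream.timebase):

import CoreMedia

let timestamp: Double = 12.5
let timescale: CMTimeScale = 90000 // assumed timebase for the example
let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: timescale)
// pts.value is the Int64 presentation timestamp handed to
// seekFrame(forStreamIndex:pts:positionOnKeyframe:), here 12.5 * 90000 = 1_125_000.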
@@ -6,6 +6,7 @@ import FFMpeg
 
 private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
     let context = Unmanaged<UniversalSoftwareVideoSourceImpl>.fromOpaque(userData!).takeUnretainedValue()
     
     let data: Signal<Data, NoError>
     
     let resourceSize: Int = context.size
@@ -217,17 +218,8 @@ private final class UniversalSoftwareVideoSourceImpl {
                     frames.append(frame)
                 }
             } else {
-                if endOfStream {
-                    break
-                } else {
-                    if let avFormatContext = self.avFormatContext, let videoStream = self.videoStream {
-                        endOfStream = true
-                        avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0)
-                    } else {
-                        endOfStream = true
-                        break
-                    }
-                }
+                endOfStream = true
+                break
             }
         }
 
@@ -240,31 +232,33 @@ private final class UniversalSoftwareVideoSourceImpl {
         return (frames.first, endOfStream)
     }
     
-    func readImage() -> (UIImage?, CGFloat, CGFloat, Bool) {
-        if let videoStream = self.videoStream {
-            self.currentNumberOfReads = 0
-            self.currentReadBytes = 0
-            for i in 0 ..< 10 {
-                let (decodableFrame, loop) = self.readDecodableFrame()
-                if let decodableFrame = decodableFrame {
-                    if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
-                        print("Frame rendered in \(self.currentNumberOfReads) reads, \(self.currentReadBytes) bytes, total frames read: \(i + 1)")
-                        return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
-                    }
-                }
-            }
-            return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
-        } else {
-            return (nil, 0.0, 1.0, false)
+    private func seek(timestamp: Double) {
+        if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
+            let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
+            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value, positionOnKeyframe: true)
+            stream.decoder.reset()
         }
     }
     
-    public func seek(timestamp: Double) {
-        if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
-            let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
-            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
-            stream.decoder.reset()
+    func readImage(at timestamp: Double) -> (UIImage?, CGFloat, CGFloat, Bool) {
+        guard let videoStream = self.videoStream, let _ = self.avFormatContext else {
+            return (nil, 0.0, 1.0, false)
         }
+
+        self.seek(timestamp: timestamp)
+
+        self.currentNumberOfReads = 0
+        self.currentReadBytes = 0
+        for i in 0 ..< 10 {
+            let (decodableFrame, loop) = self.readDecodableFrame()
+            if let decodableFrame = decodableFrame {
+                if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
+                    print("Frame rendered in \(self.currentNumberOfReads) reads, \(self.currentReadBytes) bytes, total frames read: \(i + 1)")
+                    return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+                }
+            }
+        }
+        return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
     }
 }
 
@@ -339,8 +333,7 @@ private final class UniversalSoftwareVideoSourceThread: NSObject {
             source.requiredDataIsNotLocallyAvailable = params.requiredDataIsNotLocallyAvailable
             source.state.set(.generatingFrame)
             let startTime = CFAbsoluteTimeGetCurrent()
-            source.seek(timestamp: params.timestamp)
-            let image = source.readImage().0
+            let image = source.readImage(at: params.timestamp).0
             params.completion(image)
             source.state.set(.ready)
             print("take frame: \(CFAbsoluteTimeGetCurrent() - startTime) s")
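The thread-side hunk above is the only caller of the reshaped API: the explicit seek(timestamp:) call disappears because readImage(at:) now performs the keyframe-aligned seek and decoder reset itself. A hypothetical helper showing the resulting call pattern (takeFrame is illustrative and not part of the commit; the tuple layout follows the diff above):

import UIKit

func takeFrame(from source: UniversalSoftwareVideoSourceImpl, at timestamp: Double) -> UIImage? {
    // Seek, decoder reset, and up to ten decode attempts all happen inside readImage(at:).
    let (image, _, _, _) = source.readImage(at: timestamp)
    return image
}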
@@ -37,7 +37,7 @@ extern int FFMpegCodecIdMPEG4;
 - (void)setIOContext:(FFMpegAVIOContext *)ioContext;
 - (bool)openInput;
 - (bool)findStreamInfo;
-- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts;
+- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts positionOnKeyframe:(bool)positionOnKeyframe;
 - (bool)readFrameIntoPacket:(FFMpegPacket *)packet;
 - (NSArray<NSNumber *> *)streamIndicesForType:(FFMpegAVFormatStreamType)type;
 - (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
@@ -54,8 +54,12 @@ int FFMpegCodecIdMPEG4 = AV_CODEC_ID_MPEG4;
     return result >= 0;
 }
 
-- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts {
-    av_seek_frame(_impl, streamIndex, pts, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
+- (void)seekFrameForStreamIndex:(int32_t)streamIndex pts:(int64_t)pts positionOnKeyframe:(bool)positionOnKeyframe {
+    int options = AVSEEK_FLAG_FRAME | AVSEEK_FLAG_BACKWARD;
+    if (!positionOnKeyframe) {
+        options |= AVSEEK_FLAG_ANY;
+    }
+    av_seek_frame(_impl, streamIndex, pts, options);
 }
 
 - (bool)readFrameIntoPacket:(FFMpegPacket *)packet {
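On the FFmpeg side, positionOnKeyframe maps onto AVSEEK_FLAG_ANY: when the caller does not require a keyframe, that flag is added and av_seek_frame may position on any frame; every Swift call site in this commit passes true, so the behaviour there is unchanged. A sketch of the same flag selection expressed in Swift, for illustration only (the raw values mirror libavformat's AVSEEK_FLAG_* constants):

// Raw values follow libavformat/avformat.h: BACKWARD = 1, ANY = 4, FRAME = 8.
struct AVSeekFlags: OptionSet {
    let rawValue: Int32
    static let backward = AVSeekFlags(rawValue: 1) // AVSEEK_FLAG_BACKWARD
    static let any      = AVSeekFlags(rawValue: 4) // AVSEEK_FLAG_ANY
    static let frame    = AVSeekFlags(rawValue: 8) // AVSEEK_FLAG_FRAME
}

func seekOptions(positionOnKeyframe: Bool) -> Int32 {
    var options: AVSeekFlags = [.frame, .backward]
    if !positionOnKeyframe {
        // Allow the demuxer to land on non-keyframes instead of stepping back to the previous keyframe.
        options.insert(.any)
    }
    return options.rawValue
}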