Video scrubbing preview

This commit is contained in:
Peter 2019-07-12 22:12:29 +04:00
parent 464738d8a8
commit baf2dbbb64
12 changed files with 708 additions and 6 deletions

View File

@ -1,9 +1,22 @@
import CoreMedia
import Accelerate
import FFMpeg
import Accelerate
private let bufferCount = 32
/// Color space used for rendered preview frames: Display P3 where the OS
/// supports it (iOS 9.3+), otherwise plain device RGB.
private let deviceColorSpace: CGColorSpace = {
    if #available(iOSApplicationExtension 9.3, iOS 9.3, *), let p3ColorSpace = CGColorSpace(name: CGColorSpace.displayP3) {
        return p3ColorSpace
    }
    return CGColorSpaceCreateDeviceRGB()
}()
public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
private let codecContext: FFMpegAVCodecContext
@ -63,6 +76,17 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
return nil
}
/// Feeds the frame's packet into the decoder and, if a complete video frame
/// is produced, converts it to a UIImage.
/// Returns nil when the packet is rejected or no frame is available yet.
public func render(frame: MediaTrackDecodableFrame) -> UIImage? {
    guard frame.packet.send(toDecoder: self.codecContext) == 0 else {
        return nil
    }
    guard self.codecContext.receive(into: self.videoFrame) else {
        return nil
    }
    return convertVideoFrameToImage(self.videoFrame)
}
public func takeRemainingFrame() -> MediaTrackFrame? {
if !self.delayedFrames.isEmpty {
var minFrameIndex = 0
@ -79,6 +103,53 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
}
}
/// Converts a decoded 4:2:0 YpCbCr FFmpeg frame into an ARGB `UIImage`
/// using vImage.
///
/// - Parameter frame: Decoded frame whose planes 0/1/2 hold Y, Cb and Cr.
/// - Returns: The converted image, or nil if any vImage/CoreGraphics step fails.
private func convertVideoFrameToImage(_ frame: FFMpegAVFrame) -> UIImage? {
    var info = vImage_YpCbCrToARGB()
    // Full-range vs. video-range input needs different bias/range constants.
    var pixelRange: vImage_YpCbCrPixelRange
    switch frame.colorRange {
    case .full:
        pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 0, CbCr_bias: 128, YpRangeMax: 255, CbCrRangeMax: 255, YpMax: 255, YpMin: 0, CbCrMax: 255, CbCrMin: 0)
    default:
        pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 16, CbCr_bias: 128, YpRangeMax: 235, CbCrRangeMax: 240, YpMax: 255, YpMin: 0, CbCrMax: 255, CbCrMin: 0)
    }
    var result = kvImageNoError
    result = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_709_2, &pixelRange, &info, kvImage420Yp8_Cb8_Cr8, kvImageARGB8888, 0)
    if result != kvImageNoError {
        return nil
    }
    var srcYp = vImage_Buffer(data: frame.data[0], height: vImagePixelCount(frame.height), width: vImagePixelCount(frame.width), rowBytes: Int(frame.lineSize[0]))
    // NOTE(review): for 4:2:0 the chroma planes are half-height as well as
    // half-width; the full-height value here mirrors the original code —
    // confirm vImage only reads dest-height-derived rows before changing.
    var srcCb = vImage_Buffer(data: frame.data[1], height: vImagePixelCount(frame.height), width: vImagePixelCount(frame.width / 2), rowBytes: Int(frame.lineSize[1]))
    var srcCr = vImage_Buffer(data: frame.data[2], height: vImagePixelCount(frame.height), width: vImagePixelCount(frame.width / 2), rowBytes: Int(frame.lineSize[2]))
    // Destination rows padded up to a 16-byte multiple.
    let argbBytesPerRow = (4 * Int(frame.width) + 15) & (~15)
    let argbLength = argbBytesPerRow * Int(frame.height)
    let argb = malloc(argbLength)!
    // On success the provider takes ownership of `argb` and frees it via
    // releaseData when the provider is released.
    guard let provider = CGDataProvider(dataInfo: argb, data: argb, size: argbLength, releaseData: { bytes, _, _ in
        free(bytes)
    }) else {
        // Provider creation failed, so ownership was never transferred;
        // free the buffer here instead of leaking it (bug fix).
        free(argb)
        return nil
    }
    var dst = vImage_Buffer(data: argb, height: vImagePixelCount(frame.height), width: vImagePixelCount(frame.width), rowBytes: argbBytesPerRow)
    // Reorder ARGB output to match the little-endian bitmap layout below.
    var permuteMap: [UInt8] = [3, 2, 1, 0]
    result = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr, &dst, &info, &permuteMap, 0x00, 0)
    if result != kvImageNoError {
        // `argb` is owned by `provider` at this point; it is freed when the
        // provider deallocates.
        return nil
    }
    let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.noneSkipFirst.rawValue)
    guard let image = CGImage(width: Int(frame.width), height: Int(frame.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: argbBytesPerRow, space: deviceColorSpace, bitmapInfo: bitmapInfo, provider: provider, decode: nil, shouldInterpolate: false, intent: .defaultIntent) else {
        return nil
    }
    return UIImage(cgImage: image, scale: 1.0, orientation: .up)
}
private func convertVideoFrame(_ frame: FFMpegAVFrame, pts: CMTime, dts: CMTime, duration: CMTime) -> MediaTrackFrame? {
if frame.data[0] == nil {
return nil
@ -100,9 +171,6 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
ioSurfaceProperties["IOSurfaceIsGlobal"] = true as NSNumber
var options: [String: Any] = [kCVPixelBufferBytesPerRowAlignmentKey as String: frame.lineSize[0] as NSNumber]
/*if #available(iOSApplicationExtension 9.0, iOS 9.0, *) {
options[kCVPixelBufferOpenGLESTextureCacheCompatibilityKey as String] = true as NSNumber
}*/
options[kCVPixelBufferIOSurfacePropertiesKey as String] = ioSurfaceProperties
CVPixelBufferCreate(kCFAllocatorDefault,

View File

@ -0,0 +1,131 @@
import Foundation
import SwiftSignalKit
import Postbox
import TelegramCore
import FFMpeg
/// Holds the software video source used to extract scrubbing preview frames.
/// Confined to the preview queue via QueueLocalObject.
private final class FramePreviewContext {
let source: UniversalSoftwareVideoSource
init(source: UniversalSoftwareVideoSource) {
self.source = source
}
}
/// Creates the software video source and emits a queue-local
/// FramePreviewContext once the source reports that it is ready.
private func initializedPreviewContext(queue: Queue, postbox: Postbox, fileReference: FileMediaReference) -> Signal<QueueLocalObject<FramePreviewContext>, NoError> {
    return Signal { subscriber in
        let videoSource = UniversalSoftwareVideoSource(mediaBox: postbox.mediaBox, fileReference: fileReference)
        let becameReady = videoSource.ready
        |> filter { $0 }
        let readinessDisposable = becameReady.start(next: { _ in
            let localContext = QueueLocalObject(queue: queue, generate: {
                FramePreviewContext(source: videoSource)
            })
            subscriber.putNext(localContext)
        })
        return ActionDisposable {
            readinessDisposable.dispose()
        }
    }
}
/// Queue-confined state for generating scrubbing preview frames.
/// Coalesces rapid requests: while one frame is being generated, only the
/// latest requested timestamp is remembered and generated afterwards.
private final class MediaPlayerFramePreviewImpl {
private let queue: Queue
private let context: Promise<QueueLocalObject<FramePreviewContext>>
private let currentFrameDisposable = MetaDisposable()
// Timestamp currently being generated, if any.
private var currentFrameTimestamp: Double?
// Most recently requested timestamp, deferred until the current one finishes.
private var nextFrameTimestamp: Double?
// Emits every generated frame image to outside observers.
fileprivate let framePipe = ValuePipe<UIImage>()
init(queue: Queue, postbox: Postbox, fileReference: FileMediaReference) {
self.queue = queue
self.context = Promise()
self.context.set(initializedPreviewContext(queue: queue, postbox: postbox, fileReference: fileReference))
}
deinit {
// This object is queue-local; deallocation must happen on its queue.
assert(self.queue.isCurrent())
self.currentFrameDisposable.dispose()
}
/// Requests a preview frame for `timestamp`. If a generation is already in
/// flight, the request is deferred (replacing any previously deferred one).
func generateFrame(at timestamp: Double) {
if self.currentFrameTimestamp != nil {
self.nextFrameTimestamp = timestamp
return
}
self.currentFrameTimestamp = timestamp
let queue = self.queue
let takeDisposable = MetaDisposable()
let disposable = (self.context.get()
|> take(1)).start(next: { [weak self] context in
queue.async {
// NOTE(review): if this early return is hit, currentFrameTimestamp is
// never cleared, which would stall all further generation — confirm
// the context's queue always matches this queue.
guard context.queue === queue else {
return
}
context.with { context in
let disposable = context.source.takeFrame(at: timestamp).start(next: { image in
guard let strongSelf = self else {
return
}
if let image = image {
strongSelf.framePipe.putNext(image)
}
// Generation finished; kick off the deferred request, if any.
strongSelf.currentFrameTimestamp = nil
if let nextFrameTimestamp = strongSelf.nextFrameTimestamp {
strongSelf.nextFrameTimestamp = nil
strongSelf.generateFrame(at: nextFrameTimestamp)
}
})
takeDisposable.set(disposable)
}
}
})
// Disposing cancels both the context fetch and the in-flight frame take.
self.currentFrameDisposable.set(ActionDisposable {
takeDisposable.dispose()
disposable.dispose()
})
}
/// Drops any deferred request and cancels the in-flight generation.
func cancelPendingFrames() {
self.nextFrameTimestamp = nil
self.currentFrameTimestamp = nil
self.currentFrameDisposable.set(nil)
}
}
/// Public facade for generating video scrubbing preview frames.
/// All real work happens in MediaPlayerFramePreviewImpl on a private queue.
public final class MediaPlayerFramePreview {
    private let queue: Queue
    private let impl: QueueLocalObject<MediaPlayerFramePreviewImpl>

    /// Emits each frame image produced in response to `generateFrame(at:)`.
    public var generatedFrames: Signal<UIImage?, NoError> {
        return Signal { subscriber in
            let frameDisposable = MetaDisposable()
            self.impl.with { implementation in
                let pipeDisposable = implementation.framePipe.signal().start(next: { frame in
                    subscriber.putNext(frame)
                })
                frameDisposable.set(pipeDisposable)
            }
            return frameDisposable
        }
    }

    public init(postbox: Postbox, fileReference: FileMediaReference) {
        let queue = Queue()
        self.queue = queue
        self.impl = QueueLocalObject(queue: queue, generate: {
            return MediaPlayerFramePreviewImpl(queue: queue, postbox: postbox, fileReference: fileReference)
        })
    }

    /// Requests a preview frame for the given timestamp (seconds).
    public func generateFrame(at timestamp: Double) {
        self.impl.with { implementation in
            implementation.generateFrame(at: timestamp)
        }
    }

    /// Cancels any in-flight or deferred frame generation.
    public func cancelPendingFrames() {
        self.impl.with { implementation in
            implementation.cancelPendingFrames()
        }
    }
}

View File

@ -188,6 +188,7 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
public var playbackStatusUpdated: ((MediaPlayerPlaybackStatus?) -> Void)?
public var playerStatusUpdated: ((MediaPlayerStatus?) -> Void)?
public var seek: ((Double) -> Void)?
public var update: ((Double?, CGFloat) -> Void)?
private let _scrubbingTimestamp = Promise<Double?>(nil)
public var scrubbingTimestamp: Signal<Double?, NoError> {
@ -378,6 +379,7 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
strongSelf.scrubbingBeginTimestamp = statusValue.timestamp
strongSelf.scrubbingTimestampValue = statusValue.timestamp
strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
strongSelf.update?(strongSelf.scrubbingTimestampValue, CGFloat(statusValue.timestamp / statusValue.duration))
strongSelf.updateProgressAnimations()
}
}
@ -385,8 +387,10 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
handleNodeContainer.updateScrubbing = { [weak self] addedFraction in
if let strongSelf = self {
if let statusValue = strongSelf.statusValue, let scrubbingBeginTimestamp = strongSelf.scrubbingBeginTimestamp, Double(0.0).isLess(than: statusValue.duration) {
strongSelf.scrubbingTimestampValue = max(0.0, min(statusValue.duration, scrubbingBeginTimestamp + statusValue.duration * Double(addedFraction)))
let timestampValue = max(0.0, min(statusValue.duration, scrubbingBeginTimestamp + statusValue.duration * Double(addedFraction)))
strongSelf.scrubbingTimestampValue = timestampValue
strongSelf._scrubbingTimestamp.set(.single(strongSelf.scrubbingTimestampValue))
strongSelf.update?(timestampValue, CGFloat(timestampValue / statusValue.duration))
strongSelf.updateProgressAnimations()
}
}
@ -408,6 +412,7 @@ public final class MediaPlayerScrubbingNode: ASDisplayNode {
}
strongSelf.seek?(scrubbingTimestampValue)
}
strongSelf.update?(nil, 0.0)
strongSelf.updateProgressAnimations()
}
}

View File

@ -0,0 +1,368 @@
import Foundation
import SwiftSignalKit
import Postbox
import TelegramCore
import FFMpeg
/// FFmpeg avio read callback: synchronously reads up to `bufferSize` bytes of
/// the media resource at the context's current reading offset.
///
/// Blocks the calling (decoding) thread until either the data is available in
/// the media box or the read is cancelled via `context.cancelRead`.
/// - Returns: The number of bytes copied into `buffer`, or 0 on cancellation
///   or when no more data remains.
private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
    let context = Unmanaged<UniversalSoftwareVideoSourceImpl>.fromOpaque(userData!).takeUnretainedValue()
    let resourceSize: Int = context.size
    // Clamp to the remaining bytes, and guard against a negative count when
    // the offset has been seeked past the end of the resource — the original
    // code would then build an invalid Range and trap.
    let readCount = max(0, min(resourceSize - context.readingOffset, Int(bufferSize)))
    if readCount == 0 {
        // NOTE(review): FFmpeg treats 0 as "nothing read"; some versions may
        // retry — confirm an explicit EOF return is not needed here.
        return 0
    }
    let requestRange: Range<Int> = context.readingOffset ..< (context.readingOffset + readCount)
    let semaphore = DispatchSemaphore(value: 0)
    let data: Signal<Data, NoError> = context.mediaBox.resourceData(context.fileReference.media.resource, size: context.size, in: requestRange, mode: .complete)
    var fetchedData: Data?
    let disposable = data.start(next: { data in
        if data.count == readCount {
            fetchedData = data
            semaphore.signal()
        }
    })
    // A pending cancellation unblocks the wait below, leaving fetchedData nil.
    let cancelDisposable = context.cancelRead.start(next: { value in
        if value {
            semaphore.signal()
        }
    })
    semaphore.wait()
    disposable.dispose()
    cancelDisposable.dispose()
    if let fetchedData = fetchedData {
        fetchedData.withUnsafeBytes { (bytes: UnsafePointer<UInt8>) -> Void in
            memcpy(buffer, bytes, fetchedData.count)
        }
        let fetchedCount = Int32(fetchedData.count)
        context.readingOffset += Int(fetchedCount)
        return fetchedCount
    } else {
        return 0
    }
}
/// FFmpeg avio seek callback: reports the total resource size when
/// FFMPEG_AVSEEK_SIZE is requested, otherwise moves the reading offset.
private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {
    let context = Unmanaged<UniversalSoftwareVideoSourceImpl>.fromOpaque(userData!).takeUnretainedValue()
    guard (whence & FFMPEG_AVSEEK_SIZE) == 0 else {
        return Int64(context.size)
    }
    context.readingOffset = Int(offset)
    return offset
}
/// Immutable description of the selected video stream together with its
/// dedicated frame decoder.
private final class SoftwareVideoStream {
// Stream index within the container.
let index: Int
// Fallback per-frame duration derived from the stream's frame rate.
let fps: CMTime
// Time base used to interpret packet pts/dts values.
let timebase: CMTime
// Total stream duration in the stream's time base.
let duration: CMTime
// Decoder bound to this stream's codec context.
let decoder: FFMpegMediaVideoFrameDecoder
// Display rotation (from stream metadata) and width/height aspect ratio.
let rotationAngle: Double
let aspect: Double
init(index: Int, fps: CMTime, timebase: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
self.index = index
self.fps = fps
self.timebase = timebase
self.duration = duration
self.decoder = decoder
self.rotationAngle = rotationAngle
self.aspect = aspect
}
}
/// Thread-confined FFmpeg demuxer/decoder used to extract single frames from
/// a (possibly partially downloaded) video file. All reads go through the
/// media box via blocking avio callbacks, so every method must run on the
/// dedicated source thread, never the main thread.
private final class UniversalSoftwareVideoSourceImpl {
    fileprivate let mediaBox: MediaBox
    fileprivate let fileReference: FileMediaReference
    fileprivate let size: Int
    fileprivate let state: ValuePromise<UniversalSoftwareVideoSourceState>
    fileprivate var avIoContext: FFMpegAVIOContext!
    fileprivate var avFormatContext: FFMpegAVFormatContext!
    fileprivate var videoStream: SoftwareVideoStream!
    // Current byte offset within the resource; advanced by readPacketCallback
    // and moved by seekCallback.
    fileprivate var readingOffset: Int = 0
    // When this signal fires with true, any blocking read in progress is
    // unblocked so initialization or frame generation can be cancelled.
    fileprivate var cancelRead: Signal<Bool, NoError>

    /// Opens the container, locates the first decodable (non-attached-picture)
    /// video stream and prepares its decoder.
    ///
    /// Progress is reported through `state`. Bug fix: every failure path now
    /// publishes `.failed` — previously observers were left stuck on
    /// `.initializing` forever when initialization did not succeed.
    init?(mediaBox: MediaBox, fileReference: FileMediaReference, state: ValuePromise<UniversalSoftwareVideoSourceState>, cancelInitialization: Signal<Bool, NoError>) {
        guard let size = fileReference.media.size else {
            state.set(.failed)
            return nil
        }
        self.mediaBox = mediaBox
        self.fileReference = fileReference
        self.size = size
        self.state = state
        state.set(.initializing)
        self.cancelRead = cancelInitialization
        let ioBufferSize = 64 * 1024
        guard let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, seek: seekCallback) else {
            state.set(.failed)
            return nil
        }
        self.avIoContext = avIoContext
        let avFormatContext = FFMpegAVFormatContext()
        avFormatContext.setIO(avIoContext)
        if !avFormatContext.openInput() {
            state.set(.failed)
            return nil
        }
        if !avFormatContext.findStreamInfo() {
            state.set(.failed)
            return nil
        }
        self.avFormatContext = avFormatContext
        var videoStream: SoftwareVideoStream?
        for streamIndexNumber in avFormatContext.streamIndices(for: FFMpegAVFormatStreamTypeVideo) {
            let streamIndex = streamIndexNumber.int32Value
            // Skip embedded cover-art streams.
            if avFormatContext.isAttachedPic(atStreamIndex: streamIndex) {
                continue
            }
            let codecId = avFormatContext.codecId(atStreamIndex: streamIndex)
            let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
            let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
            let duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
            let metrics = avFormatContext.metricsForStream(at: streamIndex)
            let rotationAngle: Double = metrics.rotationAngle
            let aspect = Double(metrics.width) / Double(metrics.height)
            if let codec = FFMpegAVCodec.find(forId: codecId) {
                let codecContext = FFMpegAVCodecContext(codec: codec)
                if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
                    if codecContext.open() {
                        videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
                        break
                    }
                }
            }
        }
        if let videoStream = videoStream {
            self.videoStream = videoStream
        } else {
            state.set(.failed)
            return nil
        }
        state.set(.ready)
    }

    /// Reads the next raw packet from the container; nil at end of stream or
    /// on read error.
    private func readPacketInternal() -> FFMpegPacket? {
        guard let avFormatContext = self.avFormatContext else {
            return nil
        }
        let packet = FFMpegPacket()
        if avFormatContext.readFrame(into: packet) {
            return packet
        } else {
            return nil
        }
    }

    /// Reads packets until one belonging to the selected video stream is found.
    ///
    /// - Returns: The decodable frame (nil if none could be read) and a flag
    ///   indicating end of stream — in which case the demuxer has been rewound
    ///   to the start and the decoder reset.
    func readDecodableFrame() -> (MediaTrackDecodableFrame?, Bool) {
        var frames: [MediaTrackDecodableFrame] = []
        var endOfStream = false
        while frames.isEmpty {
            if let packet = self.readPacketInternal() {
                if let videoStream = videoStream, Int(packet.streamIndex) == videoStream.index {
                    let packetPts = packet.pts
                    let pts = CMTimeMake(value: packetPts, timescale: videoStream.timebase.timescale)
                    let dts = CMTimeMake(value: packet.dts, timescale: videoStream.timebase.timescale)
                    let duration: CMTime
                    let frameDuration = packet.duration
                    if frameDuration != 0 {
                        duration = CMTimeMake(value: frameDuration * videoStream.timebase.value, timescale: videoStream.timebase.timescale)
                    } else {
                        // No per-packet duration; fall back to the stream frame rate.
                        duration = videoStream.fps
                    }
                    let frame = MediaTrackDecodableFrame(type: .video, packet: packet, pts: pts, dts: dts, duration: duration)
                    frames.append(frame)
                }
            } else {
                if endOfStream {
                    break
                } else {
                    // First read failure: rewind to the beginning once and retry.
                    if let avFormatContext = self.avFormatContext, let videoStream = self.videoStream {
                        endOfStream = true
                        avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0)
                    } else {
                        endOfStream = true
                        break
                    }
                }
            }
        }
        if endOfStream {
            if let videoStream = self.videoStream {
                videoStream.decoder.reset()
            }
        }
        return (frames.first, endOfStream)
    }

    /// Decodes and renders a single image at the current position, trying up
    /// to 10 packets (a decoder may need several packets before producing a
    /// frame).
    ///
    /// - Returns: The image (nil on failure), rotation angle, aspect ratio,
    ///   and whether the stream looped back to the start.
    func readImage() -> (UIImage?, CGFloat, CGFloat, Bool) {
        if let videoStream = self.videoStream {
            for _ in 0 ..< 10 {
                let (decodableFrame, loop) = self.readDecodableFrame()
                if let decodableFrame = decodableFrame {
                    if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
                        return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
                    }
                }
            }
            return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
        } else {
            return (nil, 0.0, 1.0, false)
        }
    }

    /// Seeks the video stream to `timestamp` (seconds) and resets the decoder
    /// so no stale reference frames are used.
    public func seek(timestamp: Double) {
        if let stream = self.videoStream, let avFormatContext = self.avFormatContext {
            let pts = CMTimeMakeWithSeconds(timestamp, preferredTimescale: stream.timebase.timescale)
            avFormatContext.seekFrame(forStreamIndex: Int32(stream.index), pts: pts.value)
            stream.decoder.reset()
        }
    }
}
/// Lifecycle of the software video source, published through a ValuePromise
/// so observers (e.g. the `ready` signal) can react to state changes.
private enum UniversalSoftwareVideoSourceState {
case initializing
case failed
case ready
case generatingFrame
}
/// Payload handed to the source thread's entry point. NSObject because it is
/// passed through perform(_:on:with:)-style Objective-C APIs.
private final class UniversalSoftwareVideoSourceThreadParams: NSObject {
let mediaBox: MediaBox
let fileReference: FileMediaReference
// Receives lifecycle updates from the thread-owned source implementation.
let state: ValuePromise<UniversalSoftwareVideoSourceState>
// Fires with true to abort a blocking read during initialization.
let cancelInitialization: Signal<Bool, NoError>
init(mediaBox: MediaBox, fileReference: FileMediaReference, state: ValuePromise<UniversalSoftwareVideoSourceState>, cancelInitialization: Signal<Bool, NoError>) {
self.mediaBox = mediaBox
self.fileReference = fileReference
self.state = state
self.cancelInitialization = cancelInitialization
}
}
/// Payload for a single take-frame request executed on the source thread.
/// NSObject because it is passed through perform(_:on:with:)-style APIs.
private final class UniversalSoftwareVideoSourceTakeFrameParams: NSObject {
// Target position in seconds.
let timestamp: Double
// Called on the source thread with the decoded image, or nil on failure.
let completion: (UIImage?) -> Void
// Fires with true to abort a blocking read for this request.
let cancel: Signal<Bool, NoError>
init(timestamp: Double, completion: @escaping (UIImage?) -> Void, cancel: Signal<Bool, NoError>) {
self.timestamp = timestamp
self.completion = completion
self.cancel = cancel
}
}
/// Static entry points executed on the dedicated video-source thread via
/// perform(_:on:with:waitUntilDone:). State is kept in the thread dictionary.
private final class UniversalSoftwareVideoSourceThread: NSObject {
    /// Thread entry point: installs a never-firing timer to keep the run loop
    /// alive, constructs the source, and services perform(...) requests until
    /// a stop marker appears in the thread dictionary.
    @objc static func entryPoint(_ params: UniversalSoftwareVideoSourceThreadParams) {
        let runLoop = RunLoop.current
        // A run loop with no input sources exits immediately; this far-future
        // timer keeps it spinning so perform(_:on:) calls can be delivered.
        let timer = Timer(fireAt: .distantFuture, interval: 0.0, target: UniversalSoftwareVideoSourceThread.self, selector: #selector(UniversalSoftwareVideoSourceThread.none), userInfo: nil, repeats: false)
        runLoop.add(timer, forMode: .common)
        let source = UniversalSoftwareVideoSourceImpl(mediaBox: params.mediaBox, fileReference: params.fileReference, state: params.state, cancelInitialization: params.cancelInitialization)
        Thread.current.threadDictionary["source"] = source
        while true {
            runLoop.run(mode: .default, before: .distantFuture)
            if Thread.current.threadDictionary["UniversalSoftwareVideoSourceThread_stop"] != nil {
                break
            }
        }
        Thread.current.threadDictionary.removeObject(forKey: "source")
    }

    /// No-op target for the keep-alive timer.
    @objc static func none() {
    }

    /// Marks the thread for termination; the entry-point loop breaks on the
    /// next run-loop wakeup.
    @objc static func stop() {
        Thread.current.threadDictionary["UniversalSoftwareVideoSourceThread_stop"] = "true"
    }

    /// Seeks to the requested timestamp, decodes one frame, and reports the
    /// result (nil on failure) through the params' completion closure.
    @objc static func takeFrame(_ params: UniversalSoftwareVideoSourceTakeFrameParams) {
        guard let source = Thread.current.threadDictionary["source"] as? UniversalSoftwareVideoSourceImpl else {
            params.completion(nil)
            return
        }
        // Re-point cancellation at this request so disposal unblocks any read.
        source.cancelRead = params.cancel
        source.state.set(.generatingFrame)
        #if DEBUG
        let startTime = CFAbsoluteTimeGetCurrent()
        #endif
        source.seek(timestamp: params.timestamp)
        let image = source.readImage().0
        params.completion(image)
        source.state.set(.ready)
        #if DEBUG
        // Timing diagnostics only; previously this printed in release builds.
        print("take frame: \(CFAbsoluteTimeGetCurrent() - startTime) s")
        #endif
    }
}
/// Extracts single frames from a video file on a dedicated background thread.
/// The thread hosts a run loop so that requests can be delivered to it via
/// perform(_:on:with:waitUntilDone:).
final class UniversalSoftwareVideoSource {
private let thread: Thread
private let stateValue: ValuePromise<UniversalSoftwareVideoSourceState> = ValuePromise(.initializing, ignoreRepeated: true)
private let cancelInitialization: ValuePromise<Bool> = ValuePromise(false)
/// True once the underlying source has finished initializing and can serve
/// frames; false for every other state.
var ready: Signal<Bool, NoError> {
return self.stateValue.get()
|> map { value -> Bool in
switch value {
case .ready:
return true
default:
return false
}
}
}
init(mediaBox: MediaBox, fileReference: FileMediaReference) {
self.thread = Thread(target: UniversalSoftwareVideoSourceThread.self, selector: #selector(UniversalSoftwareVideoSourceThread.entryPoint(_:)), object: UniversalSoftwareVideoSourceThreadParams(mediaBox: mediaBox, fileReference: fileReference, state: self.stateValue, cancelInitialization: self.cancelInitialization.get()))
self.thread.name = "UniversalSoftwareVideoSource"
self.thread.start()
}
deinit {
// Schedule the thread's run loop to break out, then unblock any read that
// is still waiting during initialization.
UniversalSoftwareVideoSourceThread.self.perform(#selector(UniversalSoftwareVideoSourceThread.stop), on: self.thread, with: nil, waitUntilDone: false)
self.cancelInitialization.set(true)
}
/// Requests a frame at `timestamp` (seconds). The signal emits the image
/// (or nil on failure) and completes. Disposing the signal cancels any
/// blocking read still in progress for this request on the source thread.
public func takeFrame(at timestamp: Double) -> Signal<UIImage?, NoError> {
return Signal { subscriber in
let cancel = ValuePromise<Bool>(false)
UniversalSoftwareVideoSourceThread.self.perform(#selector(UniversalSoftwareVideoSourceThread.takeFrame(_:)), on: self.thread, with: UniversalSoftwareVideoSourceTakeFrameParams(timestamp: timestamp, completion: { image in
subscriber.putNext(image)
subscriber.putCompletion()
}, cancel: cancel.get()), waitUntilDone: false)
return ActionDisposable {
cancel.set(true)
}
}
}
}

View File

@ -7,6 +7,8 @@
objects = {
/* Begin PBXBuildFile section */
D03B054022D8866A0000BE1A /* MediaPlayerFramePreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = D03B053F22D8866A0000BE1A /* MediaPlayerFramePreview.swift */; };
D03B054222D888A00000BE1A /* SoftwareVideoSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = D03B054122D888A00000BE1A /* SoftwareVideoSource.swift */; };
D0750C6E22B28E6600BE5F6E /* RingBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = D0750C6B22B28E6500BE5F6E /* RingBuffer.m */; };
D0750C6F22B28E6600BE5F6E /* RingByteBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0750C6C22B28E6600BE5F6E /* RingByteBuffer.swift */; };
D0750C7022B28E6600BE5F6E /* RingBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = D0750C6D22B28E6600BE5F6E /* RingBuffer.h */; settings = {ATTRIBUTES = (Public, ); }; };
@ -37,9 +39,12 @@
D0AE325B22B286A70058D3BC /* MediaTrackDecodableFrame.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0AE324922B286A70058D3BC /* MediaTrackDecodableFrame.swift */; };
D0AE325C22B286A70058D3BC /* MediaTrackFrame.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0AE324A22B286A70058D3BC /* MediaTrackFrame.swift */; };
D0AE325E22B286C30058D3BC /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0AE325D22B286C30058D3BC /* AVFoundation.framework */; };
D0E8B10C22D8B7E800C82570 /* UniversalSoftwareVideoSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0E8B10B22D8B7E800C82570 /* UniversalSoftwareVideoSource.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
D03B053F22D8866A0000BE1A /* MediaPlayerFramePreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MediaPlayerFramePreview.swift; sourceTree = "<group>"; };
D03B054122D888A00000BE1A /* SoftwareVideoSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SoftwareVideoSource.swift; sourceTree = "<group>"; };
D0750C6B22B28E6500BE5F6E /* RingBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RingBuffer.m; sourceTree = "<group>"; };
D0750C6C22B28E6600BE5F6E /* RingByteBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RingByteBuffer.swift; sourceTree = "<group>"; };
D0750C6D22B28E6600BE5F6E /* RingBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RingBuffer.h; sourceTree = "<group>"; };
@ -72,6 +77,7 @@
D0AE324922B286A70058D3BC /* MediaTrackDecodableFrame.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaTrackDecodableFrame.swift; sourceTree = "<group>"; };
D0AE324A22B286A70058D3BC /* MediaTrackFrame.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaTrackFrame.swift; sourceTree = "<group>"; };
D0AE325D22B286C30058D3BC /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
D0E8B10B22D8B7E800C82570 /* UniversalSoftwareVideoSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UniversalSoftwareVideoSource.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -135,7 +141,10 @@
D0AE323E22B286A60058D3BC /* MediaTrackFrameBuffer.swift */,
D0AE324222B286A60058D3BC /* MediaTrackFrameDecoder.swift */,
D0AE323C22B286A50058D3BC /* VideoPlayerProxy.swift */,
D03B054122D888A00000BE1A /* SoftwareVideoSource.swift */,
D03B053F22D8866A0000BE1A /* MediaPlayerFramePreview.swift */,
D0AE322222B285F70058D3BC /* UniversalMediaPlayer.h */,
D0E8B10B22D8B7E800C82570 /* UniversalSoftwareVideoSource.swift */,
);
path = Sources;
sourceTree = "<group>";
@ -241,6 +250,7 @@
D0AE325A22B286A70058D3BC /* FFMpegMediaVideoFrameDecoder.swift in Sources */,
D0750C6E22B28E6600BE5F6E /* RingBuffer.m in Sources */,
D0AE325422B286A70058D3BC /* MediaTrackFrameDecoder.swift in Sources */,
D0E8B10C22D8B7E800C82570 /* UniversalSoftwareVideoSource.swift in Sources */,
D0AE325322B286A70058D3BC /* MediaPlayerNode.swift in Sources */,
D0AE325122B286A70058D3BC /* MediaPlayer.swift in Sources */,
D0AE325722B286A70058D3BC /* MediaPlaybackData.swift in Sources */,
@ -251,11 +261,13 @@
D0AE325222B286A70058D3BC /* FFMpegMediaFrameSourceContext.swift in Sources */,
D0AE324E22B286A70058D3BC /* VideoPlayerProxy.swift in Sources */,
D0AE325022B286A70058D3BC /* MediaTrackFrameBuffer.swift in Sources */,
D03B054022D8866A0000BE1A /* MediaPlayerFramePreview.swift in Sources */,
D0AE325C22B286A70058D3BC /* MediaTrackFrame.swift in Sources */,
D0AE324F22B286A70058D3BC /* MediaPlayerTimeTextNode.swift in Sources */,
D0750C6F22B28E6600BE5F6E /* RingByteBuffer.swift in Sources */,
D0AE325822B286A70058D3BC /* FFMpegMediaPassthroughVideoFrameDecoder.swift in Sources */,
D0AE325B22B286A70058D3BC /* MediaTrackDecodableFrame.swift in Sources */,
D03B054222D888A00000BE1A /* SoftwareVideoSource.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

View File

@ -167,6 +167,10 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
private let messageContextDisposable = MetaDisposable()
private var videoFramePreviewNode: ASImageNode?
private var validLayout: (CGSize, LayoutMetrics, CGFloat, CGFloat, CGFloat, CGFloat)?
var playbackControl: (() -> Void)?
var seekBackward: (() -> Void)?
var seekForward: (() -> Void)?
@ -225,6 +229,8 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
}
}
private var scrubbingHandleRelativePosition: CGFloat = 0.0
var scrubberView: ChatVideoGalleryItemScrubberView? = nil {
willSet {
if let scrubberView = self.scrubberView, scrubberView.superview == self.view {
@ -234,6 +240,15 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
didSet {
if let scrubberView = self.scrubberView {
self.view.addSubview(scrubberView)
scrubberView.updateScrubbingHandlePosition = { [weak self] value in
guard let strongSelf = self else {
return
}
strongSelf.scrubbingHandleRelativePosition = value
if let validLayout = strongSelf.validLayout {
let _ = strongSelf.updateLayout(size: validLayout.0, metrics: validLayout.1, leftInset: validLayout.2, rightInset: validLayout.3, bottomInset: validLayout.4, contentInset: validLayout.5, transition: .immediate)
}
}
}
}
}
@ -500,6 +515,8 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
}
override func updateLayout(size: CGSize, metrics: LayoutMetrics, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, contentInset: CGFloat, transition: ContainedViewLayoutTransition) -> CGFloat {
self.validLayout = (size, metrics, leftInset, rightInset, bottomInset, contentInset)
let width = size.width
var bottomInset = bottomInset
if !bottomInset.isZero && bottomInset < 30.0 {
@ -621,6 +638,16 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
self.dateNode.frame = CGRect(origin: CGPoint(x: floor((width - dateSize.width) / 2.0), y: panelHeight - bottomInset - 44.0 + floor((44.0 - dateSize.height - authorNameSize.height - labelsSpacing) / 2.0) + authorNameSize.height + labelsSpacing), size: dateSize)
}
if let videoFramePreviewNode = self.videoFramePreviewNode {
let intrinsicImageSize = videoFramePreviewNode.image?.size ?? CGSize(width: 320.0, height: 240.0)
let imageSize = intrinsicImageSize.aspectFitted(CGSize(width: 200.0, height: 200.0))
var imageFrame = CGRect(origin: CGPoint(x: leftInset + floor(self.scrubbingHandleRelativePosition * (width - leftInset - rightInset) - imageSize.width / 2.0), y: self.scrollNode.frame.minY - 10.0 - imageSize.height), size: imageSize)
imageFrame.origin.x = min(imageFrame.origin.x, width - rightInset - 10.0 - imageSize.width)
imageFrame.origin.x = max(imageFrame.origin.x, leftInset + 10.0)
videoFramePreviewNode.frame = imageFrame
}
return panelHeight
}
@ -993,4 +1020,34 @@ final class ChatItemGalleryFooterContentNode: GalleryFooterContentNode, UIScroll
/// Forwards taps on the status button to the registered fetch control action.
@objc private func statusPressed() {
self.fetchControl?()
}
/// Shows, updates, or hides the scrubbing frame preview thumbnail.
/// Passing nil fades the preview out and removes it from the hierarchy.
func setFramePreviewImage(image: UIImage?) {
    guard let image = image else {
        if let previewNode = self.videoFramePreviewNode {
            self.videoFramePreviewNode = nil
            previewNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.1, removeOnCompletion: false, completion: { [weak previewNode] _ in
                previewNode?.removeFromSupernode()
            })
        }
        return
    }
    let previewNode: ASImageNode
    var isFreshNode = false
    if let existingNode = self.videoFramePreviewNode {
        previewNode = existingNode
    } else {
        let createdNode = ASImageNode()
        createdNode.displaysAsynchronously = false
        createdNode.displayWithoutProcessing = true
        self.videoFramePreviewNode = createdNode
        self.addSubnode(createdNode)
        previewNode = createdNode
        isFreshNode = true
    }
    // Relayout only when the image dimensions changed, since the preview
    // frame depends on the image size.
    let needsLayout = previewNode.image?.size != image.size
    previewNode.image = image
    if needsLayout, let validLayout = self.validLayout {
        let _ = self.updateLayout(size: validLayout.0, metrics: validLayout.1, leftInset: validLayout.2, rightInset: validLayout.3, bottomInset: validLayout.4, contentInset: validLayout.5, transition: .immediate)
    }
    if isFreshNode {
        previewNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
    }
}
}

View File

@ -42,6 +42,8 @@ final class ChatVideoGalleryItemScrubberView: UIView {
}
}
var updateScrubbing: (Double?) -> Void = { _ in }
var updateScrubbingHandlePosition: (CGFloat) -> Void = { _ in }
var seek: (Double) -> Void = { _ in }
override init(frame: CGRect) {
@ -63,6 +65,11 @@ final class ChatVideoGalleryItemScrubberView: UIView {
self?.seek(timestamp)
}
self.scrubberNode.update = { [weak self] timestamp, position in
self?.updateScrubbing(timestamp)
self?.updateScrubbingHandlePosition(position)
}
self.scrubberNode.playerStatusUpdated = { [weak self] status in
if let strongSelf = self {
strongSelf.playbackStatus = status

View File

@ -154,6 +154,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
private let footerContentNode: ChatItemGalleryFooterContentNode
private var videoNode: UniversalVideoNode?
private var videoFramePreview: MediaPlayerFramePreview?
private var pictureInPictureNode: UniversalVideoGalleryItemPictureInPictureNode?
private let statusButtonNode: HighlightableButtonNode
private let statusNode: RadialStatusNode
@ -178,6 +179,10 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
private var fetchStatus: MediaResourceStatus?
private var fetchControls: FetchControls?
private var scrubbingFrame = Promise<UIImage?>(nil)
private var scrubbingFrames = false
private var scrubbingFrameDisposable: Disposable?
var playbackCompleted: (() -> Void)?
init(context: AccountContext, presentationData: PresentationData, performAction: @escaping (GalleryControllerInteractionTapAction) -> Void, openActionOptions: @escaping (GalleryControllerInteractionTapAction) -> Void) {
@ -203,6 +208,23 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
self?.videoNode?.seek(timecode)
}
self.scrubberView.updateScrubbing = { [weak self] timecode in
guard let strongSelf = self, let videoFramePreview = strongSelf.videoFramePreview else {
return
}
if let timecode = timecode {
if !strongSelf.scrubbingFrames {
strongSelf.scrubbingFrames = true
strongSelf.scrubbingFrame.set(videoFramePreview.generatedFrames)
}
videoFramePreview.generateFrame(at: timecode)
} else {
strongSelf.scrubbingFrame.set(.single(nil))
videoFramePreview.cancelPendingFrames()
strongSelf.scrubbingFrames = false
}
}
self.statusButtonNode.addSubnode(self.statusNode)
self.statusButtonNode.addTarget(self, action: #selector(statusButtonPressed), forControlEvents: .touchUpInside)
@ -255,10 +277,19 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
break
}
}
self.scrubbingFrameDisposable = (self.scrubbingFrame.get()
|> deliverOnMainQueue).start(next: { [weak self] image in
guard let strongSelf = self else {
return
}
strongSelf.footerContentNode.setFramePreviewImage(image: image)
})
}
deinit {
self.statusDisposable.dispose()
self.scrubbingFrameDisposable?.dispose()
}
override func ready() -> Signal<Void, NoError> {
@ -304,6 +335,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
var isAnimated = false
if let content = item.content as? NativeVideoContent {
isAnimated = content.fileReference.media.isAnimated
self.videoFramePreview = MediaPlayerFramePreview(postbox: item.context.account.postbox, fileReference: content.fileReference)
} else if let _ = item.content as? SystemVideoContent {
self._title.set(.single(item.presentationData.strings.Message_Video))
} else if let content = item.content as? WebEmbedVideoContent, case .iframe = webEmbedType(content: content.webpageContent) {

View File

@ -2,6 +2,11 @@
NS_ASSUME_NONNULL_BEGIN
// Simplified two-value mapping of libavcodec's AVColorRange, exposed to Swift
// so the video frame converter can pick the matching YpCbCr pixel range.
typedef NS_ENUM(NSUInteger, FFMpegAVFrameColorRange) {
// Limited/TV range (Y: 16-235, CbCr: 16-240); also used when the
// source range is unspecified.
FFMpegAVFrameColorRangeRestricted,
// Full/PC range (0-255 for all components).
FFMpegAVFrameColorRangeFull
};
@interface FFMpegAVFrame : NSObject
@property (nonatomic, readonly) int32_t width;
@ -9,6 +14,7 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic, readonly) uint8_t **data;
@property (nonatomic, readonly) int *lineSize;
@property (nonatomic, readonly) int64_t pts;
@property (nonatomic, readonly) FFMpegAVFrameColorRange colorRange;
- (instancetype)init;

View File

@ -44,6 +44,16 @@
return _impl->pts;
}
// Maps the underlying frame's AVColorRange onto the wrapper's two-value enum.
// MPEG (limited/TV) range and "unspecified" are both reported as restricted;
// every other value is treated as full range.
- (FFMpegAVFrameColorRange)colorRange {
    enum AVColorRange underlyingRange = _impl->color_range;
    BOOL isRestricted = (underlyingRange == AVCOL_RANGE_MPEG) || (underlyingRange == AVCOL_RANGE_UNSPECIFIED);
    return isRestricted ? FFMpegAVFrameColorRangeRestricted : FFMpegAVFrameColorRangeFull;
}
// Escape hatch exposing the raw underlying object (presumably an AVFrame* —
// NOTE(review): confirm against the ivar declaration, which is outside this
// view) for callers that need direct FFmpeg API access. The receiver retains
// ownership; callers must not free the returned pointer.
- (void *)impl {
return _impl;
}

View File

@ -48,7 +48,6 @@ set -e
CONFIGURE_FLAGS="--enable-cross-compile --disable-programs \
--disable-armv5te --disable-armv6 --disable-armv6t2 \
--disable-doc --enable-pic --disable-all --disable-everything \
--disable-videotoolbox \
--enable-avcodec \
--enable-swresample \
--enable-avformat \
@ -56,13 +55,16 @@ CONFIGURE_FLAGS="--enable-cross-compile --disable-programs \
--enable-libopus \
--enable-audiotoolbox \
--enable-bsf=aac_adtstoasc \
--enable-decoder=h264,libopus,mp3_at,aac_at,flac,alac_at,pcm_s16le,pcm_s24le,gsm_ms_at \
--enable-decoder=h264,hevc,libopus,mp3_at,aac_at,flac,alac_at,pcm_s16le,pcm_s24le,gsm_ms_at \
--enable-demuxer=aac,mov,m4v,mp3,ogg,libopus,flac,wav,aiff,matroska \
--enable-parser=aac,h264,mp3,libopus \
--enable-protocol=file \
--enable-muxer=mp4 \
"
#--enable-hwaccel=h264_videotoolbox,hevc_videotoolbox \
if [ "$1" = "debug" ];
then
CONFIGURE_FLAGS="$CONFIGURE_FLAGS --disable-optimizations --disable-stripping"

View File

@ -36,6 +36,7 @@
D04555D721BF8B2F007A6DD9 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D04555D621BF8B2F007A6DD9 /* AudioToolbox.framework */; };
D04555D921BF8B4E007A6DD9 /* libiconv.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = D04555D821BF8B4E007A6DD9 /* libiconv.tbd */; };
D04555DB21BF8B77007A6DD9 /* libopus.a in Frameworks */ = {isa = PBXBuildFile; fileRef = D04555DA21BF8B77007A6DD9 /* libopus.a */; };
D0E8B10E22D8E97B00C82570 /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0E8B10D22D8E97B00C82570 /* VideoToolbox.framework */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@ -71,6 +72,7 @@
D04555D821BF8B4E007A6DD9 /* libiconv.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libiconv.tbd; path = usr/lib/libiconv.tbd; sourceTree = SDKROOT; };
D04555DA21BF8B77007A6DD9 /* libopus.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libopus.a; path = opus/lib/libopus.a; sourceTree = "<group>"; };
D0CAD6A621C049D9001E3055 /* ffmpeg-4.1 */ = {isa = PBXFileReference; lastKnownFileType = folder; path = "ffmpeg-4.1"; sourceTree = "<group>"; };
D0E8B10D22D8E97B00C82570 /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = System/Library/Frameworks/VideoToolbox.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -78,6 +80,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
D0E8B10E22D8E97B00C82570 /* VideoToolbox.framework in Frameworks */,
D000CABF21F76B1B0011B15D /* libbz2.tbd in Frameworks */,
D04555DB21BF8B77007A6DD9 /* libopus.a in Frameworks */,
D04555D921BF8B4E007A6DD9 /* libiconv.tbd in Frameworks */,
@ -142,6 +145,7 @@
D04554C921BF1119007A6DD9 /* Frameworks */ = {
isa = PBXGroup;
children = (
D0E8B10D22D8E97B00C82570 /* VideoToolbox.framework */,
D000CABE21F76B1B0011B15D /* libbz2.tbd */,
D04555DA21BF8B77007A6DD9 /* libopus.a */,
D04555D821BF8B4E007A6DD9 /* libiconv.tbd */,