Video Stickers Fixes

Ilya Laktyushin 2022-01-29 17:25:22 +03:00
parent a506cbf7cf
commit d1802a460e
4 changed files with 39 additions and 20 deletions

View File

@@ -115,7 +115,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
         }
     }
     
-    public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?) -> MediaTrackFrame? {
+    public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?, forceARGB: Bool = false) -> MediaTrackFrame? {
         let status = frame.packet.send(toDecoder: self.codecContext)
         if status == 0 {
             self.defaultDuration = frame.duration
@@ -126,7 +126,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
                 if let ptsOffset = ptsOffset {
                     pts = CMTimeAdd(pts, ptsOffset)
                 }
-                return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration)
+                return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration, forceARGB: forceARGB)
             }
         }
@@ -236,7 +236,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
         return UIImage(cgImage: image, scale: 1.0, orientation: .up)
     }
     
-    private func convertVideoFrame(_ frame: FFMpegAVFrame, pts: CMTime, dts: CMTime, duration: CMTime) -> MediaTrackFrame? {
+    private func convertVideoFrame(_ frame: FFMpegAVFrame, pts: CMTime, dts: CMTime, duration: CMTime, forceARGB: Bool = false) -> MediaTrackFrame? {
         if frame.data[0] == nil {
             return nil
         }
@@ -247,14 +247,27 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
         var pixelBufferRef: CVPixelBuffer?
         
         let pixelFormat: OSType
+        var hasAlpha = false
+        if forceARGB {
+            pixelFormat = kCVPixelFormatType_32ARGB
+            switch frame.pixelFormat {
+            case .YUV:
+                hasAlpha = false
+            case .YUVA:
+                hasAlpha = true
+            default:
+                hasAlpha = false
+            }
+        } else {
             switch frame.pixelFormat {
             case .YUV:
                 pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
             case .YUVA:
-                pixelFormat = kCVPixelFormatType_32ARGB
+                pixelFormat = kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar
             default:
                 pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
             }
+        }
         
         if let pixelBufferPool = self.pixelBufferPool {
             let auxAttributes: [String: Any] = [kCVPixelBufferPoolAllocationThresholdKey as String: bufferCount as NSNumber];
@@ -290,7 +303,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
             var base: UnsafeMutableRawPointer
             if pixelFormat == kCVPixelFormatType_32ARGB {
                 let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
-                decodeYUVAPlanesToRGBA(frame.data[0], Int32(frame.lineSize[0]), frame.data[1], Int32(frame.lineSize[1]), frame.data[2], Int32(frame.lineSize[2]), frame.data[3], CVPixelBufferGetBaseAddress(pixelBuffer)?.assumingMemoryBound(to: UInt8.self), Int32(frame.width), Int32(frame.height), Int32(bytesPerRow))
+                decodeYUVAPlanesToRGBA(frame.data[0], Int32(frame.lineSize[0]), frame.data[1], Int32(frame.lineSize[1]), frame.data[2], Int32(frame.lineSize[2]), hasAlpha, frame.data[3], CVPixelBufferGetBaseAddress(pixelBuffer)?.assumingMemoryBound(to: UInt8.self), Int32(frame.width), Int32(frame.height), Int32(bytesPerRow))
             } else {
                 let srcPlaneSize = Int(frame.lineSize[1]) * Int(frame.height / 2)
                 let uvPlaneSize = srcPlaneSize * 2
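The decoder change above can be summarized as a small standalone helper: with forceARGB the destination buffer is always 32-bit ARGB and the decoder only remembers whether the source frame carries an alpha plane; otherwise YUVA frames keep using the tri-planar video-range format. The sketch below is illustrative only, and FramePixelLayout is a stand-in for FFMpeg's pixel-format enum, not a type from this repository.

import CoreVideo

// Stand-in for FFMpegAVFrame's pixel-format enum (assumption for this sketch).
enum FramePixelLayout {
    case yuv
    case yuva
    case other
}

// Mirrors the selection introduced in the hunk above. `hasAlpha` is only consulted
// on the ARGB path, matching the decoder, so the non-ARGB branch leaves it false.
func destinationPixelFormat(for layout: FramePixelLayout, forceARGB: Bool) -> (format: OSType, hasAlpha: Bool) {
    if forceARGB {
        return (kCVPixelFormatType_32ARGB, layout == .yuva)
    }
    switch layout {
    case .yuva:
        return (kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar, false)
    default:
        return (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, false)
    }
}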

View File

@@ -58,12 +58,16 @@ public final class SoftwareVideoSource {
     fileprivate let fd: Int32?
     fileprivate let size: Int32
     
+    private let hintVP9: Bool
+    
     private var enqueuedFrames: [(MediaTrackFrame, CGFloat, CGFloat, Bool)] = []
     private var hasReadToEnd: Bool = false
     
     public init(path: String, hintVP9: Bool) {
         let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
         
+        self.hintVP9 = hintVP9
+        
         var s = stat()
         stat(path, &s)
         self.size = Int32(s.st_size)
@@ -224,7 +228,7 @@ public final class SoftwareVideoSource {
                     if let maxPts = maxPts, CMTimeCompare(decodableFrame.pts, maxPts) < 0 {
                         ptsOffset = maxPts
                     }
-                    result = (videoStream.decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
+                    result = (videoStream.decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset, forceARGB: self.hintVP9), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
                 } else {
                     result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
                 }
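With the hint stored in init and forwarded here, callers opt into the ARGB decode path simply by constructing the source with hintVP9: true for VP9/WebM video stickers. A hypothetical call site follows; only the initializer signature comes from this diff, and the path value is a placeholder.

// Hypothetical usage, assuming the module defining SoftwareVideoSource is imported.
let stickerPath = "/tmp/video-sticker.webm"   // placeholder, not from the commit
let source = SoftwareVideoSource(path: stickerPath, hintVP9: true)
// Frames read from `source` are now decoded with forceARGB = true, as wired up above.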

View File

@@ -4,4 +4,4 @@ void encodeRGBAToYUVA(uint8_t *yuva, uint8_t const *argb, int width, int height,
 void resizeAndEncodeRGBAToYUVA(uint8_t *yuva, uint8_t const *argb, int width, int height, int bytesPerRow, int originalWidth, int originalHeight, int originalBytesPerRow);
 void decodeYUVAToRGBA(uint8_t const *yuva, uint8_t *argb, int width, int height, int bytesPerRow);
-void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint8_t const *srcCbData, int srcCbBytesPerRow, uint8_t const *srcCrData, int srcCrBytesPerRow, uint8_t const *alphaData, uint8_t *argb, int width, int height, int bytesPerRow);
+void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint8_t const *srcCbData, int srcCbBytesPerRow, uint8_t const *srcCrData, int srcCrBytesPerRow, bool hasAlpha, uint8_t const *alphaData, uint8_t *argb, int width, int height, int bytesPerRow);

View File

@@ -169,7 +169,7 @@ void decodeYUVAToRGBA(uint8_t const *yuva, uint8_t *argb, int width, int height,
     }
 }
 
-void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint8_t const *srcCbData, int srcCbBytesPerRow, uint8_t const *srcCrData, int srcCrBytesPerRow, uint8_t const *alphaData, uint8_t *argb, int width, int height, int bytesPerRow) {
+void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint8_t const *srcCbData, int srcCbBytesPerRow, uint8_t const *srcCrData, int srcCrBytesPerRow, bool hasAlpha, uint8_t const *alphaData, uint8_t *argb, int width, int height, int bytesPerRow) {
     static vImage_YpCbCrToARGB info;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^{
@@ -204,6 +204,7 @@ void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint
     dest.rowBytes = bytesPerRow;
     
     error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr, &dest, &info, NULL, 0xff, kvImageDoNotTile);
+    if (hasAlpha) {
     for (int y = 0; y < height; y += 1) {
         uint8_t *argbRow = argb + y * bytesPerRow;
         int alphaRow = y * srcYpBytesPerRow;
@@ -212,6 +213,7 @@ void decodeYUVAPlanesToRGBA(uint8_t const *srcYpData, int srcYpBytesPerRow, uint
             argbRow[x * 4] = alphaData[alphaRow + x];
         }
     }
+    }
     
     error = vImagePremultiplyData_ARGB8888(&dest, &dest, kvImageDoNotTile);
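Read together with the header change, the two added lines gate the alpha-plane copy: vImageConvert_420Yp8_Cb8_Cr8ToARGB8888 first fills the destination with opaque ARGB (alpha 0xff), and only when hasAlpha is set does the separate alpha plane overwrite the A channel before the premultiply step. Below is a minimal Swift sketch of that copy, assuming the same ARGB8888 layout and an alpha plane sharing the Y plane's stride; it is illustrative only, the shipped implementation remains the C code above.

// Copies the decoder's separate alpha plane into byte 0 (the A channel) of each
// ARGB pixel; skipped when the frame has no alpha, keeping the opaque 0xff alpha
// produced by the vImage conversion.
func applyAlphaPlane(argb: UnsafeMutablePointer<UInt8>, bytesPerRow: Int,
                     alpha: UnsafePointer<UInt8>, alphaBytesPerRow: Int,
                     width: Int, height: Int, hasAlpha: Bool) {
    guard hasAlpha else { return }
    for y in 0 ..< height {
        let argbRow = argb + y * bytesPerRow
        let alphaRow = alpha + y * alphaBytesPerRow
        for x in 0 ..< width {
            argbRow[x * 4] = alphaRow[x]
        }
    }
}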