Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin
2021-07-28 02:10:47 +03:00
4 changed files with 71 additions and 19 deletions

View File

@@ -27,12 +27,21 @@ private func getCubeVertexData(
    frameWidth: Int,
    frameHeight: Int,
    rotation: Int,
+    mirrorHorizontally: Bool,
+    mirrorVertically: Bool,
    buffer: UnsafeMutablePointer<Float>
 ) {
-    let cropLeft = Float(cropX) / Float(frameWidth)
-    let cropRight = Float(cropX + cropWidth) / Float(frameWidth)
-    let cropTop = Float(cropY) / Float(frameHeight)
-    let cropBottom = Float(cropY + cropHeight) / Float(frameHeight)
+    var cropLeft = Float(cropX) / Float(frameWidth)
+    var cropRight = Float(cropX + cropWidth) / Float(frameWidth)
+    var cropTop = Float(cropY) / Float(frameHeight)
+    var cropBottom = Float(cropY + cropHeight) / Float(frameHeight)
+
+    if mirrorHorizontally {
+        swap(&cropLeft, &cropRight)
+    }
+    if mirrorVertically {
+        swap(&cropTop, &cropBottom)
+    }
 
     switch rotation {
     default:
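
Why swapping crop edges mirrors the image: the crop rectangle is converted to normalized texture coordinates, and the vertex data maps the quad's corners to those edges, so exchanging left/right (or top/bottom) makes the texture get sampled in the opposite direction along that axis. A minimal standalone sketch of the same idea, assuming nothing beyond the Swift standard library (the uvEdges helper is illustrative, not part of this commit):

    // Sketch: normalized UV edges for a crop rectangle, optionally mirrored.
    // Swapping an edge pair reverses texture sampling along that axis, which
    // is how getCubeVertexData above implements mirroring.
    func uvEdges(
        cropX: Int, cropY: Int, cropWidth: Int, cropHeight: Int,
        frameWidth: Int, frameHeight: Int,
        mirrorHorizontally: Bool, mirrorVertically: Bool
    ) -> (left: Float, right: Float, top: Float, bottom: Float) {
        var left = Float(cropX) / Float(frameWidth)
        var right = Float(cropX + cropWidth) / Float(frameWidth)
        var top = Float(cropY) / Float(frameHeight)
        var bottom = Float(cropY + cropHeight) / Float(frameHeight)
        if mirrorHorizontally {
            swap(&left, &right)
        }
        if mirrorVertically {
            swap(&top, &bottom)
        }
        return (left, right, top, bottom)
    }

    // For a full 1280x720 frame with no crop, mirroring horizontally yields
    // left = 1.0, right = 0.0: U coordinates now run right-to-left.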
@@ -49,6 +58,8 @@ private func getCubeVertexData(
 @available(iOS 13.0, *)
 private protocol FrameBufferRenderingState {
     var frameSize: CGSize? { get }
+    var mirrorHorizontally: Bool { get }
+    var mirrorVertically: Bool { get }
 
     func encode(renderingContext: MetalVideoRenderingContext, vertexBuffer: MTLBuffer, renderEncoder: MTLRenderCommandEncoder) -> Bool
 }
@@ -73,6 +84,9 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
     private var yTexture: MTLTexture?
     private var uvTexture: MTLTexture?
 
+    private(set) var mirrorHorizontally: Bool = false
+    private(set) var mirrorVertically: Bool = false
+
     var frameSize: CGSize? {
         if let yTexture = self.yTexture {
             return CGSize(width: yTexture.width, height: yTexture.height)
@@ -81,7 +95,7 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
         }
     }
 
-    func updateTextureBuffers(renderingContext: MetalVideoRenderingContext, frameBuffer: OngoingGroupCallContext.VideoFrameData.NativeBuffer) {
+    func updateTextureBuffers(renderingContext: MetalVideoRenderingContext, frameBuffer: OngoingGroupCallContext.VideoFrameData.NativeBuffer, mirrorHorizontally: Bool, mirrorVertically: Bool) {
         let pixelBuffer = frameBuffer.pixelBuffer
 
         var lumaTexture: MTLTexture?
@@ -112,6 +126,9 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
             self.yTexture = nil
             self.uvTexture = nil
         }
+
+        self.mirrorHorizontally = mirrorHorizontally
+        self.mirrorVertically = mirrorVertically
     }
 
     func encode(renderingContext: MetalVideoRenderingContext, vertexBuffer: MTLBuffer, renderEncoder: MTLRenderCommandEncoder) -> Bool {
@@ -142,6 +159,9 @@ private final class I420FrameBufferRenderingState: FrameBufferRenderingState {
     private var lumaTextureDescriptor: MTLTextureDescriptor?
     private var chromaTextureDescriptor: MTLTextureDescriptor?
 
+    private(set) var mirrorHorizontally: Bool = false
+    private(set) var mirrorVertically: Bool = false
+
     var frameSize: CGSize? {
         if let yTexture = self.yTexture {
             return CGSize(width: yTexture.width, height: yTexture.height)
@@ -318,7 +338,7 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
                     renderingState = NV12FrameBufferRenderingState()
                     self.frameBufferRenderingState = renderingState
                 }
-                renderingState.updateTextureBuffers(renderingContext: renderingContext, frameBuffer: buffer)
+                renderingState.updateTextureBuffers(renderingContext: renderingContext, frameBuffer: buffer, mirrorHorizontally: videoFrameData.mirrorHorizontally, mirrorVertically: videoFrameData.mirrorVertically)
                 self.needsRedraw = true
             case let .i420(buffer):
                 let renderingState: I420FrameBufferRenderingState
@@ -350,6 +370,8 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
         guard let frameSize = frameBufferRenderingState.frameSize else {
             return nil
         }
+        let mirrorHorizontally = frameBufferRenderingState.mirrorHorizontally
+        let mirrorVertically = frameBufferRenderingState.mirrorVertically
 
         let drawableSize: CGSize
         if self.blur {
@@ -382,19 +404,20 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
         if self.metalLayer.drawableSize != drawableSize {
             self.metalLayer.drawableSize = drawableSize
-
-            getCubeVertexData(
-                cropX: 0,
-                cropY: 0,
-                cropWidth: Int(drawableSize.width),
-                cropHeight: Int(drawableSize.height),
-                frameWidth: Int(drawableSize.width),
-                frameHeight: Int(drawableSize.height),
-                rotation: 0,
-                buffer: self.vertexBuffer.contents().assumingMemoryBound(to: Float.self)
-            )
         }
 
+        getCubeVertexData(
+            cropX: 0,
+            cropY: 0,
+            cropWidth: Int(drawableSize.width),
+            cropHeight: Int(drawableSize.height),
+            frameWidth: Int(drawableSize.width),
+            frameHeight: Int(drawableSize.height),
+            rotation: 0,
+            mirrorHorizontally: mirrorHorizontally,
+            mirrorVertically: mirrorVertically,
+            buffer: self.vertexBuffer.contents().assumingMemoryBound(to: Float.self)
+        )
+
         guard let drawable = self.metalLayer.nextDrawable() else {
             return nil
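
One behavioral consequence of this last hunk: getCubeVertexData used to run only when drawableSize changed, but the mirror flags now travel with each frame, so the vertex buffer is refilled on every draw. If that per-draw write ever mattered in profiling, a hedged alternative would be to memoize the inputs; the VertexDataCache type below is a hypothetical sketch, not part of this commit:

    import CoreGraphics

    // Hypothetical memoization of the vertex-data inputs, so the buffer is
    // only rewritten when the drawable size or mirror flags actually change.
    final class VertexDataCache {
        private struct Key: Equatable {
            var drawableSize: CGSize
            var mirrorHorizontally: Bool
            var mirrorVertically: Bool
        }
        private var lastKey: Key?

        // Returns true if the vertex buffer needs to be refilled for these
        // inputs, and records them as the new cached state.
        func needsUpdate(drawableSize: CGSize, mirrorHorizontally: Bool, mirrorVertically: Bool) -> Bool {
            let key = Key(drawableSize: drawableSize, mirrorHorizontally: mirrorHorizontally, mirrorVertically: mirrorVertically)
            if key == lastKey {
                return false
            }
            lastKey = key
            return true
        }
    }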

View File

@@ -290,6 +290,8 @@ public final class OngoingGroupCallContext {
         public let width: Int
         public let height: Int
         public let orientation: OngoingCallVideoOrientation
+        public let mirrorHorizontally: Bool
+        public let mirrorVertically: Bool
 
         init(frameData: CallVideoFrameData) {
             if let nativeBuffer = frameData.buffer as? CallVideoFrameNativePixelBuffer {
@@ -305,6 +307,8 @@ public final class OngoingGroupCallContext {
             self.width = Int(frameData.width)
             self.height = Int(frameData.height)
             self.orientation = OngoingCallVideoOrientation(frameData.orientation)
+            self.mirrorHorizontally = frameData.mirrorHorizontally
+            self.mirrorVertically = frameData.mirrorVertically
         }
     }
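
With these two fields, every consumer of VideoFrameData receives the mirror state alongside the orientation instead of having to infer it from the capture device. A hedged consumer-side sketch of how the flags could be turned into a display transform (the displayTransform helper is illustrative, not part of this commit):

    import CoreGraphics

    // Hypothetical consumer: a horizontal mirror is a flip of the x axis,
    // a vertical mirror a flip of the y axis.
    func displayTransform(mirrorHorizontally: Bool, mirrorVertically: Bool) -> CGAffineTransform {
        var transform = CGAffineTransform.identity
        if mirrorHorizontally {
            transform = transform.scaledBy(x: -1.0, y: 1.0)
        }
        if mirrorVertically {
            transform = transform.scaledBy(x: 1.0, y: -1.0)
        }
        return transform
    }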

View File

@@ -161,6 +161,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 @property (nonatomic, readonly) int width;
 @property (nonatomic, readonly) int height;
 @property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc orientation;
+@property (nonatomic, readonly) bool mirrorHorizontally;
+@property (nonatomic, readonly) bool mirrorVertically;
 
 @end

View File

@@ -34,6 +34,7 @@
#include "sdk/objc/native/src/objc_frame_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#import "platform/darwin/TGRTCCVPixelBuffer.h"
@implementation OngoingCallConnectionDescriptionWebrtc @implementation OngoingCallConnectionDescriptionWebrtc
@@ -326,7 +327,7 @@
 @implementation CallVideoFrameData
 
-- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame {
+- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically {
     self = [super init];
     if (self != nil) {
         _buffer = buffer;
@@ -356,6 +357,9 @@
                 break;
             }
         }
+
+        _mirrorHorizontally = mirrorHorizontally;
+        _mirrorVertically = mirrorVertically;
     }
     return self;
 }
@@ -400,12 +404,31 @@ private:
     _adapter.reset(new GroupCallVideoSinkAdapter(^(webrtc::VideoFrame const &videoFrame) {
         id<CallVideoFrameBuffer> mappedBuffer = nil;
 
+        bool mirrorHorizontally = false;
+        bool mirrorVertically = false;
+
         if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
             id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> nativeBuffer = static_cast<webrtc::ObjCFrameBuffer *>(videoFrame.video_frame_buffer().get())->wrapped_frame_buffer();
             if ([nativeBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
                 RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)nativeBuffer;
                 mappedBuffer = [[CallVideoFrameNativePixelBuffer alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
             }
+            if ([nativeBuffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
+                if (((TGRTCCVPixelBuffer *)nativeBuffer).shouldBeMirrored) {
+                    switch (videoFrame.rotation()) {
+                        case webrtc::kVideoRotation_0:
+                        case webrtc::kVideoRotation_180:
+                            mirrorHorizontally = true;
+                            break;
+                        case webrtc::kVideoRotation_90:
+                        case webrtc::kVideoRotation_270:
+                            mirrorVertically = true;
+                            break;
+                        default:
+                            break;
+                    }
+                }
+            }
         } else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
             rtc::scoped_refptr<webrtc::NV12BufferInterface> nv12Buffer = (webrtc::NV12BufferInterface *)videoFrame.video_frame_buffer().get();
             mappedBuffer = [[CallVideoFrameNV12Buffer alloc] initWithBuffer:nv12Buffer];
@@ -415,7 +438,7 @@ private:
         }
 
         if (storedSink && mappedBuffer) {
-            storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame]);
+            storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
         }
     }));
 }
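
The rotation switch is the subtle part of this hunk: shouldBeMirrored describes a mirror of the upright camera image, but the flags are applied to the buffer before display rotation, so for 90°/270° rotations the screen's horizontal axis corresponds to the buffer's vertical axis and the flip has to move there. The same mapping restated in Swift as a hedged sketch (the Rotation enum stands in for webrtc::VideoRotation; it is not part of this commit):

    // Sketch: which buffer axis to flip so the frame appears horizontally
    // mirrored on screen, given the buffer's rotation. For 90/270 the
    // buffer's vertical axis becomes the screen's horizontal axis.
    enum Rotation {
        case deg0, deg90, deg180, deg270
    }

    func mirrorAxes(for rotation: Rotation, shouldBeMirrored: Bool) -> (horizontal: Bool, vertical: Bool) {
        guard shouldBeMirrored else {
            return (false, false)
        }
        switch rotation {
        case .deg0, .deg180:
            return (true, false)
        case .deg90, .deg270:
            return (false, true)
        }
    }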