Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00

Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Commit 4e661d97b3
@@ -27,12 +27,21 @@ private func getCubeVertexData(
     frameWidth: Int,
     frameHeight: Int,
     rotation: Int,
+    mirrorHorizontally: Bool,
+    mirrorVertically: Bool,
     buffer: UnsafeMutablePointer<Float>
 ) {
-    let cropLeft = Float(cropX) / Float(frameWidth)
-    let cropRight = Float(cropX + cropWidth) / Float(frameWidth)
-    let cropTop = Float(cropY) / Float(frameHeight)
-    let cropBottom = Float(cropY + cropHeight) / Float(frameHeight)
+    var cropLeft = Float(cropX) / Float(frameWidth)
+    var cropRight = Float(cropX + cropWidth) / Float(frameWidth)
+    var cropTop = Float(cropY) / Float(frameHeight)
+    var cropBottom = Float(cropY + cropHeight) / Float(frameHeight)
+
+    if mirrorHorizontally {
+        swap(&cropLeft, &cropRight)
+    }
+    if mirrorVertically {
+        swap(&cropTop, &cropBottom)
+    }
 
     switch rotation {
     default:
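
The hunk above rests on one idea: the quad's texture coordinates are derived from a crop rectangle normalized against the frame size, so swapping opposite edges of that rectangle mirrors the sampled image along the corresponding axis. A minimal, self-contained Swift sketch of that idea (illustrative names, not the project's API):

func normalizedCropRect(
    cropX: Int, cropY: Int, cropWidth: Int, cropHeight: Int,
    frameWidth: Int, frameHeight: Int,
    mirrorHorizontally: Bool, mirrorVertically: Bool
) -> (left: Float, right: Float, top: Float, bottom: Float) {
    // Normalize the crop rectangle to [0, 1] texture space.
    var left = Float(cropX) / Float(frameWidth)
    var right = Float(cropX + cropWidth) / Float(frameWidth)
    var top = Float(cropY) / Float(frameHeight)
    var bottom = Float(cropY + cropHeight) / Float(frameHeight)
    // Swapping the edges reverses the sampling direction along that axis,
    // which flips the rendered image without touching the pixel data.
    if mirrorHorizontally {
        swap(&left, &right)
    }
    if mirrorVertically {
        swap(&top, &bottom)
    }
    return (left, right, top, bottom)
}
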
@@ -49,6 +58,8 @@ private func getCubeVertexData(
 @available(iOS 13.0, *)
 private protocol FrameBufferRenderingState {
     var frameSize: CGSize? { get }
+    var mirrorHorizontally: Bool { get }
+    var mirrorVertically: Bool { get }
 
     func encode(renderingContext: MetalVideoRenderingContext, vertexBuffer: MTLBuffer, renderEncoder: MTLRenderCommandEncoder) -> Bool
 }
@@ -73,6 +84,9 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
     private var yTexture: MTLTexture?
     private var uvTexture: MTLTexture?
 
+    private(set) var mirrorHorizontally: Bool = false
+    private(set) var mirrorVertically: Bool = false
+
     var frameSize: CGSize? {
         if let yTexture = self.yTexture {
             return CGSize(width: yTexture.width, height: yTexture.height)
@@ -81,7 +95,7 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
         }
     }
 
-    func updateTextureBuffers(renderingContext: MetalVideoRenderingContext, frameBuffer: OngoingGroupCallContext.VideoFrameData.NativeBuffer) {
+    func updateTextureBuffers(renderingContext: MetalVideoRenderingContext, frameBuffer: OngoingGroupCallContext.VideoFrameData.NativeBuffer, mirrorHorizontally: Bool, mirrorVertically: Bool) {
         let pixelBuffer = frameBuffer.pixelBuffer
 
         var lumaTexture: MTLTexture?
@@ -112,6 +126,9 @@ private final class NV12FrameBufferRenderingState: FrameBufferRenderingState {
             self.yTexture = nil
             self.uvTexture = nil
         }
+
+        self.mirrorHorizontally = mirrorHorizontally
+        self.mirrorVertically = mirrorVertically
     }
 
     func encode(renderingContext: MetalVideoRenderingContext, vertexBuffer: MTLBuffer, renderEncoder: MTLRenderCommandEncoder) -> Bool {
@@ -142,6 +159,9 @@ private final class I420FrameBufferRenderingState: FrameBufferRenderingState {
     private var lumaTextureDescriptor: MTLTextureDescriptor?
     private var chromaTextureDescriptor: MTLTextureDescriptor?
 
+    private(set) var mirrorHorizontally: Bool = false
+    private(set) var mirrorVertically: Bool = false
+
     var frameSize: CGSize? {
         if let yTexture = self.yTexture {
             return CGSize(width: yTexture.width, height: yTexture.height)
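
Both the NV12 and I420 rendering states follow the same pattern: the mirror flags arrive together with each frame in updateTextureBuffers, are latched on the state object, and are read back when the view rebuilds its vertex data. A simplified Swift sketch of that flow, with placeholder names in place of the Metal-specific types:

protocol MirrorAwareRenderingState {
    var mirrorHorizontally: Bool { get }
    var mirrorVertically: Bool { get }
}

final class ExampleRenderingState: MirrorAwareRenderingState {
    private(set) var mirrorHorizontally: Bool = false
    private(set) var mirrorVertically: Bool = false

    // Called once per incoming frame, alongside the texture upload.
    func update(mirrorHorizontally: Bool, mirrorVertically: Bool) {
        self.mirrorHorizontally = mirrorHorizontally
        self.mirrorVertically = mirrorVertically
    }
}

// At draw time the view reads the latched flags and feeds them into the
// vertex-data generation (getCubeVertexData in the hunks above).
func currentMirrorFlags(of state: MirrorAwareRenderingState) -> (horizontal: Bool, vertical: Bool) {
    return (state.mirrorHorizontally, state.mirrorVertically)
}
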
@@ -318,7 +338,7 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
                 renderingState = NV12FrameBufferRenderingState()
                 self.frameBufferRenderingState = renderingState
             }
-            renderingState.updateTextureBuffers(renderingContext: renderingContext, frameBuffer: buffer)
+            renderingState.updateTextureBuffers(renderingContext: renderingContext, frameBuffer: buffer, mirrorHorizontally: videoFrameData.mirrorHorizontally, mirrorVertically: videoFrameData.mirrorVertically)
             self.needsRedraw = true
         case let .i420(buffer):
             let renderingState: I420FrameBufferRenderingState
@@ -350,6 +370,8 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
         guard let frameSize = frameBufferRenderingState.frameSize else {
             return nil
         }
+        let mirrorHorizontally = frameBufferRenderingState.mirrorHorizontally
+        let mirrorVertically = frameBufferRenderingState.mirrorVertically
 
         let drawableSize: CGSize
         if self.blur {
@@ -382,19 +404,20 @@ final class MetalVideoRenderingView: UIView, VideoRenderingView {
 
         if self.metalLayer.drawableSize != drawableSize {
             self.metalLayer.drawableSize = drawableSize
-
-            getCubeVertexData(
-                cropX: 0,
-                cropY: 0,
-                cropWidth: Int(drawableSize.width),
-                cropHeight: Int(drawableSize.height),
-                frameWidth: Int(drawableSize.width),
-                frameHeight: Int(drawableSize.height),
-                rotation: 0,
-                buffer: self.vertexBuffer.contents().assumingMemoryBound(to: Float.self)
-            )
         }
 
+        getCubeVertexData(
+            cropX: 0,
+            cropY: 0,
+            cropWidth: Int(drawableSize.width),
+            cropHeight: Int(drawableSize.height),
+            frameWidth: Int(drawableSize.width),
+            frameHeight: Int(drawableSize.height),
+            rotation: 0,
+            mirrorHorizontally: mirrorHorizontally,
+            mirrorVertically: mirrorVertically,
+            buffer: self.vertexBuffer.contents().assumingMemoryBound(to: Float.self)
+        )
+
         guard let drawable = self.metalLayer.nextDrawable() else {
             return nil
@@ -290,6 +290,8 @@ public final class OngoingGroupCallContext {
         public let width: Int
         public let height: Int
         public let orientation: OngoingCallVideoOrientation
+        public let mirrorHorizontally: Bool
+        public let mirrorVertically: Bool
 
         init(frameData: CallVideoFrameData) {
             if let nativeBuffer = frameData.buffer as? CallVideoFrameNativePixelBuffer {
@@ -305,6 +307,8 @@ public final class OngoingGroupCallContext {
             self.width = Int(frameData.width)
             self.height = Int(frameData.height)
             self.orientation = OngoingCallVideoOrientation(frameData.orientation)
+            self.mirrorHorizontally = frameData.mirrorHorizontally
+            self.mirrorVertically = frameData.mirrorVertically
         }
     }
 
@@ -161,6 +161,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 @property (nonatomic, readonly) int width;
 @property (nonatomic, readonly) int height;
 @property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc orientation;
+@property (nonatomic, readonly) bool mirrorHorizontally;
+@property (nonatomic, readonly) bool mirrorVertically;
 
 @end
 
@@ -34,6 +34,7 @@
 
 #include "sdk/objc/native/src/objc_frame_buffer.h"
 #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+#import "platform/darwin/TGRTCCVPixelBuffer.h"
 
 @implementation OngoingCallConnectionDescriptionWebrtc
 
@@ -326,7 +327,7 @@
 
 @implementation CallVideoFrameData
 
-- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame {
+- (instancetype)initWithBuffer:(id<CallVideoFrameBuffer>)buffer frame:(webrtc::VideoFrame const &)frame mirrorHorizontally:(bool)mirrorHorizontally mirrorVertically:(bool)mirrorVertically {
     self = [super init];
     if (self != nil) {
         _buffer = buffer;
@@ -356,6 +357,9 @@
                 break;
             }
         }
+
+        _mirrorHorizontally = mirrorHorizontally;
+        _mirrorVertically = mirrorVertically;
     }
     return self;
 }
@@ -400,12 +404,31 @@ private:
     _adapter.reset(new GroupCallVideoSinkAdapter(^(webrtc::VideoFrame const &videoFrame) {
         id<CallVideoFrameBuffer> mappedBuffer = nil;
 
+        bool mirrorHorizontally = false;
+        bool mirrorVertically = false;
+
         if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNative) {
             id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> nativeBuffer = static_cast<webrtc::ObjCFrameBuffer *>(videoFrame.video_frame_buffer().get())->wrapped_frame_buffer();
             if ([nativeBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
                 RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)nativeBuffer;
                 mappedBuffer = [[CallVideoFrameNativePixelBuffer alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
             }
+            if ([nativeBuffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
+                if (((TGRTCCVPixelBuffer *)nativeBuffer).shouldBeMirrored) {
+                    switch (videoFrame.rotation()) {
+                    case webrtc::kVideoRotation_0:
+                    case webrtc::kVideoRotation_180:
+                        mirrorHorizontally = true;
+                        break;
+                    case webrtc::kVideoRotation_90:
+                    case webrtc::kVideoRotation_270:
+                        mirrorVertically = true;
+                        break;
+                    default:
+                        break;
+                    }
+                }
+            }
         } else if (videoFrame.video_frame_buffer()->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
             rtc::scoped_refptr<webrtc::NV12BufferInterface> nv12Buffer = (webrtc::NV12BufferInterface *)videoFrame.video_frame_buffer().get();
             mappedBuffer = [[CallVideoFrameNV12Buffer alloc] initWithBuffer:nv12Buffer];
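
The rotation switch above decides which axis to flip: a TGRTCCVPixelBuffer that asks to be mirrored is flipped horizontally when the frame is stored unrotated or upside-down (0°/180°) and vertically when it is stored sideways (90°/270°), presumably so that the mirror still reads as a horizontal flip once the renderer applies the rotation. A self-contained Swift sketch of the same mapping (VideoRotation is an illustrative stand-in for webrtc::VideoRotation):

enum VideoRotation {
    case degrees0, degrees90, degrees180, degrees270
}

func mirrorFlags(shouldBeMirrored: Bool, rotation: VideoRotation) -> (horizontal: Bool, vertical: Bool) {
    guard shouldBeMirrored else {
        return (false, false)
    }
    switch rotation {
    case .degrees0, .degrees180:
        // Unrotated (or upside-down) buffers mirror along the horizontal axis.
        return (true, false)
    case .degrees90, .degrees270:
        // Sideways buffers mirror along the vertical axis instead.
        return (false, true)
    }
}
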
@@ -415,7 +438,7 @@ private:
         }
 
         if (storedSink && mappedBuffer) {
-            storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame]);
+            storedSink([[CallVideoFrameData alloc] initWithBuffer:mappedBuffer frame:videoFrame mirrorHorizontally:mirrorHorizontally mirrorVertically:mirrorVertically]);
         }
     }));
 }