Merge commit '0e086c2a107ade097cc03cdb5d3d540fd85a1413'

Ali committed 80f3fce5b3 on 2021-06-10 18:32:03 +04:00
8 changed files with 154 additions and 39 deletions

View File

@@ -40,7 +40,9 @@ final class GroupVideoNode: ASDisplayNode {
return self.readyPromise.get()
}
init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?, disabledText: String? = nil) {
public var isMainstageExclusive = false
init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?) {
self.sourceContainerNode = PinchSourceContainerNode()
self.containerNode = ASDisplayNode()
self.videoViewContainer = UIView()
@@ -193,7 +195,6 @@ final class GroupVideoNode: ASDisplayNode {
return rotatedAspect
}
var keepBackdropSize = false
func updateLayout(size: CGSize, layoutMode: LayoutMode, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, layoutMode)
let bounds = CGRect(origin: CGPoint(), size: size)

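Note on the new flag: isMainstageExclusive marks a GroupVideoNode as owned by the main stage, so the tile and list surfaces must not adopt it while it is on screen. A minimal sketch of the idea, with hypothetical simplified types standing in for the real node hierarchy (the actual guard appears in VoiceChatController's getPeerVideo below):

import Foundation

// Hypothetical stand-in for GroupVideoNode: a tile that can be claimed
// exclusively by the main stage.
final class VideoTile {
    let endpointId: String
    // When true, only the main stage may display this tile; other surfaces
    // must request their own instance instead of reusing this one.
    var isMainstageExclusive = false

    init(endpointId: String) {
        self.endpointId = endpointId
    }
}

enum VideoPosition { case tile, list, mainstage }

// Mirrors the lookup logic added below: an exclusive tile is only handed
// out when the requester is the main stage itself.
func lookup(_ tiles: [VideoTile], endpointId: String, position: VideoPosition) -> VideoTile? {
    for tile in tiles where tile.endpointId == endpointId {
        if position != .mainstage && tile.isMainstageExclusive {
            return nil
        }
        return tile
    }
    return nil
}
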
View File

@@ -2556,6 +2556,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
public func disableVideo() {
self.hasVideo = false
self.useFrontCamera = true;
if let _ = self.videoCapturer {
self.videoCapturer = nil
self.isVideoMutedDisposable.set(nil)

View File

@@ -901,8 +901,10 @@ public final class VoiceChatController: ViewController {
private var wideVideoNodes = Set<String>()
private var videoOrder: [String] = []
private var readyVideoEndpointIds = Set<String>()
private var readyVideoEndpointIdsPromise = ValuePromise<Set<String>>(Set())
private var timeoutedEndpointIds = Set<String>()
private var readyVideoDisposables = DisposableDict<String>()
private var myPeerVideoReadyDisposable = MetaDisposable()
private var peerIdToEndpointId: [PeerId: String] = [:]
@@ -1761,7 +1763,9 @@ public final class VoiceChatController: ViewController {
return nil
}
var ignore = false
if case .fullscreen = strongSelf.displayMode, !strongSelf.isPanning {
if case .mainstage = position {
ignore = false
} else if case .fullscreen = strongSelf.displayMode, !strongSelf.isPanning {
ignore = ![.mainstage, .list].contains(position)
} else {
ignore = position != .tile
@@ -1774,6 +1778,9 @@ public final class VoiceChatController: ViewController {
}
for (listEndpointId, videoNode) in strongSelf.videoNodes {
if listEndpointId == endpointId {
if position != .mainstage && videoNode.isMainstageExclusive {
return nil
}
return videoNode
}
}
@@ -2276,6 +2283,33 @@ public final class VoiceChatController: ViewController {
return self?.itemInteraction?.getAudioLevel(peerId) ?? .single(0.0)
}
self.mainStageNode.getVideo = { [weak self] endpointId, isMyPeer, completion in
if let strongSelf = self {
if isMyPeer {
if strongSelf.readyVideoEndpointIds.contains(endpointId) {
completion(strongSelf.itemInteraction?.getPeerVideo(endpointId, .mainstage))
} else {
strongSelf.myPeerVideoReadyDisposable.set((strongSelf.readyVideoEndpointIdsPromise.get()
|> filter { $0.contains(endpointId) }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
if let strongSelf = self {
completion(strongSelf.itemInteraction?.getPeerVideo(endpointId, .mainstage))
}
}))
}
} else {
strongSelf.call.makeIncomingVideoView(endpointId: endpointId, requestClone: true, completion: { videoView, backdropVideoView in
if let videoView = videoView {
completion(GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView))
} else {
completion(nil)
}
})
}
}
}
self.applicationStateDisposable = (self.context.sharedContext.applicationBindings.applicationIsActive
|> deliverOnMainQueue).start(next: { [weak self] active in
guard let strongSelf = self else {
@@ -2305,6 +2339,7 @@ public final class VoiceChatController: ViewController {
self.ignoreConnectingTimer?.invalidate()
self.readyVideoDisposables.dispose()
self.applicationStateDisposable?.dispose()
self.myPeerVideoReadyDisposable.dispose()
}
private func openSettingsMenu(sourceNode: ASDisplayNode, gesture: ContextGesture?) {
@@ -3621,7 +3656,7 @@ public final class VoiceChatController: ViewController {
if !self.mainStageNode.animating {
transition.updateFrame(node: self.mainStageNode, frame: videoFrame)
}
self.mainStageNode.update(size: videoFrame.size, sideInset: layout.safeInsets.left, bottomInset: bottomInset, isLandscape: self.isLandscape, isTablet: isTablet, transition: transition)
self.mainStageNode.update(size: videoFrame.size, sideInset: layout.safeInsets.left, bottomInset: bottomInset, isLandscape: videoFrame.width > videoFrame.height, isTablet: isTablet, transition: transition)
let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: size.width, height: layout.size.height))
@@ -3633,8 +3668,13 @@ public final class VoiceChatController: ViewController {
leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY - additionalInset), size: CGSize(width: (size.width - contentWidth) / 2.0 + sideInset, height: layout.size.height))
rightBorderFrame = CGRect(origin: CGPoint(x: size.width - (size.width - contentWidth) / 2.0 - sideInset, y: topPanelFrame.maxY - additionalInset), size: CGSize(width: layout.safeInsets.right + (size.width - contentWidth) / 2.0 + sideInset, height: layout.size.height))
} else {
leftBorderFrame = CGRect(origin: CGPoint(x: -additionalInset, y: topPanelFrame.maxY - additionalInset * 0.6), size: CGSize(width: sideInset + additionalInset + (contentLeftInset.isZero ? additionalSideInset : contentLeftInset), height: layout.size.height))
rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset - (contentLeftInset.isZero ? additionalSideInset : 0.0), y: topPanelFrame.maxY - additionalInset * 0.6), size: CGSize(width: sideInset + additionalInset + additionalSideInset, height: layout.size.height))
var isFullscreen = false
if case .fullscreen = self.displayMode {
isFullscreen = true
forceUpdate = true
}
leftBorderFrame = CGRect(origin: CGPoint(x: -additionalInset, y: topPanelFrame.maxY - additionalInset * (isFullscreen ? 0.95 : 0.8)), size: CGSize(width: sideInset + additionalInset + (contentLeftInset.isZero ? additionalSideInset : contentLeftInset), height: layout.size.height))
rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset - (contentLeftInset.isZero ? additionalSideInset : 0.0), y: topPanelFrame.maxY - additionalInset * (isFullscreen ? 0.95 : 0.8)), size: CGSize(width: sideInset + additionalInset + additionalSideInset, height: layout.size.height))
}
let topCornersFrame = CGRect(x: sideInset + (contentLeftInset.isZero ? floorToScreenPixels((size.width - contentWidth) / 2.0) : contentLeftInset), y: topPanelFrame.maxY - 60.0, width: contentWidth - sideInset * 2.0, height: 50.0 + 60.0)
@@ -5079,11 +5119,10 @@ public final class VoiceChatController: ViewController {
self.requestedVideoSources.insert(channel.endpointId)
self.call.makeIncomingVideoView(endpointId: channel.endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
Queue.mainQueue().async {
print("create video \(channel.endpointId)")
guard let strongSelf = self, let videoView = videoView else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView, disabledText: strongSelf.presentationData.strings.VoiceChat_VideoPaused)
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
strongSelf.readyVideoDisposables.set((combineLatest(videoNode.ready, .single(false) |> then(.single(true) |> delay(10.0, queue: Queue.mainQueue())))
|> deliverOnMainQueue
@@ -5093,11 +5132,13 @@ public final class VoiceChatController: ViewController {
if timeouted && !ready {
strongSelf.timeoutedEndpointIds.insert(channel.endpointId)
strongSelf.readyVideoEndpointIds.remove(channel.endpointId)
strongSelf.readyVideoEndpointIdsPromise.set(strongSelf.readyVideoEndpointIds)
strongSelf.wideVideoNodes.remove(channel.endpointId)
strongSelf.updateMembers()
} else if ready {
strongSelf.readyVideoEndpointIds.insert(channel.endpointId)
strongSelf.readyVideoEndpointIdsPromise.set(strongSelf.readyVideoEndpointIds)
strongSelf.timeoutedEndpointIds.remove(channel.endpointId)
if videoNode.aspectRatio <= 0.77 {
strongSelf.wideVideoNodes.insert(channel.endpointId)
@@ -5152,6 +5193,7 @@ public final class VoiceChatController: ViewController {
self.videoNodes[videoEndpointId] = nil
self.videoOrder.removeAll(where: { $0 == videoEndpointId })
self.readyVideoEndpointIds.remove(videoEndpointId)
self.readyVideoEndpointIdsPromise.set(self.readyVideoEndpointIds)
self.readyVideoDisposables.set(nil, forKey: videoEndpointId)
}
}

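The readiness bookkeeping above has two layers. First, readyVideoEndpointIds is mirrored into a ValuePromise so that consumers (the main stage fetching the local peer's video) can wait for an endpoint to become ready instead of polling. Second, each newly created video node races its ready signal against a delayed true, so a stream that never becomes ready within 10 seconds is marked as timed out. A condensed sketch of both patterns, using SwiftSignalKit as this file does (VideoReadiness is a hypothetical container, not a type from the codebase):

import SwiftSignalKit

final class VideoReadiness {
    private var readyEndpointIds = Set<String>()
    private let readyEndpointIdsPromise = ValuePromise<Set<String>>(Set())

    func markReady(_ endpointId: String) {
        self.readyEndpointIds.insert(endpointId)
        self.readyEndpointIdsPromise.set(self.readyEndpointIds)
    }

    func markTimedOut(_ endpointId: String) {
        self.readyEndpointIds.remove(endpointId)
        self.readyEndpointIdsPromise.set(self.readyEndpointIds)
    }

    // Fires once, on the main queue, as soon as endpointId is in the set.
    func whenReady(_ endpointId: String, _ f: @escaping () -> Void) -> Disposable {
        return (self.readyEndpointIdsPromise.get()
        |> filter { $0.contains(endpointId) }
        |> take(1)
        |> deliverOnMainQueue).start(next: { _ in
            f()
        })
    }

    // Races the node's ready signal against a 10-second timeout, as in the
    // readyVideoDisposables hunk above.
    func track(ready: Signal<Bool, NoError>, endpointId: String) -> Disposable {
        return (combineLatest(ready, .single(false) |> then(.single(true) |> delay(10.0, queue: Queue.mainQueue())))
        |> deliverOnMainQueue).start(next: { [weak self] ready, timedOut in
            if timedOut && !ready {
                self?.markTimedOut(endpointId)
            } else if ready {
                self?.markReady(endpointId)
            }
        })
    }
}
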
View File

@@ -315,6 +315,10 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
videoNode = item.getVideo()
}
if videoNode?.isMainstageExclusive == true && active {
videoNode = nil
}
if let videoNode = videoNode {
if active {
self.avatarNode.alpha = 1.0
@@ -488,7 +492,9 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.videoContainerNode.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -9.0), duration: appearanceDuration, additive: true)
strongSelf.audioLevelView?.layer.animateScale(from: 0.0, to: 1.0, duration: appearanceDuration)
}
apperanceTransition.updateAlpha(node: currentVideoNode, alpha: 0.0)
if currentVideoNode.supernode === strongSelf.videoContainerNode {
apperanceTransition.updateAlpha(node: currentVideoNode, alpha: 0.0)
}
apperanceTransition.updateAlpha(node: strongSelf.videoFadeNode, alpha: 0.0)
apperanceTransition.updateAlpha(node: strongSelf.avatarNode, alpha: 1.0)
if let audioLevelView = strongSelf.audioLevelView {

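The supernode check above is part of a pattern this commit applies throughout: because one GroupVideoNode can be reparented between the tile grid, the fullscreen list and the main stage, each owner verifies node identity before hiding or detaching a node it may no longer hold. A minimal sketch (plain AsyncDisplayKit; the helper name is illustrative):

import AsyncDisplayKit

// Only detach the node if this container still owns it; a node adopted by
// another surface in the meantime is left untouched.
func detachIfOwned(_ video: ASDisplayNode, from container: ASDisplayNode) {
    if video.supernode === container {
        video.removeFromSupernode()
    }
}
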
View File

@@ -78,6 +78,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
var controlsHidden: ((Bool) -> Void)?
var getAudioLevel: ((PeerId) -> Signal<Float, NoError>)?
var getVideo: ((String, Bool, @escaping (GroupVideoNode?) -> Void) -> Void)?
private let videoReadyDisposable = MetaDisposable()
private var silenceTimer: SwiftSignalKit.Timer?
@@ -116,8 +117,6 @@ final class VoiceChatMainStageNode: ASDisplayNode {
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
}) {
self.bottomFadeNode.backgroundColor = UIColor(patternImage: image)
self.bottomFadeNode.view.layer.rasterizationScale = UIScreen.main.scale
self.bottomFadeNode.view.layer.shouldRasterize = true
}
self.bottomFillNode = ASDisplayNode()
@@ -288,6 +287,11 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.layer.cornerCurve = .continuous
}
self.topFadeNode.view.layer.rasterizationScale = UIScreen.main.scale
self.topFadeNode.view.layer.shouldRasterize = true
self.bottomFadeNode.view.layer.rasterizationScale = UIScreen.main.scale
self.bottomFadeNode.view.layer.shouldRasterize = true
let speakingEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
speakingEffectView.layer.cornerRadius = 19.0
speakingEffectView.clipsToBounds = true
@@ -479,7 +483,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.animatingOut = true
let targetFrame = targetNode.view.convert(targetNode.bounds, to: self.supernode?.view)
self.currentVideoNode?.keepBackdropSize = true
let currentVideoNode = self.currentVideoNode
var infoView: UIView?
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
@@ -525,6 +529,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
infoView?.removeFromSuperview()
textView?.removeFromSuperview()
currentVideoNode?.isMainstageExclusive = false
targetNode.transitionIn(from: nil)
targetNode.alpha = 1.0
targetNode.highlightNode.layer.animateAlpha(from: 0.0, to: targetNode.highlightNode.alpha, duration: 0.2)
strongSelf.animatingOut = false
@@ -812,13 +818,19 @@ final class VoiceChatMainStageNode: ASDisplayNode {
if !delayTransition {
self.setAvatarHidden(true)
}
self.call.makeIncomingVideoView(endpointId: endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
var waitForFullSize = waitForFullSize
if isMyPeer && !isPresentation && isReady && !self.appeared {
waitForFullSize = false
}
self.getVideo?(endpointId, isMyPeer && !isPresentation, { [weak self] videoNode in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
guard let strongSelf = self, let videoNode = videoNode else {
return
}
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView, disabledText: presentationData.strings.VoiceChat_VideoPaused)
videoNode.isMainstageExclusive = isMyPeer
videoNode.tapped = { [weak self] in
guard let strongSelf = self else {
return
@@ -846,20 +858,31 @@ final class VoiceChatMainStageNode: ASDisplayNode {
videoNode.updateIsBlurred(isBlurred: isPaused, light: true, animated: false)
videoNode.isUserInteractionEnabled = true
let previousVideoNode = strongSelf.currentVideoNode
var previousVideoNodeSnapshot: UIView?
if let previousVideoNode = previousVideoNode, previousVideoNode.isMainstageExclusive, let snapshotView = previousVideoNode.view.snapshotView(afterScreenUpdates: false) {
previousVideoNodeSnapshot = snapshotView
snapshotView.frame = previousVideoNode.frame
previousVideoNode.view.superview?.insertSubview(snapshotView, aboveSubview: previousVideoNode.view)
}
strongSelf.currentVideoNode = videoNode
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backdropAvatarNode)
if !isReady {
if delayTransition {
videoNode.alpha = 0.0
} else if !isReady {
videoNode.alpha = 0.0
strongSelf.topFadeNode.isHidden = true
strongSelf.bottomFadeNode.isHidden = true
strongSelf.bottomFillNode.isHidden = true
} else if delayTransition {
videoNode.alpha = 0.0
} else if isMyPeer {
videoNode.layer.removeAnimation(forKey: "opacity")
videoNode.alpha = 1.0
}
if waitForFullSize {
previousVideoNode?.isMainstageExclusive = false
Queue.mainQueue().after(2.0) {
if let previousVideoNode = previousVideoNode {
previousVideoNodeSnapshot?.removeFromSuperview()
if let previousVideoNode = previousVideoNode, previousVideoNode.supernode === strongSelf && !previousVideoNode.isMainstageExclusive {
previousVideoNode.removeFromSupernode()
}
}
@@ -881,23 +904,36 @@ final class VoiceChatMainStageNode: ASDisplayNode {
}
if videoNode.alpha.isZero {
strongSelf.topFadeNode.isHidden = true
strongSelf.bottomFadeNode.isHidden = true
strongSelf.bottomFillNode.isHidden = true
if delayTransition {
strongSelf.topFadeNode.isHidden = false
strongSelf.bottomFadeNode.isHidden = false
strongSelf.bottomFillNode.isHidden = false
strongSelf.topFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
strongSelf.bottomFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
strongSelf.bottomFillNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
strongSelf.audioLevelNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
}
if let videoNode = strongSelf.currentVideoNode {
videoNode.alpha = 1.0
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.setAvatarHidden(true)
if let previousVideoNode = previousVideoNode {
strongSelf.avatarNode.layer.removeAllAnimations()
strongSelf.audioLevelNode.layer.removeAllAnimations()
previousVideoNodeSnapshot?.removeFromSuperview()
if let previousVideoNode = previousVideoNode, previousVideoNode.supernode === strongSelf {
previousVideoNode.removeFromSupernode()
}
}
})
}
} else {
previousVideoNodeSnapshot?.removeFromSuperview()
previousVideoNode?.isMainstageExclusive = false
Queue.mainQueue().after(0.07) {
if let previousVideoNode = previousVideoNode {
if let previousVideoNode = previousVideoNode, previousVideoNode.supernode === strongSelf {
previousVideoNode.removeFromSupernode()
}
}
@@ -909,7 +945,11 @@ final class VoiceChatMainStageNode: ASDisplayNode {
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
previousVideoNodeSnapshot?.removeFromSuperview()
previousVideoNode.isMainstageExclusive = false
if previousVideoNode.supernode === strongSelf {
previousVideoNode.removeFromSupernode()
}
}
strongSelf.videoReadyDisposable.set(nil)
completion?()
@@ -918,7 +958,10 @@ final class VoiceChatMainStageNode: ASDisplayNode {
})
} else {
if let currentVideoNode = self.currentVideoNode {
currentVideoNode.removeFromSupernode()
currentVideoNode.isMainstageExclusive = false
if currentVideoNode.supernode === self {
currentVideoNode.removeFromSupernode()
}
self.currentVideoNode = nil
}
self.setAvatarHidden(false)
@@ -970,7 +1013,10 @@ final class VoiceChatMainStageNode: ASDisplayNode {
} else {
self.videoReadyDisposable.set(nil)
if let currentVideoNode = self.currentVideoNode {
currentVideoNode.removeFromSupernode()
currentVideoNode.isMainstageExclusive = false
if currentVideoNode.supernode === self {
currentVideoNode.removeFromSupernode()
}
self.currentVideoNode = nil
}
completion?()

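Much of the churn above handles replacing the current video node while it may still be shared with another surface. The snapshot step is the key trick: a static snapshot of the outgoing view is inserted directly above it, the live view is released to its other owner, and the snapshot is removed once the incoming video has faded in. A simplified UIKit-only sketch of that sequence (function name and parameters are illustrative, and the real code also defers removal until the new stream is ready):

import UIKit

func replaceVideoView(old: UIView?, new: UIView, in container: UIView, fadeDuration: TimeInterval = 0.3) {
    // Cover the transition with a snapshot so the live view can be reparented.
    var snapshot: UIView?
    if let old = old, let snapshotView = old.snapshotView(afterScreenUpdates: false) {
        snapshotView.frame = old.frame
        old.superview?.insertSubview(snapshotView, aboveSubview: old)
        snapshot = snapshotView
        old.removeFromSuperview()
    }
    // Fade the new view in, then drop the snapshot.
    new.alpha = 0.0
    container.addSubview(new)
    UIView.animate(withDuration: fadeDuration, animations: {
        new.alpha = 1.0
    }, completion: { _ in
        snapshot?.removeFromSuperview()
    })
}
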
View File

@@ -1277,9 +1277,9 @@ public final class GroupCallParticipantsContext {
private let resetInviteLinksDisposable = MetaDisposable()
private let updateShouldBeRecordingDisposable = MetaDisposable()
private var localVideoIsMuted: Bool = true
private var localIsVideoPaused: Bool = true
private var localVideoIsMuted: Bool? = nil
private var localIsVideoPaused: Bool? = nil
private var localIsPresentationPaused: Bool? = nil
public struct ServiceState {
fileprivate var nextActivityRank: Int = 0
}
@@ -1877,13 +1877,14 @@ public final class GroupCallParticipantsContext {
}))
}
public func updateVideoState(peerId: PeerId, isVideoMuted: Bool, isVideoPaused: Bool) {
if self.localVideoIsMuted == isVideoMuted && self.localIsVideoPaused == isVideoPaused {
public func updateVideoState(peerId: PeerId, isVideoMuted: Bool?, isVideoPaused: Bool?, isPresentationPaused: Bool?) {
if self.localVideoIsMuted == isVideoMuted && self.localIsVideoPaused == isVideoPaused && self.localIsPresentationPaused == isPresentationPaused {
return
}
self.localVideoIsMuted = isVideoMuted
self.localIsVideoPaused = isVideoPaused
self.localIsPresentationPaused = isPresentationPaused
let disposable = MetaDisposable()
let account = self.account
@@ -1900,16 +1901,24 @@ public final class GroupCallParticipantsContext {
var flags: Int32 = 0
var videoMuted: Api.Bool?
videoMuted = isVideoMuted ? .boolTrue : .boolFalse
flags |= 1 << 3
if let isVideoMuted = isVideoMuted {
videoMuted = isVideoMuted ? .boolTrue : .boolFalse
flags |= 1 << 3
}
var videoPaused: Api.Bool?
if !isVideoMuted {
if isVideoMuted != nil, let isVideoPaused = isVideoPaused {
videoPaused = isVideoPaused ? .boolTrue : .boolFalse
flags |= 1 << 4
}
var presentationPaused: Api.Bool?
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoStopped: videoMuted, videoPaused: videoPaused, presentationPaused: nil))
if let isPresentationPaused = isPresentationPaused {
presentationPaused = isPresentationPaused ? .boolTrue : .boolFalse
flags |= 1 << 5
}
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoStopped: videoMuted, videoPaused: videoPaused, presentationPaused: presentationPaused))
|> map(Optional.init)
|> `catch` { _ -> Signal<Api.Updates?, NoError> in
return .single(nil)

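The reworked updateVideoState takes three optionals so callers can update any subset of the states in one request: a field is serialized, and its flag bit set, only when the corresponding value is non-nil (bit 3 gates videoStopped, bit 4 videoPaused, bit 5 presentationPaused, and videoPaused is only sent alongside a videoStopped value). A self-contained sketch of the encoding (the struct is illustrative, not a type from the codebase):

// Mirrors the flag assembly in the hunk above.
struct EditParticipantVideoFlags {
    var flags: Int32 = 0
    var videoStopped: Bool?
    var videoPaused: Bool?
    var presentationPaused: Bool?

    init(isVideoMuted: Bool?, isVideoPaused: Bool?, isPresentationPaused: Bool?) {
        if let isVideoMuted = isVideoMuted {
            self.videoStopped = isVideoMuted
            self.flags |= 1 << 3
        }
        if isVideoMuted != nil, let isVideoPaused = isVideoPaused {
            self.videoPaused = isVideoPaused
            self.flags |= 1 << 4
        }
        if let isPresentationPaused = isPresentationPaused {
            self.presentationPaused = isPresentationPaused
            self.flags |= 1 << 5
        }
    }
}

// For example, pausing only the screen share sets just bit 5:
// EditParticipantVideoFlags(isVideoMuted: nil, isVideoPaused: nil,
//                           isPresentationPaused: true).flags == 1 << 5
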
View File

@@ -122,6 +122,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)makeOutgoingVideoView:(bool)requestClone completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable, UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
- (void)setOnFatalError:(dispatch_block_t _Nullable)onError;
- (void)setOnPause:(void (^ _Nullable)(bool))onPause;
- (void)setOnIsActiveUpdated:(void (^_Nonnull)(bool))onIsActiveUpdated;
#if TARGET_OS_IOS

View File

@@ -324,6 +324,13 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
#endif
}
-(void)setOnPause:(void (^)(bool))onPause {
#if TARGET_OS_IOS
#else
_interface->setOnPause(onPause);
#endif
}
- (void)setOnIsActiveUpdated:(void (^)(bool))onIsActiveUpdated {
_interface->setOnIsActiveUpdated([onIsActiveUpdated](bool isActive) {
if (onIsActiveUpdated) {
@@ -354,7 +361,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
if (requestClone) {
cloneRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
cloneRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
#ifdef WEBRTC_IOS
[remoteRenderer setCloneTarget:cloneRenderer];
#endif
}
completion(remoteRenderer, cloneRenderer);
@@ -367,10 +376,10 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
cloneRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#ifdef WEBRTC_IOS
cloneRenderer.videoContentMode = UIViewContentModeScaleToFill;
[remoteRenderer setClone:cloneRenderer];
#else
cloneRenderer.videoContentMode = kCAGravityResizeAspectFill;
#endif
[remoteRenderer setClone:cloneRenderer];
}
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];