Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Author: Ilya Laktyushin
Date: 2021-07-27 23:18:51 +03:00
Commit: 36ea9c8691
10 changed files with 84 additions and 38 deletions

View File

@@ -6564,4 +6564,8 @@ Sorry for the inconvenience.";
 "VoiceChat.VideoPreviewContinue" = "Continue";
 "VoiceChat.VideoPreviewShareScreenInfo" = "Everything on your screen\nwill be shared";
+"Gallery.SaveToGallery" = "Save to Gallery";
+"Gallery.VideoSaved" = "Video Saved";
+"Gallery.WaitForVideoDownoad" = "Please wait for the video to be fully downloaded.";
 "VoiceChat.VideoParticipantsLimitExceededExtended" = "The voice chat is over %@ members.\nNew participants only have access to audio stream. ";

View File

@@ -2090,7 +2090,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
                 c.setItems(strongSelf.contextMenuSpeedItems())
             })))
             if let (message, maybeFile, isWebpage) = strongSelf.contentInfo(), let file = maybeFile, !isWebpage {
-                items.append(.action(ContextMenuActionItem(text: "Save to Gallery", icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in
+                items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveToGallery, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in
                     f(.default)
                     if let strongSelf = self {
@@ -2104,22 +2104,20 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
                                 guard let controller = strongSelf.galleryController() else {
                                     return
                                 }
-                                //TODO:localize
-                                controller.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: .mediaSaved(text: "Video Saved"), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root))
+                                controller.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: .mediaSaved(text: strongSelf.presentationData.strings.Gallery_VideoSaved), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root))
                             })
                         default:
                             guard let controller = strongSelf.galleryController() else {
                                 return
                             }
-                            //TODO:localize
-                            controller.present(textAlertController(context: strongSelf.context, title: nil, text: "Please wait for the video to be fully downloaded.", actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
+                            controller.present(textAlertController(context: strongSelf.context, title: nil, text: strongSelf.presentationData.strings.Gallery_WaitForVideoDownoad, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
                             })]), in: .window(.root))
                         }
                     }
                 })))
             }
             if strongSelf.canDelete() {
-                items.append(.action(ContextMenuActionItem(text: "Delete", textColor: .destructive, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.destructiveColor) }, action: { _, f in
+                items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Common_Delete, textColor: .destructive, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.destructiveColor) }, action: { _, f in
                     f(.default)
                     if let strongSelf = self {
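
The pattern in this file is mechanical: each hardcoded English literal becomes the generated accessor for a key added in the strings file above, with the key's dots turned into underscores ("Gallery.SaveToGallery" -> Gallery_SaveToGallery). A minimal Swift sketch of that mapping, using a simplified stand-in for the real code-generated PresentationStrings type (which additionally handles format arguments and plural rules):

    // Simplified stand-in for the generated strings accessors; the real
    // PresentationStrings type is code-generated from the .strings file.
    final class StringsSketch {
        private let table: [String: String]

        init(table: [String: String]) {
            self.table = table
        }

        // Key "Gallery.SaveToGallery" surfaces as Gallery_SaveToGallery.
        var Gallery_SaveToGallery: String {
            return self.table["Gallery.SaveToGallery"] ?? "Save to Gallery"
        }

        // The typo in the key ("Downoad") carries over verbatim, because
        // the accessor name is derived mechanically from the key.
        var Gallery_WaitForVideoDownoad: String {
            return self.table["Gallery.WaitForVideoDownoad"] ?? "Please wait for the video to be fully downloaded."
        }
    }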

View File

@@ -117,7 +117,8 @@ private func peerAutoremoveSetupEntries(peer: Peer?, presentationData: Presentat
     var availableValues: [Int32] = [
         Int32.max,
         24 * 60 * 60,
-        24 * 60 * 60 * 7
+        24 * 60 * 60 * 7,
+        24 * 60 * 60 * 31,
     ]
     if isDebug {
         availableValues[1] = 5
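
Here Int32.max is the "never" sentinel and the remaining entries are durations in seconds, so the picker now offers never / 1 day / 1 week / 1 month (31 days). A standalone Swift sketch checking the arithmetic:

    // The option set after this change; Int32.max marks "never" and the
    // other values are seconds. All fit comfortably in Int32.
    let availableValues: [Int32] = [
        Int32.max,          // never auto-remove
        24 * 60 * 60,       // 1 day  = 86_400 s
        24 * 60 * 60 * 7,   // 1 week = 604_800 s
        24 * 60 * 60 * 31,  // 1 month (31 days) = 2_678_400 s
    ]
    for value in availableValues {
        print(value == Int32.max ? "never" : "\(value) s")
    }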

View File

@@ -130,7 +130,7 @@ class PeerRemoveTimeoutItemNode: ListViewItemNode, ItemListItemNode {
         self.disabledOverlayNode = ASDisplayNode()
-        self.titleNodes = (0 ..< 3).map { _ in
+        self.titleNodes = (0 ..< 4).map { _ in
             return TextNode()
         }
@@ -150,9 +150,9 @@ class PeerRemoveTimeoutItemNode: ListViewItemNode, ItemListItemNode {
         sliderView.lineSize = 2.0
         sliderView.dotSize = 5.0
         sliderView.minimumValue = 0.0
-        sliderView.maximumValue = 2.0
+        sliderView.maximumValue = CGFloat(self.titleNodes.count - 1)
         sliderView.startValue = 0.0
-        sliderView.positionsCount = 3
+        sliderView.positionsCount = self.titleNodes.count
         sliderView.useLinesForPositions = true
         sliderView.minimumUndottedValue = 0
         sliderView.disablesInteractiveTransitionGestureRecognizer = true
@@ -195,7 +195,7 @@ class PeerRemoveTimeoutItemNode: ListViewItemNode, ItemListItemNode {
             if item.availableValues[index] == Int32.max {
                 text = item.presentationData.strings.AutoremoveSetup_TimerValueNever
             } else {
-                text = item.presentationData.strings.AutoremoveSetup_TimerValueAfter(timeIntervalString(strings: item.presentationData.strings, value: item.availableValues[index])).string
+                text = timeIntervalString(strings: item.presentationData.strings, value: item.availableValues[index])
             }
             return makeLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: text, font: Font.regular(13.0), textColor: item.presentationData.theme.list.itemSecondaryTextColor), maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: 100.0, height: 100.0)))
         }
@@ -265,18 +265,23 @@ class PeerRemoveTimeoutItemNode: ListViewItemNode, ItemListItemNode {
             strongSelf.topStripeNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -min(insets.top, separatorHeight)), size: CGSize(width: layoutSize.width, height: separatorHeight))
             strongSelf.bottomStripeNode.frame = CGRect(origin: CGPoint(x: bottomStripeInset, y: contentSize.height + bottomStripeOffset), size: CGSize(width: layoutSize.width - bottomStripeInset, height: separatorHeight))
-            zip(0 ..< titleLayouts.count, titleLayouts).forEach { index, layoutAndApply in
-                let textNode = layoutAndApply.1()
-                let size = layoutAndApply.0.size
-                switch index {
-                case 0:
-                    textNode.frame = CGRect(origin: CGPoint(x: leftInset, y: 13.0), size: size)
-                case 1:
-                    textNode.frame = CGRect(origin: CGPoint(x: floor((params.width - size.width) / 2.0), y: 13.0), size: size)
-                default:
-                    textNode.frame = CGRect(origin: CGPoint(x: params.width - leftInset - size.width, y: 13.0), size: size)
+            let usableWidth = params.width - (leftInset + 7.0) * 2.0
+
+            for i in 0 ..< titleLayouts.count {
+                let textNode = titleLayouts[i].1()
+
+                let size = titleLayouts[i].0.size
+
+                let nextX: CGFloat
+                if i == 0 {
+                    nextX = leftInset
+                } else if i == titleLayouts.count - 1 {
+                    nextX = params.width - leftInset - size.width
+                } else {
+                    nextX = floor(leftInset + 7.0 + CGFloat(i) * usableWidth / CGFloat(titleLayouts.count - 1) - size.width / 2.0)
                 }
+                textNode.frame = CGRect(origin: CGPoint(x: nextX, y: 13.0), size: size)
             }
             if let sliderView = strongSelf.sliderView {
@@ -302,8 +307,7 @@ class PeerRemoveTimeoutItemNode: ListViewItemNode, ItemListItemNode {
                     sliderView.value = value
                 }
-                let sliderInset: CGFloat = leftInset
-                sliderView.frame = CGRect(origin: CGPoint(x: sliderInset, y: 38.0), size: CGSize(width: params.width - sliderInset * 2.0, height: 44.0))
+                sliderView.frame = CGRect(origin: CGPoint(x: leftInset, y: 38.0), size: CGSize(width: params.width - leftInset * 2.0, height: 44.0))
             }
         }
     })
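
The layout change above generalizes the old three-way switch (left, center, right) to any number of labels: the first is pinned to the left inset, the last is right-aligned, and the ones in between are centered on evenly spaced positions across usableWidth. A self-contained Swift sketch of the same math; the row width, inset, and label widths below are hypothetical, chosen only to show where four labels land:

    import CoreGraphics

    let paramsWidth: CGFloat = 375.0   // hypothetical row width
    let leftInset: CGFloat = 16.0      // hypothetical inset
    let labelWidths: [CGFloat] = [38.0, 42.0, 46.0, 52.0]

    let usableWidth = paramsWidth - (leftInset + 7.0) * 2.0
    for (i, width) in labelWidths.enumerated() {
        let x: CGFloat
        if i == 0 {
            x = leftInset                          // first label: pinned left
        } else if i == labelWidths.count - 1 {
            x = paramsWidth - leftInset - width    // last label: pinned right
        } else {
            // Intermediate labels: centered on evenly spaced slider stops.
            x = floor(leftInset + 7.0 + CGFloat(i) * usableWidth / CGFloat(labelWidths.count - 1) - width / 2.0)
        }
        print("label \(i): x = \(x)")
    }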

View File

@@ -58,6 +58,11 @@ final class GroupVideoNode: ASDisplayNode, PreviewVideoNode {
         super.init()
+        if let backdropVideoView = backdropVideoView {
+            self.backdropVideoViewContainer.addSubview(backdropVideoView)
+            self.view.addSubview(self.backdropVideoViewContainer)
+        }
+
         self.videoViewContainer.addSubview(self.videoView)
         self.addSubnode(self.sourceContainerNode)
         self.containerNode.view.addSubview(self.videoViewContainer)
@@ -299,7 +304,20 @@ final class GroupVideoNode: ASDisplayNode, PreviewVideoNode {
             let normalizedVideoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0))
-            self.backdropVideoView?.updateIsEnabled(self.isEnabled && self.isBlurEnabled)
+            let effectiveBlurEnabled = self.isEnabled && self.isBlurEnabled
+            if effectiveBlurEnabled {
+                self.backdropVideoView?.updateIsEnabled(true)
+            }
+            transition.updatePosition(layer: backdropVideoView.layer, position: rotatedVideoFrame.center, force: true, completion: { [weak self] value in
+                guard let strongSelf = self, value else {
+                    return
+                }
+                if !(strongSelf.isEnabled && strongSelf.isBlurEnabled) {
+                    strongSelf.backdropVideoView?.updateIsEnabled(false)
+                }
+            })
             transition.updateBounds(layer: backdropVideoView.layer, bounds: CGRect(origin: CGPoint(), size: normalizedVideoSize))
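
The reworked block changes when the blurred backdrop stops rendering: it is enabled up front if blur should be visible, but only disabled in the animation's completion, so it keeps drawing while the transition is still on screen. A generic Swift sketch of that enable-early, disable-late pattern, with a hypothetical RenderView protocol and a plain UIView animation standing in for the project's ContainedViewLayoutTransition:

    import UIKit

    // Hypothetical stand-in for a video view that can pause its rendering.
    protocol RenderView: UIView {
        func updateIsEnabled(_ isEnabled: Bool)
    }

    // Enable the expensive backdrop before animating; defer disabling it to
    // the completion so it never goes blank mid-transition.
    func animateBackdrop(_ backdrop: RenderView, to center: CGPoint, shouldRender: @escaping () -> Bool) {
        if shouldRender() {
            backdrop.updateIsEnabled(true)
        }
        UIView.animate(withDuration: 0.3, animations: {
            backdrop.center = center
        }, completion: { finished in
            // Pause rendering only once the animation actually finished and
            // the backdrop is no longer needed.
            if finished && !shouldRender() {
                backdrop.updateIsEnabled(false)
            }
        })
    }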

View File

@@ -3504,6 +3504,8 @@ public final class VoiceChatController: ViewController {
                 let videoCapturer = OngoingCallVideoCapturer()
                 let input = videoCapturer.video()
                 if let videoView = strongSelf.videoRenderingContext.makeView(input: input, blur: false) {
+                    videoView.updateIsEnabled(true)
+
                     let cameraNode = GroupVideoNode(videoView: videoView, backdropVideoView: nil)
                     let controller = VoiceChatCameraPreviewController(sharedContext: strongSelf.context.sharedContext, cameraNode: cameraNode, shareCamera: { [weak self] _, unmuted in
                         if let strongSelf = self {

View File

@@ -39,9 +39,17 @@ private var fadeImage: UIImage? = {
         let bounds = CGRect(origin: CGPoint(), size: size)
         context.clear(bounds)
-        let colorsArray = [fadeColor.withAlphaComponent(0.0).cgColor, fadeColor.cgColor] as CFArray
-        var locations: [CGFloat] = [1.0, 0.0]
-        let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)!
+        let stepCount = 10
+        var colors: [CGColor] = []
+        var locations: [CGFloat] = []
+        for i in 0 ... stepCount {
+            let t = CGFloat(i) / CGFloat(stepCount)
+            colors.append(fadeColor.withAlphaComponent((1.0 - t * t) * 0.7).cgColor)
+            locations.append(t)
+        }
+        let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colors as CFArray, locations: &locations)!
         context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
     })
 }()
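
This file and the next replace a hard two-stop fade with an eleven-stop gradient whose alpha follows a quadratic curve, which avoids the visible banding a plain linear fade can produce. A standalone Swift sketch of the sampling loop, printing the stops that would feed CGGradient; the (1 - t * t) * 0.7 curve is taken from the diff above, and the next file uses the mirrored curve t * t:

    import CoreGraphics

    // Sample a quadratic alpha ramp at 11 evenly spaced stops, as in the
    // diff: alpha(t) = (1 - t^2) * 0.7 fades out smoothly toward the bottom.
    let stepCount = 10
    var stops: [(location: CGFloat, alpha: CGFloat)] = []
    for i in 0 ... stepCount {
        let t = CGFloat(i) / CGFloat(stepCount)
        stops.append((t, (1.0 - t * t) * 0.7))
    }
    // Each pair becomes one gradient stop; with a real UIColor these would
    // be converted to CGColors and handed to CGGradient.
    for stop in stops {
        print("location \(stop.location) -> alpha \(stop.alpha)")
    }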

View File

@@ -162,9 +162,17 @@ final class VoiceChatMainStageNode: ASDisplayNode {
         let bounds = CGRect(origin: CGPoint(), size: size)
         context.clear(bounds)
-        let colorsArray = [fadeColor.cgColor, fadeColor.withAlphaComponent(0.0).cgColor] as CFArray
-        var locations: [CGFloat] = [1.0, 0.0]
-        let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)!
+        let stepCount = 10
+        var colors: [CGColor] = []
+        var locations: [CGFloat] = []
+        for i in 0 ... stepCount {
+            let t = CGFloat(i) / CGFloat(stepCount)
+            colors.append(fadeColor.withAlphaComponent(t * t).cgColor)
+            locations.append(t)
+        }
+        let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colors as CFArray, locations: &locations)!
         context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
     }) {
         self.topFadeNode.backgroundColor = UIColor(patternImage: image)
@@ -181,6 +189,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
         let colorsArray = [fadeColor.withAlphaComponent(0.0).cgColor, fadeColor.cgColor] as CFArray
         var locations: [CGFloat] = [1.0, 0.0]
         let gradient = CGGradient(colorsSpace: deviceColorSpace, colors: colorsArray, locations: &locations)!
         context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
     }) {
         self.bottomGradientNode.backgroundColor = UIColor(patternImage: image)

View File

@@ -446,6 +446,7 @@ private:
         _interface = interface;
         _isProcessingCustomSampleBuffer = [[IsProcessingCustomSampleBufferFlag alloc] init];
         _croppingBuffer = std::make_shared<std::vector<uint8_t>>();
+        _sinks = [[NSMutableDictionary alloc] init];
     }
     return self;
 }
@@ -460,6 +461,7 @@ private:
             resolvedId += std::string(":landscape");
         }
         _interface = tgcalls::VideoCaptureInterface::Create(tgcalls::StaticThreads::getThreads(), resolvedId);
+        _sinks = [[NSMutableDictionary alloc] init];
     }
     return self;
 }
@@ -473,7 +475,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
 }

 + (instancetype _Nonnull)capturerWithExternalSampleBufferProvider {
-    std::shared_ptr<tgcalls::VideoCaptureInterface> interface = tgcalls::VideoCaptureInterface::Create(tgcalls::StaticThreads::getThreads(), ":ios_custom");
+    std::shared_ptr<tgcalls::VideoCaptureInterface> interface = tgcalls::VideoCaptureInterface::Create(tgcalls::StaticThreads::getThreads(), ":ios_custom", true);
     return [[OngoingCallThreadLocalContextVideoCapturer alloc] initWithInterface:interface];
 }
 #endif
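
A note on the two _sinks assignments, inferred from the diff alone: Objective-C silently ignores messages to nil, so if _sinks was never initialized, any setObject:forKey: on it would have been a silent no-op rather than a crash. Creating the dictionary in both designated initializers makes sink registration actually take effect.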

@@ -1 +1 @@
-Subproject commit e8f7d439309abd4da1c15b97141c546295fcdcb4
+Subproject commit ce20405bf1aa732fc83c0057ae4deaa49681bb8e