Various improvements

Ilya Laktyushin 2023-08-24 17:56:41 +04:00
parent b53be04a2e
commit 55411b1d06
32 changed files with 1686 additions and 199 deletions

View File

@ -102,6 +102,8 @@ swift_library(
"//submodules/MediaPlayer:UniversalMediaPlayer",
"//submodules/TelegramUniversalVideoContent",
"//submodules/TelegramUI/Components/CameraButtonComponent",
"//submodules/ReactionSelectionNode",
"//submodules/TelegramUI/Components/EntityKeyboard",
],
visibility = [
"//visibility:public",

View File

@ -7,6 +7,7 @@ import AccountContext
import MediaEditor
import ComponentFlow
import LottieAnimationComponent
import ReactionSelectionNode
public func decodeDrawingEntities(data: Data) -> [DrawingEntity] {
if let codableEntities = try? JSONDecoder().decode([CodableDrawingEntity].self, from: data) {
@ -36,6 +37,9 @@ private func makeEntityView(context: AccountContext, entity: DrawingEntity) -> D
}
private func prepareForRendering(entityView: DrawingEntityView) {
if let entityView = entityView as? DrawingStickerEntityView {
entityView.entity.renderImage = entityView.getRenderImage()
}
if let entityView = entityView as? DrawingBubbleEntityView {
entityView.entity.renderImage = entityView.getRenderImage()
}
@ -70,6 +74,8 @@ public final class DrawingEntitiesView: UIView, TGPhotoDrawingEntitiesView {
public var getEntityInitialRotation: () -> CGFloat = { return 0.0 }
public var getEntityAdditionalScale: () -> CGFloat = { return 1.0 }
public var getAvailableReactions: () -> [ReactionItem] = { return [] }
public var hasSelectionChanged: (Bool) -> Void = { _ in }
var selectionChanged: (DrawingEntity?) -> Void = { _ in }
var requestedMenuForEntityView: (DrawingEntityView, Bool) -> Void = { _, _ in }
@ -654,6 +660,8 @@ public final class DrawingEntitiesView: UIView, TGPhotoDrawingEntitiesView {
} else {
return
}
} else if let stickerEntityView = selectedEntityView as? DrawingStickerEntityView {
stickerEntityView.onDeselection()
}
self.selectedEntityView = nil

View File

@ -432,7 +432,7 @@ final class DrawingLocationEntititySelectionView: DrawingEntitySelectionView {
self.border.lineCap = .round
self.border.fillColor = UIColor.clear.cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.5).cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.75).cgColor
self.layer.addSublayer(self.border)
for handle in handles {

View File

@ -2880,13 +2880,13 @@ public class DrawingScreen: ViewController, TGPhotoDrawingInterfaceController, U
var stickers: [Any] = []
for entity in self.entitiesView.entities {
if let sticker = entity as? DrawingStickerEntity, case let .file(file) = sticker.content {
if let sticker = entity as? DrawingStickerEntity, case let .file(file, _) = sticker.content {
let coder = PostboxEncoder()
coder.encodeRootObject(file)
stickers.append(coder.makeData())
} else if let text = entity as? DrawingTextEntity, let subEntities = text.renderSubEntities {
for sticker in subEntities {
if let sticker = sticker as? DrawingStickerEntity, case let .file(file) = sticker.content {
if let sticker = sticker as? DrawingStickerEntity, case let .file(file, _) = sticker.content {
let coder = PostboxEncoder()
coder.encodeRootObject(file)
stickers.append(coder.makeData())
@ -3117,12 +3117,16 @@ public final class DrawingToolsInteraction {
}))
}
if !isVideo {
actions.append(ContextMenuAction(content: .text(title: presentationData.strings.Paint_Duplicate, accessibilityLabel: presentationData.strings.Paint_Duplicate), action: { [weak self, weak entityView] in
if let self, let entityView {
let newEntity = self.entitiesView.duplicate(entityView.entity)
self.entitiesView.selectEntity(newEntity)
}
}))
if let stickerEntity = entityView.entity as? DrawingStickerEntity, case let .file(_, type) = stickerEntity.content, case .reaction = type {
} else {
actions.append(ContextMenuAction(content: .text(title: presentationData.strings.Paint_Duplicate, accessibilityLabel: presentationData.strings.Paint_Duplicate), action: { [weak self, weak entityView] in
if let self, let entityView {
let newEntity = self.entitiesView.duplicate(entityView.entity)
self.entitiesView.selectEntity(newEntity)
}
}))
}
}
let entityFrame = entityView.convert(entityView.selectionBounds, to: node.view).offsetBy(dx: 0.0, dy: -6.0)
let controller = ContextMenuController(actions: actions)

View File

@ -11,7 +11,12 @@ import StickerResources
import AccountContext
import MediaEditor
import UniversalMediaPlayer
import TelegramPresentationData
import TelegramUniversalVideoContent
import ReactionSelectionNode
import UndoUI
import EntityKeyboard
import ComponentFlow
public final class DrawingStickerEntityView: DrawingEntityView {
private var stickerEntity: DrawingStickerEntity {
@ -24,6 +29,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
private var currentSize: CGSize?
private var backgroundNode: ASImageNode?
private let imageNode: TransformImageNode
private var animationNode: AnimatedStickerNode?
private var videoNode: UniversalVideoNode?
@ -40,6 +46,14 @@ public final class DrawingStickerEntityView: DrawingEntityView {
super.init(context: context, entity: entity)
if case .file(_, .reaction) = entity.content {
let backgroundNode = ASImageNode()
backgroundNode.image = UIImage(bundleImageName: "Media Editor/ReactionBackground")
backgroundNode.displaysAsynchronously = false
self.addSubnode(backgroundNode)
self.backgroundNode = backgroundNode
}
self.addSubview(self.imageNode.view)
self.setup()
@ -55,7 +69,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
}
private var file: TelegramMediaFile? {
if case let .file(file) = self.stickerEntity.content {
if case let .file(file, _) = self.stickerEntity.content {
return file
} else {
return nil
@ -70,6 +84,19 @@ public final class DrawingStickerEntityView: DrawingEntityView {
}
}
func getRenderImage() -> UIImage? {
guard case let .file(_, type) = self.stickerEntity.content, case .reaction = type else {
return nil
}
let rect = self.bounds
UIGraphicsBeginImageContextWithOptions(rect.size, false, 2.0)
self.drawHierarchy(in: rect, afterScreenUpdates: true)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image
}
private var video: TelegramMediaFile? {
if case let .video(file) = self.stickerEntity.content {
return file
@ -80,7 +107,7 @@ public final class DrawingStickerEntityView: DrawingEntityView {
private var dimensions: CGSize {
switch self.stickerEntity.content {
case let .file(file):
case let .file(file, _):
return file.dimensions?.cgSize ?? CGSize(width: 512.0, height: 512.0)
case let .image(image, _):
return image.size
@ -118,6 +145,10 @@ public final class DrawingStickerEntityView: DrawingEntityView {
if file.isCustomTemplateEmoji {
animationNode.dynamicColor = UIColor(rgb: 0xffffff)
}
if !self.stickerEntity.isAnimated {
self.imageNode.isHidden = true
}
}
self.imageNode.setSignal(chatMessageAnimatedSticker(postbox: self.context.account.postbox, userLocation: .other, file: file, small: false, size: dimensions.cgSize.aspectFitted(CGSize(width: 256.0, height: 256.0))))
self.stickerFetchedDisposable.set(freeMediaFileResourceInteractiveFetched(account: self.context.account, userLocation: .other, fileReference: stickerPackFileReference(file), resource: file.resource).start())
@ -264,7 +295,8 @@ public final class DrawingStickerEntityView: DrawingEntityView {
let fittedDimensions = dimensions.cgSize.aspectFitted(CGSize(width: 384.0, height: 384.0))
let source = AnimatedStickerResourceSource(account: self.context.account, resource: file.resource, isVideo: file.isVideoSticker || file.mimeType == "video/webm")
let pathPrefix = self.context.account.postbox.mediaBox.shortLivedResourceCachePathPrefix(file.resource.id)
self.animationNode?.setup(source: source, width: Int(fittedDimensions.width), height: Int(fittedDimensions.height), playbackMode: .loop, mode: .direct(cachePathPrefix: pathPrefix))
let playbackMode: AnimatedStickerPlaybackMode = self.stickerEntity.isAnimated ? .loop : .still(.start)
self.animationNode?.setup(source: source, width: Int(fittedDimensions.width), height: Int(fittedDimensions.height), playbackMode: playbackMode, mode: .direct(cachePathPrefix: pathPrefix))
self.cachedDisposable.set((source.cachedDataPath(width: 384, height: 384)
|> deliverOn(Queue.concurrentDefaultQueue())).start())
@ -279,15 +311,23 @@ public final class DrawingStickerEntityView: DrawingEntityView {
super.layoutSubviews()
let size = self.bounds.size
if size.width > 0 && self.currentSize != size {
self.currentSize = size
let sideSize: CGFloat = max(size.width, size.height)
let boundingSize = CGSize(width: sideSize, height: sideSize)
var boundingSize = CGSize(width: sideSize, height: sideSize)
if let backgroundNode = self.backgroundNode {
backgroundNode.frame = CGRect(origin: .zero, size: boundingSize)
boundingSize = CGSize(width: floor(sideSize * 0.63), height: floor(sideSize * 0.63))
}
let imageSize = self.dimensions.aspectFitted(boundingSize)
let imageFrame = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: (size.height - imageSize.height) / 2.0), size: imageSize)
var imageFrame = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: (size.height - imageSize.height) / 2.0), size: imageSize)
if case let .file(_, type) = self.stickerEntity.content, case .reaction = type {
imageFrame = imageFrame.offsetBy(dx: -3.0, dy: -9.0)
}
self.imageNode.asyncLayout()(TransformImageArguments(corners: ImageCorners(), imageSize: imageSize, boundingSize: imageSize, intrinsicInsets: UIEdgeInsets()))()
self.imageNode.frame = imageFrame
if let animationNode = self.animationNode {
@ -310,6 +350,207 @@ public final class DrawingStickerEntityView: DrawingEntityView {
self.update(animated: false)
}
}
func onDeselection() {
let _ = self.dismissReactionSelection()
}
private weak var reactionContextNode: ReactionContextNode?
fileprivate func dismissReactionSelection() -> Bool {
if let reactionContextNode = self.reactionContextNode {
reactionContextNode.animateOut(to: nil, animatingOutToReaction: false)
self.reactionContextNode = nil
Queue.mainQueue().after(0.35) {
reactionContextNode.view.removeFromSuperview()
}
return false
} else {
return true
}
}
override func selectedTapAction() -> Bool {
if case let .file(_, type) = self.stickerEntity.content, case .reaction = type {
guard let containerView = self.containerView, let superview = containerView.superview?.superview?.superview?.superview, self.reactionContextNode == nil else {
return self.dismissReactionSelection()
}
let availableSize = superview.frame.size
let reactionItems = containerView.getAvailableReactions()
let insets = UIEdgeInsets(top: 64.0, left: 0.0, bottom: 64.0, right: 0.0)
let layout: (ContainedViewLayoutTransition) -> Void = { [weak self, weak superview] transition in
guard let self, let superview, let reactionContextNode = self.reactionContextNode else {
return
}
let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0)
reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, isCoveredByInput: false, isAnimatingOut: false, transition: transition)
}
let reactionContextNodeTransition: Transition = .immediate
let reactionContextNode: ReactionContextNode
reactionContextNode = ReactionContextNode(
context: self.context,
animationCache: self.context.animationCache,
presentationData: self.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme),
items: reactionItems.map(ReactionContextItem.reaction),
selectedItems: Set(),
title: nil,
getEmojiContent: { [weak self] animationCache, animationRenderer in
guard let self else {
preconditionFailure()
}
let mappedReactionItems: [EmojiComponentReactionItem] = reactionItems.map { reaction -> EmojiComponentReactionItem in
return EmojiComponentReactionItem(reaction: reaction.reaction.rawValue, file: reaction.stillAnimation)
}
return EmojiPagerContentComponent.emojiInputData(
context: self.context,
animationCache: animationCache,
animationRenderer: animationRenderer,
isStandalone: false,
isStatusSelection: false,
isReactionSelection: true,
isEmojiSelection: false,
hasTrending: false,
topReactionItems: mappedReactionItems,
areUnicodeEmojiEnabled: false,
areCustomEmojiEnabled: true,
chatPeerId: self.context.account.peerId,
selectedItems: Set(),
premiumIfSavedMessages: false
)
},
isExpandedUpdated: { transition in
layout(transition)
},
requestLayout: { transition in
layout(transition)
},
requestUpdateOverlayWantsToBeBelowKeyboard: { transition in
layout(transition)
}
)
reactionContextNode.displayTail = true
reactionContextNode.forceTailToRight = true
reactionContextNode.forceDark = true
self.reactionContextNode = reactionContextNode
reactionContextNode.reactionSelected = { [weak self] updateReaction, _ in
guard let self else {
return
}
let _ = (self.context.engine.stickers.availableReactions()
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] availableReactions in
guard let self, let availableReactions else {
return
}
var animation: TelegramMediaFile?
for reaction in availableReactions.reactions {
if reaction.value == updateReaction.reaction {
animation = reaction.selectAnimation
break
}
}
guard let animation else {
return
}
self.stickerEntity.content = .file(animation, .reaction(updateReaction.reaction))
if let animationNode = self.animationNode, let snapshot = animationNode.view.snapshotView(afterScreenUpdates: false) {
snapshot.frame = animationNode.frame
snapshot.layer.transform = animationNode.transform
snapshot.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshot.removeFromSuperview()
})
snapshot.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
self.addSubview(snapshot)
}
self.animationNode?.removeFromSupernode()
self.animationNode = nil
self.didSetUpAnimationNode = false
self.isPlaying = false
self.currentSize = nil
self.setup()
self.applyVisibility()
self.setNeedsLayout()
self.animationNode?.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
self.animationNode?.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
let _ = self.dismissReactionSelection()
})
}
reactionContextNode.premiumReactionsSelected = { [weak self] file in
let _ = self
let _ = file
// guard let self, let component = self.component else {
// return
// }
//
// guard let file else {
// let context = component.context
// var replaceImpl: ((ViewController) -> Void)?
// let controller = PremiumDemoScreen(context: context, subject: .uniqueReactions, forceDark: true, action: {
// let controller = PremiumIntroScreen(context: context, source: .reactions)
// replaceImpl?(controller)
// })
// controller.disposed = { [weak self] in
// self?.updateIsProgressPaused()
// }
// replaceImpl = { [weak controller] c in
// controller?.replace(with: c)
// }
// component.controller()?.push(controller)
// return
// }
//
// let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
// let undoController = UndoOverlayController(presentationData: presentationData, content: .sticker(context: component.context, file: file, loop: true, title: nil, text: presentationData.strings.Chat_PremiumReactionToastTitle, undoText: presentationData.strings.Chat_PremiumReactionToastAction, customAction: { [weak self] in
// guard let self, let component = self.component else {
// return
// }
//
// let context = component.context
// var replaceImpl: ((ViewController) -> Void)?
// let controller = PremiumDemoScreen(context: context, subject: .uniqueReactions, forceDark: true, action: {
// let controller = PremiumIntroScreen(context: context, source: .reactions)
// replaceImpl?(controller)
// })
// controller.disposed = { [weak self] in
// self?.updateIsProgressPaused()
// }
// replaceImpl = { [weak controller] c in
// controller?.replace(with: c)
// }
// component.controller()?.push(controller)
// }), elevatedLayout: false, animateInAsReplacement: false, blurred: true, action: { _ in true })
// component.controller()?.present(undoController, in: .current)
}
let anchorRect = self.convert(self.bounds, to: superview).offsetBy(dx: 0.0, dy: -20.0)
reactionContextNodeTransition.setFrame(view: reactionContextNode.view, frame: CGRect(origin: CGPoint(), size: availableSize))
reactionContextNode.updateLayout(size: availableSize, insets: insets, anchorRect: anchorRect, centerAligned: true, isCoveredByInput: false, isAnimatingOut: false, transition: reactionContextNodeTransition.containedViewLayoutTransition)
superview.addSubnode(reactionContextNode)
reactionContextNode.animateIn(from: anchorRect)
return true
} else {
return super.selectedTapAction()
}
}
public override func update(animated: Bool) {
self.center = self.stickerEntity.position
@ -366,9 +607,13 @@ public final class DrawingStickerEntityView: DrawingEntityView {
selectionView.transform = .identity
let maxSide = max(self.selectionBounds.width, self.selectionBounds.height)
let center = self.selectionBounds.center
var center = self.selectionBounds.center
let scale = self.superview?.superview?.layer.value(forKeyPath: "transform.scale.x") as? CGFloat ?? 1.0
if case let .file(_, type) = self.stickerEntity.content, case .reaction = type {
center = center.offsetBy(dx: -8.0 * scale, dy: -18.0 * scale)
}
selectionView.center = self.convert(center, to: selectionView.superview)
selectionView.bounds = CGRect(origin: .zero, size: CGSize(width: (maxSide * self.stickerEntity.scale) * scale + selectionView.selectionInset * 2.0, height: (maxSide * self.stickerEntity.scale) * scale + selectionView.selectionInset * 2.0))
@ -406,7 +651,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView {
self.border.lineCap = .round
self.border.fillColor = UIColor.clear.cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.5).cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.75).cgColor
self.border.shadowColor = UIColor.black.cgColor
self.border.shadowRadius = 1.0
self.border.shadowOpacity = 0.5
@ -448,7 +693,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView {
private var currentHandle: CALayer?
override func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
guard let entityView = self.entityView, let entity = entityView.entity as? DrawingStickerEntity else {
guard let entityView = self.entityView as? DrawingStickerEntityView, let entity = entityView.entity as? DrawingStickerEntity else {
return
}
let location = gestureRecognizer.location(in: self)
@ -457,6 +702,8 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView {
case .began:
self.snapTool.maybeSkipFromStart(entityView: entityView, position: entity.position)
let _ = entityView.dismissReactionSelection()
if let sublayers = self.layer.sublayers {
for layer in sublayers {
if layer.frame.contains(location) {
@ -515,7 +762,7 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView {
entity.position = updatedPosition
entity.scale = updatedScale
entity.rotation = updatedRotation
entityView.update()
entityView.update(animated: false)
gestureRecognizer.setTranslation(.zero, in: entityView)
case .ended, .cancelled:

View File

@ -736,7 +736,7 @@ public final class DrawingTextEntityView: DrawingEntityView, UITextViewDelegate
}
let emojiTextPosition = emojiRect.center.offsetBy(dx: -textSize.width / 2.0, dy: -textSize.height / 2.0)
let entity = DrawingStickerEntity(content: .file(file))
let entity = DrawingStickerEntity(content: .file(file, .sticker))
entity.referenceDrawingSize = CGSize(width: itemSize * 4.0, height: itemSize * 4.0)
entity.scale = scale
entity.position = textPosition.offsetBy(
@ -773,7 +773,7 @@ final class DrawingTextEntititySelectionView: DrawingEntitySelectionView {
self.border.lineCap = .round
self.border.fillColor = UIColor.clear.cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.5).cgColor
self.border.strokeColor = UIColor(rgb: 0xffffff, alpha: 0.75).cgColor
self.layer.addSublayer(self.border)
for handle in handles {

View File

@ -147,7 +147,7 @@ private final class StickerSelectionComponent: Component {
self.interaction = ChatEntityKeyboardInputNode.Interaction(
sendSticker: { [weak self] file, silent, schedule, query, clearInput, sourceView, sourceRect, sourceLayer, _ in
if let self, let controller = self.component?.getController() {
controller.completion(.file(file.media))
controller.completion(.file(file.media, .sticker))
controller.forEachController { c in
if let c = c as? StickerPackScreenImpl {
c.dismiss(animated: true)
@ -532,6 +532,9 @@ public class StickerPickerScreen: ViewController {
self.storyStickersContentView?.audioAction = { [weak self] in
self?.controller?.presentAudioPicker()
}
self.storyStickersContentView?.reactionAction = { [weak self] in
self?.controller?.addReaction()
}
let gifItems: Signal<EntityKeyboardGifContent?, NoError>
if controller.hasGifs {
@ -864,7 +867,7 @@ public class StickerPickerScreen: ViewController {
})
})
} else if let file = item.itemFile {
strongSelf.controller?.completion(.file(file))
strongSelf.controller?.completion(.file(file, .sticker))
strongSelf.controller?.dismiss(animated: true)
} else if case let .staticEmoji(emoji) = item.content {
if let image = generateImage(CGSize(width: 256.0, height: 256.0), scale: 1.0, rotatedContext: { size, context in
@ -1263,7 +1266,7 @@ public class StickerPickerScreen: ViewController {
guard let self else {
return false
}
self.controller?.completion(.file(fileReference.media))
self.controller?.completion(.file(fileReference.media, .sticker))
self.controller?.dismiss(animated: true)
return true
}
@ -1274,7 +1277,7 @@ public class StickerPickerScreen: ViewController {
}
})
} else {
self.controller?.completion(.file(file))
self.controller?.completion(.file(file, .sticker))
self.controller?.dismiss(animated: true)
}
},
@ -1952,6 +1955,7 @@ public class StickerPickerScreen: ViewController {
public var presentGallery: () -> Void = { }
public var presentLocationPicker: () -> Void = { }
public var presentAudioPicker: () -> Void = { }
public var addReaction: () -> Void = { }
public init(context: AccountContext, inputData: Signal<StickerPickerInputData, NoError>, defaultToEmoji: Bool = false, hasGifs: Bool = false) {
self.context = context
@ -2011,7 +2015,7 @@ public class StickerPickerScreen: ViewController {
}
}
private final class CustomContentButton: Component {
private final class InteractiveStickerButtonContent: Component {
let theme: PresentationTheme
let title: String
let iconName: String
@ -2032,7 +2036,7 @@ private final class CustomContentButton: Component {
self.tintContainerView = tintContainerView
}
public static func ==(lhs: CustomContentButton, rhs: CustomContentButton) -> Bool {
public static func ==(lhs: InteractiveStickerButtonContent, rhs: InteractiveStickerButtonContent) -> Bool {
if lhs.theme !== rhs.theme {
return false
}
@ -2059,7 +2063,7 @@ private final class CustomContentButton: Component {
private var icon: ComponentView<Empty>
private var title: ComponentView<Empty>
private var component: CustomContentButton?
private var component: InteractiveStickerButtonContent?
override init(frame: CGRect) {
self.icon = ComponentView<Empty>()
@ -2076,7 +2080,7 @@ private final class CustomContentButton: Component {
fatalError("init(coder:) has not been implemented")
}
func update(component: CustomContentButton, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
func update(component: InteractiveStickerButtonContent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
// if component.useOpaqueTheme {
// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlOpaqueSelectionColor.cgColor
// self.tintBackgroundLayer.backgroundColor = UIColor.white.cgColor
@ -2107,8 +2111,8 @@ private final class CustomContentButton: Component {
containerSize: availableSize
)
let padding: CGFloat = 30.0
let spacing: CGFloat = 3.0
let padding: CGFloat = 7.0
let spacing: CGFloat = 4.0
let buttonSize = CGSize(width: padding + iconSize.width + spacing + titleSize.width + padding, height: 34.0)
if let view = self.icon.view {
@ -2149,6 +2153,83 @@ private final class CustomContentButton: Component {
}
}
private final class InteractiveReactionButtonContent: Component {
let theme: PresentationTheme
public init(
theme: PresentationTheme
) {
self.theme = theme
}
public static func ==(lhs: InteractiveReactionButtonContent, rhs: InteractiveReactionButtonContent) -> Bool {
if lhs.theme !== rhs.theme {
return false
}
return true
}
final class View: UIView {
override public static var layerClass: AnyClass {
return PassthroughLayer.self
}
private var icon: ComponentView<Empty>
private var component: InteractiveReactionButtonContent?
override init(frame: CGRect) {
self.icon = ComponentView<Empty>()
super.init(frame: frame)
self.isExclusiveTouch = true
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func update(component: InteractiveReactionButtonContent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
// if component.useOpaqueTheme {
// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlOpaqueSelectionColor.cgColor
// self.tintBackgroundLayer.backgroundColor = UIColor.white.cgColor
// } else {
// self.backgroundLayer.backgroundColor = component.theme.chat.inputMediaPanel.panelContentControlVibrantSelectionColor.cgColor
// self.tintBackgroundLayer.backgroundColor = UIColor(white: 1.0, alpha: 0.2).cgColor
// }
let iconSize = self.icon.update(
transition: .immediate,
component: AnyComponent(BundleIconComponent(
name: "Media Editor/Reaction",
tintColor: nil,
maxSize: CGSize(width: 52.0, height: 52.0)
)),
environment: {},
containerSize: availableSize
)
if let view = self.icon.view {
if view.superview == nil {
self.addSubview(view)
}
transition.setFrame(view: view, frame: CGRect(origin: .zero, size: iconSize))
}
return iconSize
}
}
public func makeView() -> View {
return View(frame: CGRect())
}
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}
final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
typealias EnvironmentType = ChildEnvironment
@ -2203,9 +2284,9 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
let spacing = remainingWidth / CGFloat(rowItemsCount - 1)
if spacing < context.component.minSpacing {
groups.append(currentGroup)
} else {
currentGroup.append(i)
currentGroup = []
}
currentGroup.append(i)
}
if !currentGroup.isEmpty {
groups.append(currentGroup)
@ -2224,7 +2305,7 @@ final class ItemStack<ChildEnvironment: Equatable>: CombinedComponent {
let remainingWidth = context.availableSize.width - itemsWidth - context.component.padding * 2.0
spacing = remainingWidth / CGFloat(group.count - 1)
var nextX: CGFloat = floorToScreenPixels((context.availableSize.width - itemsWidth) / 2.0) //context.component.padding
var nextX: CGFloat = context.component.padding
for i in group {
let child = updatedChildren[i]
let frame = CGRect(origin: CGPoint(x: nextX, y: size.height + floorToScreenPixels((groupHeight - child.size.height) / 2.0)), size: child.size)
@ -2250,6 +2331,7 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
var locationAction: () -> Void = {}
var audioAction: () -> Void = {}
var reactionAction: () -> Void = {}
func update(theme: PresentationTheme, strings: PresentationStrings, useOpaqueTheme: Bool, availableSize: CGSize, transition: Transition) -> CGSize {
let padding: CGFloat = 22.0
@ -2265,7 +2347,7 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
content: AnyComponentWithIdentity(
id: "content",
component: AnyComponent(
CustomContentButton(
InteractiveStickerButtonContent(
theme: theme,
title: "LOCATION",
iconName: "Chat/Attach Menu/Location",
@ -2280,7 +2362,7 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
}
})
)
)
),
// AnyComponentWithIdentity(
// id: "audio",
// component: AnyComponent(
@ -2288,7 +2370,7 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
// content: AnyComponentWithIdentity(
// id: "audio",
// component: AnyComponent(
// CustomContentButton(
// InteractiveStickerButtonContent(
// theme: theme,
// title: "AUDIO",
// iconName: "Media Editor/Audio",
@ -2303,7 +2385,24 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
// }
// })
// )
// )
// ),
AnyComponentWithIdentity(
id: "reaction",
component: AnyComponent(
CameraButton(
content: AnyComponentWithIdentity(
id: "reaction",
component: AnyComponent(
InteractiveReactionButtonContent(theme: theme)
)
),
action: { [weak self] in
if let self {
self.reactionAction()
}
})
)
)
],
padding: 18.0,
minSpacing: 8.0

View File

@ -102,7 +102,7 @@ private class LegacyPaintStickerEntity: LegacyPaintEntity {
self.animated = entity.isAnimated
switch entity.content {
case let .file(file):
case let .file(file, _):
self.file = file
if file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm" {
self.source = AnimatedStickerResourceSource(postbox: postbox, resource: file.resource, isVideo: file.isVideoSticker || file.mimeType == "video/webm")

View File

@ -656,7 +656,7 @@ public final class ReactionContextNode: ASDisplayNode, UIScrollViewDelegate {
}
public func updateLayout(size: CGSize, insets: UIEdgeInsets, anchorRect: CGRect, centerAligned: Bool = false, isCoveredByInput: Bool, isAnimatingOut: Bool, transition: ContainedViewLayoutTransition) {
self.updateLayout(size: size, insets: insets, anchorRect: anchorRect, centerAligned: centerAligned, isCoveredByInput: isCoveredByInput, isAnimatingOut: isAnimatingOut, transition: transition, animateInFromAnchorRect: nil, animateOutToAnchorRect: nil)
self.updateLayout(size: size, insets: insets, anchorRect: anchorRect, centerAligned: centerAligned, isCoveredByInput: isCoveredByInput, isAnimatingOut: isAnimatingOut, transition: transition, animateInFromAnchorRect: nil, animateOutToAnchorRect: nil)
}
public func updateIsIntersectingContent(isIntersectingContent: Bool, transition: ContainedViewLayoutTransition) {

View File

@ -453,6 +453,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1361650766] = { return Api.MaskCoords.parse_maskCoords($0) }
dict[-1300094593] = { return Api.MediaArea.parse_inputMediaAreaVenue($0) }
dict[-544523486] = { return Api.MediaArea.parse_mediaAreaGeoPoint($0) }
dict[1272429760] = { return Api.MediaArea.parse_mediaAreaSuggestedReaction($0) }
dict[-1098720356] = { return Api.MediaArea.parse_mediaAreaVenue($0) }
dict[64088654] = { return Api.MediaAreaCoordinates.parse_mediaAreaCoordinates($0) }
dict[940666592] = { return Api.Message.parse_message($0) }

View File

@ -50,6 +50,7 @@ public extension Api {
enum MediaArea: TypeConstructorDescription {
case inputMediaAreaVenue(coordinates: Api.MediaAreaCoordinates, queryId: Int64, resultId: String)
case mediaAreaGeoPoint(coordinates: Api.MediaAreaCoordinates, geo: Api.GeoPoint)
case mediaAreaSuggestedReaction(coordinates: Api.MediaAreaCoordinates, reaction: Api.Reaction)
case mediaAreaVenue(coordinates: Api.MediaAreaCoordinates, geo: Api.GeoPoint, title: String, address: String, provider: String, venueId: String, venueType: String)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
@ -69,6 +70,13 @@ public extension Api {
coordinates.serialize(buffer, true)
geo.serialize(buffer, true)
break
case .mediaAreaSuggestedReaction(let coordinates, let reaction):
if boxed {
buffer.appendInt32(1272429760)
}
coordinates.serialize(buffer, true)
reaction.serialize(buffer, true)
break
case .mediaAreaVenue(let coordinates, let geo, let title, let address, let provider, let venueId, let venueType):
if boxed {
buffer.appendInt32(-1098720356)
@ -90,6 +98,8 @@ public extension Api {
return ("inputMediaAreaVenue", [("coordinates", coordinates as Any), ("queryId", queryId as Any), ("resultId", resultId as Any)])
case .mediaAreaGeoPoint(let coordinates, let geo):
return ("mediaAreaGeoPoint", [("coordinates", coordinates as Any), ("geo", geo as Any)])
case .mediaAreaSuggestedReaction(let coordinates, let reaction):
return ("mediaAreaSuggestedReaction", [("coordinates", coordinates as Any), ("reaction", reaction as Any)])
case .mediaAreaVenue(let coordinates, let geo, let title, let address, let provider, let venueId, let venueType):
return ("mediaAreaVenue", [("coordinates", coordinates as Any), ("geo", geo as Any), ("title", title as Any), ("address", address as Any), ("provider", provider as Any), ("venueId", venueId as Any), ("venueType", venueType as Any)])
}
@ -132,6 +142,24 @@ public extension Api {
return nil
}
}
public static func parse_mediaAreaSuggestedReaction(_ reader: BufferReader) -> MediaArea? {
var _1: Api.MediaAreaCoordinates?
if let signature = reader.readInt32() {
_1 = Api.parse(reader, signature: signature) as? Api.MediaAreaCoordinates
}
var _2: Api.Reaction?
if let signature = reader.readInt32() {
_2 = Api.parse(reader, signature: signature) as? Api.Reaction
}
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.MediaArea.mediaAreaSuggestedReaction(coordinates: _1!, reaction: _2!)
}
else {
return nil
}
}
public static func parse_mediaAreaVenue(_ reader: BufferReader) -> MediaArea? {
var _1: Api.MediaAreaCoordinates?
if let signature = reader.readInt32() {

View File

@ -423,6 +423,12 @@ func mediaAreaFromApiMediaArea(_ mediaArea: Api.MediaArea) -> MediaArea? {
longitude = 0.0
}
return .venue(coordinates: coodinatesFromApiMediaAreaCoordinates(coordinates), venue: MediaArea.Venue(latitude: latitude, longitude: longitude, venue: MapVenue(title: title, address: address, provider: provider, id: venueId, type: venueType), queryId: nil, resultId: nil))
case let .mediaAreaSuggestedReaction(coordinates, reaction):
if let reaction = MessageReaction.Reaction(apiReaction: reaction) {
return .reaction(coordinates: coodinatesFromApiMediaAreaCoordinates(coordinates), reaction: reaction)
} else {
return nil
}
}
}
@ -440,6 +446,8 @@ func apiMediaAreasFromMediaAreas(_ mediaAreas: [MediaArea]) -> [Api.MediaArea] {
} else {
apiMediaAreas.append(.mediaAreaGeoPoint(coordinates: inputCoordinates, geo: .geoPoint(flags: 0, long: venue.longitude, lat: venue.latitude, accessHash: 0, accuracyRadius: nil)))
}
case let .reaction(_, reaction):
apiMediaAreas.append(.mediaAreaSuggestedReaction(coordinates: inputCoordinates, reaction: reaction.apiReaction))
}
}
return apiMediaAreas

View File

@ -210,7 +210,7 @@ public class BoxedMessage: NSObject {
public class Serialization: NSObject, MTSerialization {
public func currentLayer() -> UInt {
return 161
return 162
}
public func parseMessage(_ data: Data!) -> Any! {

View File

@ -122,9 +122,11 @@ public enum MediaArea: Codable, Equatable {
}
case venue(coordinates: Coordinates, venue: Venue)
case reaction(coordinates: Coordinates, reaction: MessageReaction.Reaction)
private enum MediaAreaType: Int32 {
case venue
case reaction
}
public init(from decoder: Decoder) throws {
@ -138,6 +140,10 @@ public enum MediaArea: Codable, Equatable {
let coordinates = try container.decode(MediaArea.Coordinates.self, forKey: .coordinates)
let venue = try container.decode(MediaArea.Venue.self, forKey: .value)
self = .venue(coordinates: coordinates, venue: venue)
case .reaction:
let coordinates = try container.decode(MediaArea.Coordinates.self, forKey: .coordinates)
let reaction = try container.decode(MessageReaction.Reaction.self, forKey: .value)
self = .reaction(coordinates: coordinates, reaction: reaction)
}
}
@ -149,6 +155,10 @@ public enum MediaArea: Codable, Equatable {
try container.encode(MediaAreaType.venue.rawValue, forKey: .type)
try container.encode(coordinates, forKey: .coordinates)
try container.encode(venue, forKey: .value)
case let .reaction(coordinates, reaction):
try container.encode(MediaAreaType.reaction.rawValue, forKey: .type)
try container.encode(coordinates, forKey: .coordinates)
try container.encode(reaction, forKey: .value)
}
}
}
@ -158,6 +168,8 @@ public extension MediaArea {
switch self {
case let .venue(coordinates, _):
return coordinates
case let .reaction(coordinates, _):
return coordinates
}
}
}
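Editor's note: the new .reaction case follows the same type / coordinates / value layout as .venue, so it round-trips through any standard Codable coder. A minimal sketch under that assumption, with hypothetical coordinate values and a built-in heart reaction (JSONEncoder/JSONDecoder stand in for whichever coder the app actually uses):

import Foundation

// Hypothetical values; MediaArea is declared Codable and Equatable above,
// so a plain JSON round trip is enough to exercise the new case.
let area: MediaArea = .reaction(
    coordinates: MediaArea.Coordinates(x: 50.0, y: 50.0, width: 27.8, height: 15.6, rotation: 0.0),
    reaction: .builtin("❤")
)
if let data = try? JSONEncoder().encode(area),
   let decoded = try? JSONDecoder().decode(MediaArea.self, from: data) {
    assert(decoded == area) // encoded via the .type / .coordinates / .value keys added above
}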

View File

@ -1390,7 +1390,7 @@ final class AvatarEditorScreenComponent: Component {
try? backgroundImage.jpegData(compressionQuality: 0.8)?.write(to: tempUrl)
let drawingSize = CGSize(width: 1920.0, height: 1920.0)
let entity = DrawingStickerEntity(content: .file(file))
let entity = DrawingStickerEntity(content: .file(file, .sticker))
entity.referenceDrawingSize = drawingSize
entity.position = CGPoint(x: drawingSize.width / 2.0, y: drawingSize.height / 2.0)
entity.scale = 3.3
@ -1398,7 +1398,7 @@ final class AvatarEditorScreenComponent: Component {
var fileId: Int64 = 0
var stickerPackId: Int64 = 0
var stickerPackAccessHash: Int64 = 0
if case let .file(file) = entity.content {
if case let .file(file, _) = entity.content {
if file.isCustomEmoji {
fileId = file.fileId.id
} else if file.isAnimatedSticker {

View File

@ -48,17 +48,48 @@ public enum CodableDrawingEntity: Equatable {
}
}
private var coordinates: MediaArea.Coordinates? {
var position: CGPoint?
var size: CGSize?
var scale: CGFloat?
var rotation: CGFloat?
switch self {
case let .location(entity):
position = entity.position
size = entity.renderImage?.size
scale = entity.scale
rotation = entity.rotation
case let .sticker(entity):
position = entity.position
size = entity.baseSize
scale = entity.scale
rotation = entity.rotation
default:
return nil
}
guard let position, let size, let scale, let rotation else {
return nil
}
return MediaArea.Coordinates(
x: position.x / 1080.0 * 100.0,
y: position.y / 1920.0 * 100.0,
width: size.width * scale / 1080.0 * 100.0,
height: size.height * scale / 1920.0 * 100.0,
rotation: rotation / .pi * 180.0
)
}
public var mediaArea: MediaArea? {
guard let coordinates = self.coordinates else {
return nil
}
switch self {
case let .location(entity):
return .venue(
coordinates: MediaArea.Coordinates(
x: entity.position.x / 1080.0 * 100.0,
y: entity.position.y / 1920.0 * 100.0,
width: (entity.renderImage?.size.width ?? 0.0) * entity.scale / 1080.0 * 100.0,
height: (entity.renderImage?.size.height ?? 0.0) * entity.scale / 1920.0 * 100.0,
rotation: entity.rotation / .pi * 180.0
),
coordinates: coordinates,
venue: MediaArea.Venue(
latitude: entity.location.latitude,
longitude: entity.location.longitude,
@ -67,6 +98,15 @@ public enum CodableDrawingEntity: Equatable {
resultId: entity.resultId
)
)
case let .sticker(entity):
if case let .file(_, type) = entity.content, case let .reaction(reaction) = type {
return .reaction(
coordinates: coordinates,
reaction: reaction
)
} else {
return nil
}
default:
return nil
}
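Editor's note: the percentage mapping in the coordinates property above assumes the fixed 1080x1920 story canvas. A worked example with hypothetical entity values:

import CoreGraphics

// Hypothetical sticker entity placed at the canvas center, base size 200x200,
// scaled 1.5x and rotated 45 degrees.
let position = CGPoint(x: 540.0, y: 960.0)
let size = CGSize(width: 200.0, height: 200.0)
let scale: CGFloat = 1.5
let rotation: CGFloat = .pi / 4.0

let x = position.x / 1080.0 * 100.0               // 50.0 (% of canvas width)
let y = position.y / 1920.0 * 100.0               // 50.0 (% of canvas height)
let width = size.width * scale / 1080.0 * 100.0   // ~27.78
let height = size.height * scale / 1920.0 * 100.0 // 15.625
let degrees = rotation / .pi * 180.0              // 45.0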

View File

@ -19,16 +19,20 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
case rectangle
case dualPhoto
}
case file(TelegramMediaFile)
public enum FileType: Equatable {
case sticker
case reaction(MessageReaction.Reaction)
}
case file(TelegramMediaFile, FileType)
case image(UIImage, ImageType)
case video(TelegramMediaFile)
case dualVideoReference
public static func == (lhs: Content, rhs: Content) -> Bool {
switch lhs {
case let .file(lhsFile):
if case let .file(rhsFile) = rhs {
return lhsFile.fileId == rhsFile.fileId
case let .file(lhsFile, lhsFileType):
if case let .file(rhsFile, rhsFileType) = rhs {
return lhsFile.fileId == rhsFile.fileId && lhsFileType == rhsFileType
} else {
return false
}
@ -56,6 +60,7 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
private enum CodingKeys: String, CodingKey {
case uuid
case file
case reaction
case imagePath
case videoFile
case isRectangle
@ -67,10 +72,11 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
case rotation
case mirrored
case isExplicitlyStatic
case renderImage
}
public var uuid: UUID
public let content: Content
public var content: Content
public var referenceDrawingSize: CGSize
public var position: CGPoint
@ -94,7 +100,7 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
switch self.content {
case let .image(image, _):
dimensions = image.size
case let .file(file):
case let .file(file, _):
dimensions = file.dimensions?.cgSize ?? CGSize(width: 512.0, height: 512.0)
case let .video(file):
dimensions = file.dimensions?.cgSize ?? CGSize(width: 512.0, height: 512.0)
@ -108,11 +114,16 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
public var isAnimated: Bool {
switch self.content {
case let .file(file):
case let .file(file, type):
if self.isExplicitlyStatic {
return false
} else {
return file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm"
switch type {
case .reaction:
return false
default:
return file.isAnimatedSticker || file.isVideoSticker || file.mimeType == "video/webm"
}
}
case .image:
return false
@ -160,7 +171,13 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
if let _ = try container.decodeIfPresent(Bool.self, forKey: .dualVideo) {
self.content = .dualVideoReference
} else if let file = try container.decodeIfPresent(TelegramMediaFile.self, forKey: .file) {
self.content = .file(file)
let fileType: Content.FileType
if let reaction = try container.decodeIfPresent(MessageReaction.Reaction.self, forKey: .reaction) {
fileType = .reaction(reaction)
} else {
fileType = .sticker
}
self.content = .file(file, fileType)
} else if let imagePath = try container.decodeIfPresent(String.self, forKey: .imagePath), let image = UIImage(contentsOfFile: fullEntityMediaPath(imagePath)) {
let isRectangle = try container.decodeIfPresent(Bool.self, forKey: .isRectangle) ?? false
let isDualPhoto = try container.decodeIfPresent(Bool.self, forKey: .isDualPhoto) ?? false
@ -184,14 +201,24 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
self.rotation = try container.decode(CGFloat.self, forKey: .rotation)
self.mirrored = try container.decode(Bool.self, forKey: .mirrored)
self.isExplicitlyStatic = try container.decodeIfPresent(Bool.self, forKey: .isExplicitlyStatic) ?? false
if let renderImageData = try? container.decodeIfPresent(Data.self, forKey: .renderImage) {
self.renderImage = UIImage(data: renderImageData)
}
}
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(self.uuid, forKey: .uuid)
switch self.content {
case let .file(file):
case let .file(file, fileType):
try container.encode(file, forKey: .file)
switch fileType {
case let .reaction(reaction):
try container.encode(reaction, forKey: .reaction)
default:
break
}
case let .image(image, imageType):
let imagePath = "\(self.uuid).png"
let fullImagePath = fullEntityMediaPath(imagePath)
@ -219,6 +246,10 @@ public final class DrawingStickerEntity: DrawingEntity, Codable {
try container.encode(self.rotation, forKey: .rotation)
try container.encode(self.mirrored, forKey: .mirrored)
try container.encode(self.isExplicitlyStatic, forKey: .isExplicitlyStatic)
if let renderImage, let data = renderImage.pngData() {
try container.encode(data, forKey: .renderImage)
}
}
public func duplicate(copy: Bool) -> DrawingEntity {

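Editor's note: a minimal sketch of how the two FileType cases might be used when building sticker entities, assuming a TelegramMediaFile is already at hand (for a reaction, the diff above takes the reaction's selectAnimation); the helper names and the built-in heart reaction are hypothetical:

import TelegramCore // TelegramMediaFile, MessageReaction (assumed module layout)
import MediaEditor  // DrawingStickerEntity (assumed module layout)

// Regular sticker: animates as before.
func makeStickerEntity(file: TelegramMediaFile) -> DrawingStickerEntity {
    return DrawingStickerEntity(content: .file(file, .sticker))
}

// Reaction sticker: isAnimated returns false for the .reaction case, so the
// animation node is set up with playbackMode .still(.start) as shown above.
func makeReactionEntity(animationFile: TelegramMediaFile) -> DrawingStickerEntity {
    return DrawingStickerEntity(content: .file(animationFile, .reaction(.builtin("❤"))))
}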
View File

@ -730,15 +730,17 @@ public final class MediaEditor {
}
private func setRate(_ rate: Float) {
let hostTime: UInt64 = 0
let hostTime: UInt64 = mach_absolute_time()
let time: TimeInterval = 0
let cmHostTime = CMClockMakeHostTimeFromSystemUnits(hostTime)
let cmVTime = CMTimeMakeWithSeconds(time, preferredTimescale: 1000000)
let futureTime = CMTimeAdd(cmHostTime, cmVTime)
self.player?.setRate(rate, time: .invalid, atHostTime: futureTime)
self.additionalPlayer?.setRate(rate, time: .invalid, atHostTime: futureTime)
self.audioPlayer?.setRate(rate, time: .invalid, atHostTime: futureTime)
let itemTime = self.player?.currentItem?.currentTime() ?? .invalid
self.player?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.audioPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
if rate > 0.0 {
self.onPlaybackAction(.play)
@ -812,7 +814,7 @@ public final class MediaEditor {
public func setAudioTrack(_ audioTrack: MediaAudioTrack?) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrack(audioTrack)
return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(nil)
}
if let audioTrack {
@ -828,12 +830,12 @@ public final class MediaEditor {
}
}
public func setAudioTrackTrimRange(_ trimRange: Range<Double>, apply: Bool) {
public func setAudioTrackTrimRange(_ trimRange: Range<Double>?, apply: Bool) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrackTrimRange(trimRange)
}
if apply {
if apply, let trimRange {
self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
}
}
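Editor's note: the setRate change above replaces the zero host time and .invalid item time with a shared mach_absolute_time anchor and the current item time, so the main, additional and audio players start in lockstep. A condensed sketch of the same pattern (the function name is illustrative):

import AVFoundation

// Tell every player to reach the same item time at the same absolute host
// time, instead of starting each one whenever its setRate call happens to run.
// Note: per AVFoundation documentation, setRate(_:time:atHostTime:) requires
// automaticallyWaitsToMinimizeStalling to be false.
func startSynchronized(_ players: [AVPlayer], rate: Float) {
    let hostTime = CMClockMakeHostTimeFromSystemUnits(mach_absolute_time())
    let itemTime = players.first?.currentItem?.currentTime() ?? .invalid
    for player in players {
        player.setRate(rate, time: itemTime, atHostTime: hostTime)
    }
}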

View File

@ -63,18 +63,22 @@ private func prerenderTextTransformations(entity: DrawingEntity, image: UIImage,
func composerEntitiesForDrawingEntity(postbox: Postbox, textScale: CGFloat, entity: DrawingEntity, colorSpace: CGColorSpace, tintColor: UIColor? = nil) -> [MediaEditorComposerEntity] {
if let entity = entity as? DrawingStickerEntity {
let content: MediaEditorComposerStickerEntity.Content
switch entity.content {
case let .file(file):
content = .file(file)
case let .image(image, _):
content = .image(image)
case let .video(file):
content = .video(file)
case .dualVideoReference:
return []
if case let .file(_, type) = entity.content, case .reaction = type, let renderImage = entity.renderImage, let image = CIImage(image: renderImage, options: [.colorSpace: colorSpace]) {
return [MediaEditorComposerStaticEntity(image: image, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: false)]
} else {
let content: MediaEditorComposerStickerEntity.Content
switch entity.content {
case let .file(file, _):
content = .file(file)
case let .image(image, _):
content = .image(image)
case let .video(file):
content = .video(file)
case .dualVideoReference:
return []
}
return [MediaEditorComposerStickerEntity(postbox: postbox, content: content, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: entity.mirrored, colorSpace: colorSpace, tintColor: tintColor, isStatic: entity.isExplicitlyStatic)]
}
return [MediaEditorComposerStickerEntity(postbox: postbox, content: content, position: entity.position, scale: entity.scale, rotation: entity.rotation, baseSize: entity.baseSize, mirrored: entity.mirrored, colorSpace: colorSpace, tintColor: tintColor, isStatic: entity.isExplicitlyStatic)]
} else if let renderImage = entity.renderImage, let image = CIImage(image: renderImage, options: [.colorSpace: colorSpace]) {
if let entity = entity as? DrawingBubbleEntity {
return [MediaEditorComposerStaticEntity(image: image, position: entity.position, scale: 1.0, rotation: entity.rotation, baseSize: entity.size, mirrored: false)]

View File

@ -444,7 +444,7 @@ public final class MediaEditorValues: Codable, Equatable {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>) -> MediaEditorValues {
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
}

View File

@ -220,6 +220,14 @@ public final class MediaEditorVideoExport {
}
}
var audioTimeRange: CMTimeRange? {
if let audioTrimRange = self.values.audioTrackTrimRange {
return CMTimeRange(start: CMTime(seconds: audioTrimRange.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: audioTrimRange.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
} else {
return nil
}
}
var composerDimensions: CGSize {
return CGSize(width: 1080.0, height: 1920.0)
}
@ -364,7 +372,6 @@ public final class MediaEditorVideoExport {
var inputAsset = asset
if let audioData = self.configuration.values.audioTrack {
let mixComposition = AVMutableComposition()
let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioData.path))
guard
@ -378,13 +385,19 @@ public final class MediaEditorVideoExport {
return
}
try? videoTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: videoAssetTrack, at: .zero)
let timeRange: CMTimeRange = CMTimeRangeMake(start: .zero, duration: duration)
try? videoTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: .zero)
if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
try? audioTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: audioAssetTrack, at: .zero)
try? audioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
}
try? musicTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: musicAssetTrack, at: .zero)
var musicRange = timeRange
if let audioTrackRange = self.configuration.audioTimeRange {
musicRange = audioTrackRange
}
try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: .zero)
inputAsset = mixComposition
}
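Editor's note: a small worked example of the new audioTimeRange computation with a hypothetical trim range; this is the slice of the music asset that the export code above inserts into the composition's music track:

import Foundation
import CoreMedia

// Hypothetical trim of the audio track to 2.0–5.5 seconds.
let trim: Range<Double> = 2.0..<5.5
let musicRange = CMTimeRange(
    start: CMTime(seconds: trim.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)),
    end: CMTime(seconds: trim.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
)
// musicRange.duration.seconds == 3.5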

View File

@ -45,6 +45,8 @@ swift_library(
"//submodules/DeviceAccess",
"//submodules/LocationUI",
"//submodules/TelegramUI/Components/AudioWaveformComponent",
"//submodules/ReactionSelectionNode",
"//submodules/TelegramUI/Components/VolumeSliderContextItem",
],
visibility = [
"//visibility:public",

View File

@ -9,6 +9,7 @@ import ViewControllerComponent
import ComponentDisplayAdapters
import TelegramPresentationData
import AccountContext
import Postbox
import TelegramCore
import MultilineTextComponent
import DrawingUI
@ -33,6 +34,8 @@ import TextFormat
import DeviceAccess
import LocationUI
import LegacyMediaPickerUI
import ReactionSelectionNode
import VolumeSliderContextItem
enum DrawingScreenType {
case drawing
@ -271,6 +274,8 @@ final class MediaEditorScreenComponent: Component {
private var inputMediaNodeStateContext = ChatEntityKeyboardInputNode.StateContext()
private var inputMediaInteraction: ChatEntityKeyboardInputNode.Interaction?
private var inputMediaNode: ChatEntityKeyboardInputNode?
private var appliedAudioData: VideoScrubberComponent.AudioData?
private var component: MediaEditorScreenComponent?
private weak var state: State?
@ -913,6 +918,7 @@ final class MediaEditorScreenComponent: Component {
if let controller = environment.controller() as? MediaEditorScreen {
mediaEditor = controller.node.mediaEditor
}
let previousAudioData = self.appliedAudioData
var audioData: VideoScrubberComponent.AudioData?
if let audioTrack = mediaEditor?.values.audioTrack {
let audioSamples = mediaEditor?.values.audioTrackSamples
@ -923,78 +929,8 @@ final class MediaEditorScreenComponent: Component {
peak: audioSamples?.peak ?? 0
)
}
var scrubberBottomInset: CGFloat = 0.0
if let playerState = state.playerState {
let scrubberInset: CGFloat = 9.0
let scrubberSize = self.scrubber.update(
transition: transition,
component: AnyComponent(VideoScrubberComponent(
context: component.context,
generationTimestamp: playerState.generationTimestamp,
duration: playerState.duration,
startPosition: playerState.timeRange?.lowerBound ?? 0.0,
endPosition: playerState.timeRange?.upperBound ?? min(playerState.duration, storyMaxVideoDuration),
position: playerState.position,
maxDuration: storyMaxVideoDuration,
isPlaying: playerState.isPlaying,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
audioData: audioData,
videoTrimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
if let mediaEditor {
mediaEditor.setVideoTrimRange(start..<end, apply: done)
if done {
mediaEditor.seek(start, andPlay: true)
} else {
mediaEditor.seek(updatedEnd ? end : start, andPlay: false)
}
}
},
positionUpdated: { position, done in
if let mediaEditor {
mediaEditor.seek(position, andPlay: done)
}
},
audioTrimUpdated: { [weak mediaEditor] start, end, _, done in
if let mediaEditor {
mediaEditor.setAudioTrackTrimRange(start..<end, apply: done)
if done {
}
}
}
)),
environment: {},
containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height)
)
let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height - 8.0 + controlsBottomInset), size: scrubberSize)
if let scrubberView = self.scrubber.view {
var animateIn = false
if scrubberView.superview == nil {
animateIn = true
if let inputPanelBackgroundView = self.inputPanelBackground.view, inputPanelBackgroundView.superview != nil {
self.insertSubview(scrubberView, belowSubview: inputPanelBackgroundView)
} else {
self.addSubview(scrubberView)
}
}
transition.setFrame(view: scrubberView, frame: scrubberFrame)
if !self.animatingButtons {
transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0)
} else if animateIn {
scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2)
}
}
scrubberBottomInset = scrubberSize.height + 10.0
} else {
}
self.appliedAudioData = audioData
var timeoutValue: String
let timeoutSelected: Bool
switch component.privacy.timeout {
@ -1317,7 +1253,7 @@ final class MediaEditorScreenComponent: Component {
sizeValue = textEntity.fontSize
}
var inputPanelBottomInset: CGFloat = scrubberBottomInset - controlsBottomInset
var inputPanelBottomInset: CGFloat = -controlsBottomInset
if inputHeight > 0.0 {
inputPanelBottomInset = inputHeight - environment.safeInsets.bottom
}
@ -1330,6 +1266,82 @@ final class MediaEditorScreenComponent: Component {
transition.setAlpha(view: inputPanelView, alpha: isEditingTextEntity || component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities ? 0.0 : 1.0)
}
var bottomControlsTransition = transition
if let playerState = state.playerState {
let scrubberInset: CGFloat = 9.0
if (audioData == nil) != (previousAudioData == nil) {
bottomControlsTransition = .easeInOut(duration: 0.25)
}
let scrubberSize = self.scrubber.update(
transition: transition,
component: AnyComponent(VideoScrubberComponent(
context: component.context,
generationTimestamp: playerState.generationTimestamp,
duration: playerState.duration,
startPosition: playerState.timeRange?.lowerBound ?? 0.0,
endPosition: playerState.timeRange?.upperBound ?? min(playerState.duration, storyMaxVideoDuration),
position: playerState.position,
maxDuration: storyMaxVideoDuration,
isPlaying: playerState.isPlaying,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
audioData: audioData,
videoTrimUpdated: { [weak mediaEditor] start, end, updatedEnd, done in
if let mediaEditor {
mediaEditor.setVideoTrimRange(start..<end, apply: done)
if done {
mediaEditor.seek(start, andPlay: true)
} else {
mediaEditor.seek(updatedEnd ? end : start, andPlay: false)
}
}
},
positionUpdated: { position, done in
if let mediaEditor {
mediaEditor.seek(position, andPlay: done)
}
},
audioTrimUpdated: { [weak mediaEditor] start, end, _, done in
if let mediaEditor {
mediaEditor.setAudioTrackTrimRange(start..<end, apply: done)
if done {
}
}
},
audioLongPressed: { [weak self] sourceView in
if let self, let controller = self.environment?.controller() as? MediaEditorScreen {
controller.node.presentAudioOptions(sourceView: sourceView)
}
}
)),
environment: {},
containerSize: CGSize(width: previewSize.width - scrubberInset * 2.0, height: availableSize.height)
)
let scrubberFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - scrubberSize.width) / 2.0), y: availableSize.height - environment.safeInsets.bottom - scrubberSize.height + controlsBottomInset - inputPanelSize.height + 3.0), size: scrubberSize)
if let scrubberView = self.scrubber.view {
var animateIn = false
if scrubberView.superview == nil {
animateIn = true
if let inputPanelBackgroundView = self.inputPanelBackground.view, inputPanelBackgroundView.superview != nil {
self.insertSubview(scrubberView, belowSubview: inputPanelBackgroundView)
} else {
self.addSubview(scrubberView)
}
}
bottomControlsTransition.setFrame(view: scrubberView, frame: scrubberFrame)
if !self.animatingButtons {
transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities || isEditingCaption ? 0.0 : 1.0)
} else if animateIn {
scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
scrubberView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
scrubberView.layer.animateScale(from: 0.6, to: 1.0, duration: 0.2)
}
}
} else {
}
let displayTopButtons = !(self.inputPanelExternalState.isEditing || isEditingTextEntity || component.isDisplayingTool)
@@ -1744,6 +1756,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private let stickerPickerInputData = Promise<StickerPickerInputData>()
private var availableReactions: [ReactionItem] = []
private var availableReactionsDisposable: Disposable?
private var dismissPanGestureRecognizer: UIPanGestureRecognizer?
private var isDisplayingTool = false
@@ -1862,9 +1877,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
forceHasPremium: true
)
let signal = combineLatest(queue: .mainQueue(),
emojiItems,
stickerItems
let signal = combineLatest(
queue: .mainQueue(),
emojiItems,
stickerItems
) |> map { emoji, stickers -> StickerPickerInputData in
return StickerPickerInputData(emoji: emoji, stickers: stickers, gifs: nil)
}
@@ -1892,6 +1908,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
}
})
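// Make the fetched story reactions available to the drawing entities view.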
self.entitiesView.getAvailableReactions = { [weak self] in
return self?.availableReactions ?? []
}
self.availableReactionsDisposable = (allowedStoryReactions(context: controller.context)
|> deliverOnMainQueue).start(next: { [weak self] reactions in
if let self {
self.availableReactions = reactions
}
})
}
deinit {
@@ -1899,6 +1926,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.gradientColorsDisposable?.dispose()
self.appInForegroundDisposable?.dispose()
self.playbackPositionDisposable?.dispose()
self.availableReactionsDisposable?.dispose()
}
private func setup(with subject: MediaEditorScreen.Subject) {
@@ -2986,6 +3014,32 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}), in: .window(.root))
}
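// Presents a context menu over the audio scrubber with a volume slider and a "Remove Audio" action.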
func presentAudioOptions(sourceView: UIView) {
let items: [ContextMenuItem] = [
.custom(VolumeSliderContextItem(minValue: 0.0, value: 0.75, valueChanged: { _, _ in
}), false),
.action(
ContextMenuActionItem(
text: "Remove Audio",
icon: { theme in return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Delete"), color: theme.contextMenu.primaryColor) },
action: { [weak self] f in
f.dismissWithResult(.default)
if let self {
self.mediaEditor?.setAudioTrack(nil)
self.requestUpdate(transition: .easeInOut(duration: 0.25))
}
}
)
)
]
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }.withUpdated(theme: defaultDarkPresentationTheme)
let contextController = ContextController(presentationData: presentationData, source: .reference(ReferenceContentSource(sourceView: sourceView, contentArea: UIScreen.main.bounds, customPosition: CGPoint(x: 0.0, y: -3.0))), items: .single(ContextController.Items(content: .list(items))))
self.controller?.present(contextController, in: .window(.root))
}
func updateModalTransitionFactor(_ value: CGFloat, transition: ContainedViewLayoutTransition) {
guard let layout = self.validLayout, case .compact = layout.metrics.widthClass else {
return
@@ -3139,7 +3193,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.controller?.isSavingAvailable = true
self.controller?.requestLayout(transition: .immediate)
if case let .file(file) = content {
if case let .file(file, _) = content {
if file.isCustomEmoji {
self.defaultToEmoji = true
} else {
@@ -3177,6 +3231,20 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.presentAudioPicker()
}
}
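// Inserts a default heart reaction sticker when the reaction button in the sticker picker is tapped.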
controller.addReaction = { [weak self, weak controller] in
if let self {
self.stickerScreen = nil
controller?.dismiss(animated: true)
let heart = "❤️".strippedEmoji
if let reaction = self.availableReactions.first(where: { reaction in
return reaction.reaction.rawValue == .builtin(heart)
}) {
let stickerEntity = DrawingStickerEntity(content: .file(reaction.stillAnimation, .reaction(.builtin(heart))))
self.interaction?.insertEntity(stickerEntity, scale: 1.33)
}
}
}
self.stickerScreen = controller
self.controller?.present(controller, in: .window(.root))
return
@@ -4123,13 +4191,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
for entity in codableEntities {
switch entity {
case let .sticker(stickerEntity):
if case let .file(file) = stickerEntity.content {
if case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file)
}
case let .text(textEntity):
if let subEntities = textEntity.renderSubEntities {
for entity in subEntities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file) = stickerEntity.content {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file)
}
}
@@ -4938,3 +5006,125 @@ func hasFirstResponder(_ view: UIView) -> Bool {
}
return false
}
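// Collects the reactions that can be placed on a story: the user's top reactions first, then the remaining enabled reactions, de-duplicated by reaction value.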
private func allowedStoryReactions(context: AccountContext) -> Signal<[ReactionItem], NoError> {
let viewKey: PostboxViewKey = .orderedItemList(id: Namespaces.OrderedItemList.CloudTopReactions)
let topReactions = context.account.postbox.combinedView(keys: [viewKey])
|> map { views -> [RecentReactionItem] in
guard let view = views.views[viewKey] as? OrderedItemListView else {
return []
}
return view.items.compactMap { item -> RecentReactionItem? in
return item.contents.get(RecentReactionItem.self)
}
}
return combineLatest(
context.engine.stickers.availableReactions(),
topReactions
)
|> take(1)
|> map { availableReactions, topReactions -> [ReactionItem] in
guard let availableReactions = availableReactions else {
return []
}
var result: [ReactionItem] = []
var existingIds = Set<MessageReaction.Reaction>()
for topReaction in topReactions {
switch topReaction.content {
case let .builtin(value):
if let reaction = availableReactions.reactions.first(where: { $0.value == .builtin(value) }) {
guard let centerAnimation = reaction.centerAnimation else {
continue
}
guard let aroundAnimation = reaction.aroundAnimation else {
continue
}
if existingIds.contains(reaction.value) {
continue
}
existingIds.insert(reaction.value)
result.append(ReactionItem(
reaction: ReactionItem.Reaction(rawValue: reaction.value),
appearAnimation: reaction.appearAnimation,
stillAnimation: reaction.selectAnimation,
listAnimation: centerAnimation,
largeListAnimation: reaction.activateAnimation,
applicationAnimation: aroundAnimation,
largeApplicationAnimation: reaction.effectAnimation,
isCustom: false
))
} else {
continue
}
case let .custom(file):
if existingIds.contains(.custom(file.fileId.id)) {
continue
}
existingIds.insert(.custom(file.fileId.id))
result.append(ReactionItem(
reaction: ReactionItem.Reaction(rawValue: .custom(file.fileId.id)),
appearAnimation: file,
stillAnimation: file,
listAnimation: file,
largeListAnimation: file,
applicationAnimation: nil,
largeApplicationAnimation: nil,
isCustom: true
))
}
}
for reaction in availableReactions.reactions {
guard let centerAnimation = reaction.centerAnimation else {
continue
}
guard let aroundAnimation = reaction.aroundAnimation else {
continue
}
if !reaction.isEnabled {
continue
}
if existingIds.contains(reaction.value) {
continue
}
existingIds.insert(reaction.value)
result.append(ReactionItem(
reaction: ReactionItem.Reaction(rawValue: reaction.value),
appearAnimation: reaction.appearAnimation,
stillAnimation: reaction.selectAnimation,
listAnimation: centerAnimation,
largeListAnimation: reaction.activateAnimation,
applicationAnimation: aroundAnimation,
largeApplicationAnimation: reaction.effectAnimation,
isCustom: false
))
}
return result
}
}
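// Anchors a context menu to a given source view; used for the audio options menu on the scrubber.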
private final class ReferenceContentSource: ContextReferenceContentSource {
private let sourceView: UIView
private let contentArea: CGRect
private let customPosition: CGPoint
init(sourceView: UIView, contentArea: CGRect, customPosition: CGPoint) {
self.sourceView = sourceView
self.contentArea = contentArea
self.customPosition = customPosition
}
func transitionInfo() -> ContextControllerReferenceViewInfo? {
return ContextControllerReferenceViewInfo(referenceView: self.sourceView, contentAreaInScreenSpace: self.contentArea, customPosition: self.customPosition, actionsPosition: .top)
}
}

View File

@@ -64,6 +64,7 @@ final class VideoScrubberComponent: Component {
let videoTrimUpdated: (Double, Double, Bool, Bool) -> Void
let positionUpdated: (Double, Bool) -> Void
let audioTrimUpdated: (Double, Double, Bool, Bool) -> Void
let audioLongPressed: ((UIView) -> Void)?
init(
context: AccountContext,
@@ -79,7 +80,8 @@ final class VideoScrubberComponent: Component {
audioData: AudioData?,
videoTrimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
positionUpdated: @escaping (Double, Bool) -> Void,
audioTrimUpdated: @escaping (Double, Double, Bool, Bool) -> Void
audioTrimUpdated: @escaping (Double, Double, Bool, Bool) -> Void,
audioLongPressed: ((UIView) -> Void)?
) {
self.context = context
self.generationTimestamp = generationTimestamp
@@ -95,6 +97,7 @@ final class VideoScrubberComponent: Component {
self.videoTrimUpdated = videoTrimUpdated
self.positionUpdated = positionUpdated
self.audioTrimUpdated = audioTrimUpdated
self.audioLongPressed = audioLongPressed
}
static func ==(lhs: VideoScrubberComponent, rhs: VideoScrubberComponent) -> Bool {
@@ -131,7 +134,7 @@ final class VideoScrubberComponent: Component {
return true
}
final class View: UIView, UITextFieldDelegate {
final class View: UIView, UIGestureRecognizerDelegate {
private let audioClippingView: UIView
private let audioContainerView: UIView
private let audioBackgroundView: BlurredBackgroundView
@@ -251,6 +254,10 @@ final class VideoScrubberComponent: Component {
self.audioButton.addTarget(self, action: #selector(self.audioButtonPressed), for: .touchUpInside)
self.videoButton.addTarget(self, action: #selector(self.videoButtonPressed), for: .touchUpInside)
let longPressGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.longPressed(_:)))
longPressGesture.delegate = self
self.addGestureRecognizer(longPressGesture)
}
required init?(coder: NSCoder) {
@@ -261,6 +268,21 @@ final class VideoScrubberComponent: Component {
self.displayLink?.invalidate()
}
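// Only begin the long-press gesture when an audio track is present and the touch falls inside the audio area.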
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
guard let component = self.component, component.audioData != nil else {
return false
}
let location = gestureRecognizer.location(in: self.audioContainerView)
return self.audioContainerView.bounds.contains(location)
}
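// Report long presses on the audio track so the screen can present the audio options menu.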
@objc private func longPressed(_ gestureRecognizer: UILongPressGestureRecognizer) {
guard let component = self.component, component.audioData != nil, case .began = gestureRecognizer.state else {
return
}
component.audioLongPressed?(self.audioContainerView)
}
@objc private func audioButtonPressed() {
self.isAudioSelected = true
self.state?.updated(transition: .easeInOut(duration: 0.25))
@@ -334,9 +356,17 @@ final class VideoScrubberComponent: Component {
self.component = component
self.state = state
if let previousComponent, previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
self.isAudioSelected = true
var animateAudioAppearance = false
if let previousComponent {
if previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
self.isAudioSelected = true
animateAudioAppearance = true
} else if previousComponent.audioData != nil, component.audioData == nil {
self.positionAnimation = nil
self.isAudioSelected = false
animateAudioAppearance = true
}
}
let scrubberSpacing: CGFloat = 4.0
@@ -347,6 +377,13 @@ final class VideoScrubberComponent: Component {
let scrubberSize = CGSize(width: availableSize.width, height: scrubberHeight)
self.scrubberSize = scrubberSize
var audioTransition = transition
var videoTransition = transition
if animateAudioAppearance {
audioTransition = .easeInOut(duration: 0.25)
videoTransition = .easeInOut(duration: 0.25)
}
var originY: CGFloat = 0
var totalHeight = scrubberSize.height
var audioAlpha: CGFloat = 0.0
@@ -364,24 +401,22 @@ final class VideoScrubberComponent: Component {
} else {
self.isAudioSelected = false
}
transition.setAlpha(view: self.audioClippingView, alpha: audioAlpha)
audioTransition.setAlpha(view: self.audioClippingView, alpha: audioAlpha)
self.audioButton.isUserInteractionEnabled = !self.isAudioSelected
self.videoButton.isUserInteractionEnabled = self.isAudioSelected
let audioClippingFrame = CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: audioScrubberHeight))
transition.setFrame(view: self.audioButton, frame: audioClippingFrame)
transition.setFrame(view: self.audioClippingView, frame: audioClippingFrame)
audioTransition.setFrame(view: self.audioButton, frame: audioClippingFrame)
audioTransition.setFrame(view: self.audioClippingView, frame: audioClippingFrame)
let audioContainerFrame = CGRect(origin: .zero, size: audioClippingFrame.size)
transition.setFrame(view: self.audioContainerView, frame: audioContainerFrame)
transition.setFrame(view: self.audioBackgroundView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
self.audioBackgroundView.update(size: audioClippingFrame.size, transition: transition.containedViewLayoutTransition)
transition.setFrame(view: self.audioVibrancyView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
transition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
transition.setAlpha(view: self.audioTrimView, alpha: self.isAudioSelected ? 1.0 : 0.0)
audioTransition.setFrame(view: self.audioContainerView, frame: audioContainerFrame)
audioTransition.setFrame(view: self.audioBackgroundView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
self.audioBackgroundView.update(size: audioClippingFrame.size, transition: audioTransition.containedViewLayoutTransition)
audioTransition.setFrame(view: self.audioVibrancyView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
audioTransition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
if let audioData = component.audioData {
var components: [String] = []
if let artist = audioData.artist {
@@ -409,20 +444,30 @@ final class VideoScrubberComponent: Component {
let iconSize = CGSize(width: 14.0, height: 14.0)
let totalWidth = iconSize.width + audioTitleSize.width + spacing
transition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
transition.setFrame(view: self.audioIconView, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize))
audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioIconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
if let view = self.audioTitle.view {
if view.superview == nil {
view.alpha = 0.0
view.isUserInteractionEnabled = false
self.addSubview(self.audioIconView)
self.addSubview(view)
self.audioContainerView.addSubview(self.audioIconView)
self.audioContainerView.addSubview(view)
}
transition.setAlpha(view: view, alpha: self.isAudioSelected ? 0.0 : 1.0)
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0) + iconSize.width + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize))
audioTransition.setAlpha(view: view, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioTitleFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0) + iconSize.width + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
audioTransition.setBounds(view: view, bounds: CGRect(origin: .zero, size: audioTitleFrame.size))
audioTransition.setPosition(view: view, position: audioTitleFrame.center)
}
} else {
audioTransition.setAlpha(view: self.audioIconView, alpha: 0.0)
if let view = self.audioTitle.view {
audioTransition.setAlpha(view: view, alpha: 0.0)
}
}
if let audioData = component.audioData, let samples = audioData.samples {
@@ -451,11 +496,26 @@ final class VideoScrubberComponent: Component {
view.layer.animateScaleY(from: 0.01, to: 1.0, duration: 0.2)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected ? 0.0 : 6.0), size: audioWaveformSize))
audioTransition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected ? 0.0 : 6.0), size: audioWaveformSize))
}
}
let bounds = CGRect(origin: .zero, size: scrubberSize)
let totalWidth = scrubberSize.width - handleWidth
audioTransition.setAlpha(view: self.audioTrimView, alpha: self.isAudioSelected ? 1.0 : 0.0)
audioTransition.setFrame(view: self.audioTrimView, frame: bounds)
let _ = self.audioTrimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
duration: component.duration,
startPosition: component.startPosition,
endPosition: component.duration,
position: component.position,
maxDuration: component.maxDuration,
transition: transition
)
if component.framesUpdateTimestamp != previousFramesUpdateTimestamp {
for i in 0 ..< component.frames.count {
@@ -486,8 +546,6 @@ final class VideoScrubberComponent: Component {
}
}
let totalWidth = scrubberSize.width - handleWidth
let (leftHandleFrame, rightHandleFrame) = self.videoTrimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
@@ -504,7 +562,7 @@ final class VideoScrubberComponent: Component {
self.displayLink?.isPaused = true
let cursorHeight: CGFloat = component.audioData != nil ? 80.0 : 50.0
transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, height: cursorHeight, position: component.position, duration: component.duration))
videoTransition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, height: cursorHeight, position: component.position, duration: component.duration))
} else {
if let (_, _, end, ended) = self.positionAnimation {
if ended, component.position >= component.startPosition && component.position < end - 1.0 {
@@ -518,15 +576,14 @@ final class VideoScrubberComponent: Component {
}
// transition.setAlpha(view: self.cursorView, alpha: self.isPanningTrimHandle ? 0.0 : 1.0)
transition.setAlpha(view: self.videoTrimView, alpha: self.isAudioSelected ? 0.0 : 1.0)
transition.setFrame(view: self.videoTrimView, frame: bounds.offsetBy(dx: 0.0, dy: originY))
videoTransition.setAlpha(view: self.videoTrimView, alpha: self.isAudioSelected ? 0.0 : 1.0)
videoTransition.setFrame(view: self.videoTrimView, frame: bounds.offsetBy(dx: 0.0, dy: originY))
let handleInset: CGFloat = 7.0
transition.setFrame(view: self.transparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: originY), size: CGSize(width: scrubberSize.width, height: videoScrubberHeight)))
transition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: originY), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
transition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setFrame(view: self.transparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: originY), size: CGSize(width: scrubberSize.width, height: videoScrubberHeight)))
videoTransition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: originY), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
transition.setFrame(view: self.videoButton, frame: bounds.offsetBy(dx: 0.0, dy: originY))
videoTransition.setFrame(view: self.videoButton, frame: bounds.offsetBy(dx: 0.0, dy: originY))
var frameAspectRatio = 0.66
if let image = component.frames.first, image.size.height > 0.0 {
@@ -543,8 +600,8 @@ final class VideoScrubberComponent: Component {
transparentFrameLayer.bounds = CGRect(origin: .zero, size: frame.size)
opaqueFrameLayer.bounds = CGRect(origin: .zero, size: frame.size)
transition.setPosition(layer: transparentFrameLayer, position: frame.center)
transition.setPosition(layer: opaqueFrameLayer, position: frame.center)
videoTransition.setPosition(layer: transparentFrameLayer, position: frame.center)
videoTransition.setPosition(layer: opaqueFrameLayer, position: frame.center)
}
frameOffset += frameSize.width
}

View File

@@ -3267,6 +3267,16 @@ final class StoryItemSetContainerSendMessage {
}
controller?.push(locationController)
}))
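// Tapping a reaction sticker on another user's story sets the story reaction, with a stealth mode confirmation if needed.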
case let .reaction(_, reaction):
if component.slice.peer.id != component.context.account.peerId {
self.performWithPossibleStealthModeConfirmation(view: view, action: { [weak view] in
guard let view, let component = view.component else {
return
}
let _ = component.context.engine.messages.setStoryReaction(peerId: component.slice.peer.id, id: component.slice.item.storyItem.id, reaction: reaction).start()
})
}
return
}
let referenceSize = view.controlsContainerView.frame.size

View File

@@ -0,0 +1,23 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "VolumeSliderContextItem",
module_name = "VolumeSliderContextItem",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",
"//submodules/ContextUI:ContextUI",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/AnimatedCountLabelNode:AnimatedCountLabelNode",
],
visibility = [
"//visibility:public",
],
)

View File

@@ -0,0 +1,382 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
private final class VoiceChatSpeakerNodeDrawingState: NSObject {
let color: UIColor
let transition: CGFloat
let reverse: Bool
init(color: UIColor, transition: CGFloat, reverse: Bool) {
self.color = color
self.transition = transition
self.reverse = reverse
super.init()
}
}
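// Draws one of the three speaker wave arcs, from the smallest (1) to the largest (3).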
private func generateWaveImage(color: UIColor, num: Int) -> UIImage? {
return generateImage(CGSize(width: 36.0, height: 36.0), rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setStrokeColor(color.cgColor)
context.setLineWidth(1.0 + UIScreenPixel)
context.setLineCap(.round)
context.translateBy(x: 6.0, y: 6.0)
switch num {
case 1:
let _ = try? drawSvgPath(context, path: "M15,9 C15.6666667,9.95023099 16,10.9487504 16,11.9955581 C16,13.0423659 15.6666667,14.0438465 15,15 S ")
case 2:
let _ = try? drawSvgPath(context, path: "M17.5,6.5 C18.8724771,8.24209014 19.5587156,10.072709 19.5587156,11.9918565 C19.5587156,13.9110041 18.8724771,15.7470519 17.5,17.5 S ")
case 3:
let _ = try? drawSvgPath(context, path: "M20,3.5 C22,6.19232113 23,9.02145934 23,11.9874146 C23,14.9533699 22,17.7908984 20,20.5 S ")
default:
break
}
})
}
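// Speaker icon with up to three volume waves, animating between muted, low, medium and high states.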
final class VoiceChatSpeakerNode: ASDisplayNode {
class State: Equatable {
enum Value: Equatable {
case muted
case low
case medium
case high
}
let value: Value
let color: UIColor
init(value: Value, color: UIColor) {
self.value = value
self.color = color
}
static func ==(lhs: State, rhs: State) -> Bool {
if lhs.value != rhs.value {
return false
}
if lhs.color.argb != rhs.color.argb {
return false
}
return true
}
}
private var hasState = false
private var state: State = State(value: .medium, color: .black)
private let iconNode: IconNode
private let waveNode1: ASImageNode
private let waveNode2: ASImageNode
private let waveNode3: ASImageNode
override init() {
self.iconNode = IconNode()
self.waveNode1 = ASImageNode()
self.waveNode1.displaysAsynchronously = false
self.waveNode1.displayWithoutProcessing = true
self.waveNode2 = ASImageNode()
self.waveNode2.displaysAsynchronously = false
self.waveNode2.displayWithoutProcessing = true
self.waveNode3 = ASImageNode()
self.waveNode3.displaysAsynchronously = false
self.waveNode3.displayWithoutProcessing = true
super.init()
self.addSubnode(self.iconNode)
self.addSubnode(self.waveNode1)
self.addSubnode(self.waveNode2)
self.addSubnode(self.waveNode3)
}
private var animating = false
func update(state: State, animated: Bool, force: Bool = false) {
var animated = animated
if !self.hasState {
self.hasState = true
animated = false
}
if self.state != state || force {
let previousState = self.state
self.state = state
if animated && self.animating {
return
}
if previousState.color != state.color {
self.waveNode1.image = generateWaveImage(color: state.color, num: 1)
self.waveNode2.image = generateWaveImage(color: state.color, num: 2)
self.waveNode3.image = generateWaveImage(color: state.color, num: 3)
}
self.update(transition: animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate, completion: {
if self.state != state {
self.update(state: self.state, animated: animated, force: true)
}
})
}
}
private func update(transition: ContainedViewLayoutTransition, completion: @escaping () -> Void = {}) {
self.animating = transition.isAnimated
self.iconNode.update(state: IconNode.State(muted: self.state.value == .muted, color: self.state.color), animated: transition.isAnimated)
let bounds = self.bounds
let center = CGPoint(x: bounds.width / 2.0, y: bounds.height / 2.0)
self.iconNode.bounds = CGRect(origin: CGPoint(), size: bounds.size)
self.waveNode1.bounds = CGRect(origin: CGPoint(), size: bounds.size)
self.waveNode2.bounds = CGRect(origin: CGPoint(), size: bounds.size)
self.waveNode3.bounds = CGRect(origin: CGPoint(), size: bounds.size)
let iconPosition: CGPoint
let wave1Position: CGPoint
var wave1Alpha: CGFloat = 1.0
let wave2Position: CGPoint
var wave2Alpha: CGFloat = 1.0
let wave3Position: CGPoint
var wave3Alpha: CGFloat = 1.0
switch self.state.value {
case .muted:
iconPosition = CGPoint(x: center.x, y: center.y)
wave1Position = CGPoint(x: center.x + 4.0, y: center.y)
wave2Position = CGPoint(x: center.x + 4.0, y: center.y)
wave3Position = CGPoint(x: center.x + 4.0, y: center.y)
wave1Alpha = 0.0
wave2Alpha = 0.0
wave3Alpha = 0.0
case .low:
iconPosition = CGPoint(x: center.x - 1.0, y: center.y)
wave1Position = CGPoint(x: center.x + 3.0, y: center.y)
wave2Position = CGPoint(x: center.x + 3.0, y: center.y)
wave3Position = CGPoint(x: center.x + 3.0, y: center.y)
wave2Alpha = 0.0
wave3Alpha = 0.0
case .medium:
iconPosition = CGPoint(x: center.x - 3.0, y: center.y)
wave1Position = CGPoint(x: center.x + 1.0, y: center.y)
wave2Position = CGPoint(x: center.x + 1.0, y: center.y)
wave3Position = CGPoint(x: center.x + 1.0, y: center.y)
wave3Alpha = 0.0
case .high:
iconPosition = CGPoint(x: center.x - 4.0, y: center.y)
wave1Position = CGPoint(x: center.x, y: center.y)
wave2Position = CGPoint(x: center.x, y: center.y)
wave3Position = CGPoint(x: center.x, y: center.y)
}
transition.updatePosition(node: self.iconNode, position: iconPosition) { _ in
self.animating = false
completion()
}
transition.updatePosition(node: self.waveNode1, position: wave1Position)
transition.updatePosition(node: self.waveNode2, position: wave2Position)
transition.updatePosition(node: self.waveNode3, position: wave3Position)
transition.updateAlpha(node: self.waveNode1, alpha: wave1Alpha)
transition.updateAlpha(node: self.waveNode2, alpha: wave2Alpha)
transition.updateAlpha(node: self.waveNode3, alpha: wave3Alpha)
}
}
private class IconNode: ASDisplayNode {
class State: Equatable {
let muted: Bool
let color: UIColor
init(muted: Bool, color: UIColor) {
self.muted = muted
self.color = color
}
static func ==(lhs: State, rhs: State) -> Bool {
if lhs.muted != rhs.muted {
return false
}
if lhs.color.argb != rhs.color.argb {
return false
}
return true
}
}
private class TransitionContext {
let startTime: Double
let duration: Double
let previousState: State
init(startTime: Double, duration: Double, previousState: State) {
self.startTime = startTime
self.duration = duration
self.previousState = previousState
}
}
private var animator: ConstantDisplayLinkAnimator?
private var hasState = false
private var state: State = State(muted: false, color: .black)
private var transitionContext: TransitionContext?
override init() {
super.init()
self.isOpaque = false
}
func update(state: State, animated: Bool) {
var animated = animated
if !self.hasState {
self.hasState = true
animated = false
}
if self.state != state {
let previousState = self.state
self.state = state
if animated {
self.transitionContext = TransitionContext(startTime: CACurrentMediaTime(), duration: 0.18, previousState: previousState)
}
self.updateAnimations()
self.setNeedsDisplay()
}
}
private func updateAnimations() {
var animate = false
let timestamp = CACurrentMediaTime()
if let transitionContext = self.transitionContext {
if transitionContext.startTime + transitionContext.duration < timestamp {
self.transitionContext = nil
} else {
animate = true
}
}
if animate {
let animator: ConstantDisplayLinkAnimator
if let current = self.animator {
animator = current
} else {
animator = ConstantDisplayLinkAnimator(update: { [weak self] in
self?.updateAnimations()
})
self.animator = animator
}
animator.isPaused = false
} else {
self.animator?.isPaused = true
}
self.setNeedsDisplay()
}
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0
var color = self.state.color
var reverse = false
if let transitionContext = self.transitionContext {
let timestamp = CACurrentMediaTime()
var t = CGFloat((timestamp - transitionContext.startTime) / transitionContext.duration)
t = min(1.0, max(0.0, t))
if transitionContext.previousState.muted != self.state.muted {
transitionFraction = self.state.muted ? t : 1.0 - t
reverse = transitionContext.previousState.muted
}
if transitionContext.previousState.color.rgb != color.rgb {
color = transitionContext.previousState.color.interpolateTo(color, fraction: t)!
}
}
return VoiceChatSpeakerNodeDrawingState(color: color, transition: transitionFraction, reverse: reverse)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
if !isRasterizing {
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(bounds)
}
guard let parameters = parameters as? VoiceChatSpeakerNodeDrawingState else {
return
}
let clearLineWidth: CGFloat = 4.0
let lineWidth: CGFloat = 1.0 + UIScreenPixel
context.setFillColor(parameters.color.cgColor)
context.setStrokeColor(parameters.color.cgColor)
context.setLineWidth(lineWidth)
context.translateBy(x: 7.0, y: 6.0)
let _ = try? drawSvgPath(context, path: "M7,9 L10,9 L13.6080479,5.03114726 C13.9052535,4.70422117 14.4112121,4.6801279 14.7381382,4.97733344 C14.9049178,5.12895118 15,5.34388952 15,5.5692855 L15,18.4307145 C15,18.8725423 14.6418278,19.2307145 14.2,19.2307145 C13.974604,19.2307145 13.7596657,19.1356323 13.6080479,18.9688527 L10,15 L7,15 C6.44771525,15 6,14.5522847 6,14 L6,10 C6,9.44771525 6.44771525,9 7,9 S ")
context.translateBy(x: -7.0, y: -6.0)
if parameters.transition > 0.0 {
let startPoint: CGPoint
let endPoint: CGPoint
let origin: CGPoint
let length: CGFloat
if bounds.width > 30.0 {
origin = CGPoint(x: 9.0, y: 10.0 - UIScreenPixel)
length = 17.0
} else {
origin = CGPoint(x: 5.0 + UIScreenPixel, y: 4.0 + UIScreenPixel)
length = 15.0
}
if parameters.reverse {
startPoint = CGPoint(x: origin.x + length * (1.0 - parameters.transition), y: origin.y + length * (1.0 - parameters.transition))
endPoint = CGPoint(x: origin.x + length, y: origin.y + length)
} else {
startPoint = origin
endPoint = CGPoint(x: origin.x + length * parameters.transition, y: origin.y + length * parameters.transition)
}
context.setBlendMode(.clear)
context.setLineWidth(clearLineWidth)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
context.setBlendMode(.normal)
context.setStrokeColor(parameters.color.cgColor)
context.setLineWidth(lineWidth)
context.setLineCap(.round)
context.setLineJoin(.round)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
}
}
}

View File

@@ -0,0 +1,300 @@
import Foundation
import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import TelegramPresentationData
import AppBundle
import ContextUI
import AnimatedCountLabelNode
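// Context menu item with a draggable volume slider, an animated percentage label and a speaker icon.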
public final class VolumeSliderContextItem: ContextMenuCustomItem {
private let minValue: CGFloat
private let value: CGFloat
private let valueChanged: (CGFloat, Bool) -> Void
public init(minValue: CGFloat, value: CGFloat, valueChanged: @escaping (CGFloat, Bool) -> Void) {
self.minValue = minValue
self.value = value
self.valueChanged = valueChanged
}
public func node(presentationData: PresentationData, getController: @escaping () -> ContextControllerProtocol?, actionSelected: @escaping (ContextMenuActionResult) -> Void) -> ContextMenuCustomNode {
return VolumeSliderContextItemNode(presentationData: presentationData, getController: getController, minValue: self.minValue, value: self.value, valueChanged: self.valueChanged)
}
}
private let textFont = Font.with(size: 17.0, design: .regular, traits: .monospacedNumbers)
private final class VolumeSliderContextItemNode: ASDisplayNode, ContextMenuCustomNode {
private var presentationData: PresentationData
private(set) var vibrancyEffectView: UIVisualEffectView?
private let backgroundIconNode: VoiceChatSpeakerNode
private let backgroundTextNode: ImmediateAnimatedCountLabelNode
private let dimBackgroundTextNode: ImmediateAnimatedCountLabelNode
private let foregroundNode: ASDisplayNode
private let foregroundIconNode: VoiceChatSpeakerNode
private let foregroundTextNode: ImmediateAnimatedCountLabelNode
let minValue: CGFloat
var value: CGFloat = 1.0 {
didSet {
self.updateValue(transition: .animated(duration: 0.2, curve: .spring))
}
}
private let valueChanged: (CGFloat, Bool) -> Void
private let hapticFeedback = HapticFeedback()
init(presentationData: PresentationData, getController: @escaping () -> ContextControllerProtocol?, minValue: CGFloat, value: CGFloat, valueChanged: @escaping (CGFloat, Bool) -> Void) {
self.presentationData = presentationData
self.minValue = minValue
self.value = value
self.valueChanged = valueChanged
self.backgroundIconNode = VoiceChatSpeakerNode()
self.backgroundTextNode = ImmediateAnimatedCountLabelNode()
self.backgroundTextNode.alwaysOneDirection = true
self.dimBackgroundTextNode = ImmediateAnimatedCountLabelNode()
self.dimBackgroundTextNode.alwaysOneDirection = true
self.foregroundNode = ASDisplayNode()
self.foregroundNode.clipsToBounds = true
self.foregroundNode.isAccessibilityElement = false
self.foregroundNode.backgroundColor = UIColor(rgb: 0xffffff)
self.foregroundNode.isUserInteractionEnabled = false
self.foregroundIconNode = VoiceChatSpeakerNode()
self.foregroundTextNode = ImmediateAnimatedCountLabelNode()
self.foregroundTextNode.alwaysOneDirection = true
super.init()
self.isUserInteractionEnabled = true
if presentationData.theme.overallDarkAppearance {
} else {
let style: UIBlurEffect.Style
style = .extraLight
let blurEffect = UIBlurEffect(style: style)
let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect)
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
self.vibrancyEffectView = vibrancyEffectView
}
self.addSubnode(self.backgroundIconNode)
self.addSubnode(self.backgroundTextNode)
self.addSubnode(self.dimBackgroundTextNode)
self.addSubnode(self.foregroundNode)
self.foregroundNode.addSubnode(self.foregroundIconNode)
self.foregroundNode.addSubnode(self.foregroundTextNode)
let stringValue = "100%"
let dimBackgroundTextColor = self.vibrancyEffectView != nil ? UIColor(white: 0.0, alpha: 0.15) : .clear
let backgroundTextColor = self.vibrancyEffectView != nil ? UIColor(white: 1.0, alpha: 0.7) : self.presentationData.theme.contextMenu.secondaryColor
let foregroundTextColor = UIColor.black
var dimBackgroundSegments: [AnimatedCountLabelNode.Segment] = []
var backgroundSegments: [AnimatedCountLabelNode.Segment] = []
var foregroundSegments: [AnimatedCountLabelNode.Segment] = []
var textCount = 0
for char in stringValue {
if let intValue = Int(String(char)) {
dimBackgroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: dimBackgroundTextColor)))
backgroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: backgroundTextColor)))
foregroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: foregroundTextColor)))
} else {
dimBackgroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: dimBackgroundTextColor)))
backgroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: backgroundTextColor)))
foregroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: foregroundTextColor)))
textCount += 1
}
}
self.dimBackgroundTextNode.segments = dimBackgroundSegments
self.backgroundTextNode.segments = backgroundSegments
self.foregroundTextNode.segments = foregroundSegments
}
override func didLoad() {
super.didLoad()
if let vibrancyEffectView = self.vibrancyEffectView {
Queue.mainQueue().after(0.05) {
if let effectNode = findEffectNode(node: self.supernode) {
effectNode.effectView?.contentView.insertSubview(vibrancyEffectView, at: 0)
vibrancyEffectView.contentView.addSubnode(self.backgroundTextNode)
}
}
}
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
self.view.addGestureRecognizer(panGestureRecognizer)
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
self.view.addGestureRecognizer(tapGestureRecognizer)
}
func updateTheme(presentationData: PresentationData) {
self.presentationData = presentationData
self.updateValue()
}
private func updateValue(transition: ContainedViewLayoutTransition = .immediate) {
let width = self.frame.width
let value = self.value
transition.updateFrameAdditive(node: self.foregroundNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: value * width, height: self.frame.height)))
let stringValue = "\(Int(self.value * 100.0))%"
let dimBackgroundTextColor = self.vibrancyEffectView != nil ? UIColor(white: 0.0, alpha: 0.15) : .clear
let backgroundTextColor = self.vibrancyEffectView != nil ? UIColor(white: 1.0, alpha: 0.7) : self.presentationData.theme.contextMenu.secondaryColor
let foregroundTextColor = UIColor.black
var dimBackgroundSegments: [AnimatedCountLabelNode.Segment] = []
var backgroundSegments: [AnimatedCountLabelNode.Segment] = []
var foregroundSegments: [AnimatedCountLabelNode.Segment] = []
var textCount = 0
for char in stringValue {
if let intValue = Int(String(char)) {
dimBackgroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: dimBackgroundTextColor)))
backgroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: backgroundTextColor)))
foregroundSegments.append(.number(intValue, NSAttributedString(string: String(char), font: textFont, textColor: foregroundTextColor)))
} else {
dimBackgroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: dimBackgroundTextColor)))
backgroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: backgroundTextColor)))
foregroundSegments.append(.text(textCount, NSAttributedString(string: String(char), font: textFont, textColor: foregroundTextColor)))
textCount += 1
}
}
self.dimBackgroundTextNode.segments = dimBackgroundSegments
self.backgroundTextNode.segments = backgroundSegments
self.foregroundTextNode.segments = foregroundSegments
let _ = self.dimBackgroundTextNode.updateLayout(size: CGSize(width: 70.0, height: .greatestFiniteMagnitude), animated: transition.isAnimated)
let _ = self.backgroundTextNode.updateLayout(size: CGSize(width: 70.0, height: .greatestFiniteMagnitude), animated: transition.isAnimated)
let _ = self.foregroundTextNode.updateLayout(size: CGSize(width: 70.0, height: .greatestFiniteMagnitude), animated: transition.isAnimated)
let iconValue: VoiceChatSpeakerNode.State.Value
if value == 0.0 {
iconValue = .muted
} else if value < 0.33 {
iconValue = .low
} else if value < 0.66 {
iconValue = .medium
} else {
iconValue = .high
}
self.backgroundIconNode.update(state: VoiceChatSpeakerNode.State(value: iconValue, color: UIColor(rgb: 0xffffff)), animated: true)
self.foregroundIconNode.update(state: VoiceChatSpeakerNode.State(value: iconValue, color: UIColor(rgb: 0x000000)), animated: true)
}
func updateLayout(constrainedWidth: CGFloat, constrainedHeight: CGFloat) -> (CGSize, (CGSize, ContainedViewLayoutTransition) -> Void) {
let valueWidth: CGFloat = 70.0
let height: CGFloat = 45.0
var backgroundTextSize = self.backgroundTextNode.updateLayout(size: CGSize(width: 70.0, height: .greatestFiniteMagnitude), animated: true)
backgroundTextSize.width = valueWidth
return (CGSize(width: height * 3.0, height: height), { size, transition in
let leftInset: CGFloat = 17.0
self.vibrancyEffectView?.frame = CGRect(origin: .zero, size: size)
let textFrame = CGRect(origin: CGPoint(x: leftInset, y: floor((height - backgroundTextSize.height) / 2.0)), size: backgroundTextSize)
transition.updateFrameAdditive(node: self.dimBackgroundTextNode, frame: textFrame)
transition.updateFrameAdditive(node: self.backgroundTextNode, frame: textFrame)
transition.updateFrameAdditive(node: self.foregroundTextNode, frame: textFrame)
let iconSize = CGSize(width: 36.0, height: 36.0)
let iconFrame = CGRect(origin: CGPoint(x: size.width - iconSize.width - 10.0, y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
self.backgroundIconNode.frame = iconFrame
self.foregroundIconNode.frame = iconFrame
self.updateValue(transition: transition)
})
}
@objc private func panGesture(_ gestureRecognizer: UIPanGestureRecognizer) {
switch gestureRecognizer.state {
case .began:
break
case .changed:
let previousValue = self.value
let translation: CGFloat = gestureRecognizer.translation(in: gestureRecognizer.view).x
let delta = translation / self.bounds.width
self.value = max(self.minValue, min(1.0, self.value + delta))
gestureRecognizer.setTranslation(CGPoint(), in: gestureRecognizer.view)
if self.value == 1.0 && previousValue != 1.0 {
self.backgroundIconNode.layer.animateScale(from: 1.0, to: 1.1, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.backgroundIconNode.layer.animateScale(from: 1.1, to: 1.0, duration: 0.16)
}
})
self.foregroundIconNode.layer.animateScale(from: 1.0, to: 1.1, duration: 0.16, removeOnCompletion: false, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.foregroundIconNode.layer.animateScale(from: 1.1, to: 1.0, duration: 0.16)
}
})
self.hapticFeedback.impact(.soft)
} else if self.value == 0.0 && previousValue != 0.0 {
self.hapticFeedback.impact(.soft)
}
if abs(previousValue - self.value) >= 0.01 {
self.valueChanged(self.value, false)
}
case .ended:
let translation: CGFloat = gestureRecognizer.translation(in: gestureRecognizer.view).x
let delta = translation / self.bounds.width
self.value = max(self.minValue, min(1.0, self.value + delta))
self.valueChanged(self.value, true)
default:
break
}
}
@objc private func tapGesture(_ gestureRecognizer: UITapGestureRecognizer) {
let location = gestureRecognizer.location(in: gestureRecognizer.view)
self.value = max(self.minValue, min(1.0, location.x / self.bounds.width))
self.valueChanged(self.value, true)
}
func canBeHighlighted() -> Bool {
return false
}
func updateIsHighlighted(isHighlighted: Bool) {
}
func performAction() {
}
}
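// Walks up the node hierarchy to find the NavigationBackgroundNode hosting the context menu's blur effect.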
private func findEffectNode(node: ASDisplayNode?) -> NavigationBackgroundNode? {
if let node = node {
if let subnodes = node.subnodes {
for node in subnodes {
if let effectNode = node as? NavigationBackgroundNode {
return effectNode
}
}
}
return findEffectNode(node: node.supernode)
} else {
return nil
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "Reaction.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "Reaction Sticker.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}